#include <agents-cpp/agent_context.h>
#include <agents-cpp/agents/actor_agent.h>
#include <agents-cpp/config_loader.h>
#include <agents-cpp/llm_interface.h>
#include <agents-cpp/logger.h>
#include <agents-cpp/tool.h>
#include <agents-cpp/workflows/parallelization_workflow.h>
#include <agents-cpp/workflows/prompt_chaining_workflow.h>

#include <chrono>
#include <iostream>
#include <memory>
#include <optional>
#include <string>
#include <thread>
#include <vector>
ToolResult calculatorTool(
const JsonObject& params) {
try {
if (params.contains("expression")) {
String expr = params["expression"];
double result = 0.0;
if (expr == "1+1") {
result = 2.0;
} else if (expr == "2*3") {
result = 6.0;
} else {
result = 42.0;
}
return {
true,
"Calculated result: " + std::to_string(result),
{{"result", result}}
};
} else {
return {
false,
"Missing expression parameter",
{{"error", "Missing expression parameter"}}
};
}
} catch (const std::exception& e) {
return {
false,
"Error calculating result: " + String(e.what()),
{{"error", e.what()}}
};
}
}
/**
 * @brief Mock weather tool callback.
 *
 * Reads the "location" parameter and reports a canned forecast (always
 * "sunny", 22.0 °C) — this is demo stub data, not a real API call.
 *
 * @param params JSON object expected to contain a "location" string.
 * @return Success result with location/weather/temperature fields, or a
 *         failure result when the parameter is missing or an exception
 *         occurs.
 */
ToolResult weatherTool(
    const JsonObject& params) {
    try {
        // Guard clause: missing parameter -> error result.
        if (!params.contains("location")) {
            return {
                false,
                "Missing location parameter",
                {{"error", "Missing location parameter"}}
            };
        }
        const String place = params["location"];
        const String conditions = "sunny";   // canned stub value
        const double degrees = 22.0;         // canned stub value
        return {
            true,
            "Weather in " + place + ": " + conditions + ", " + std::to_string(degrees) + "°C",
            {
                {"location", place},
                {"weather", conditions},
                {"temperature", degrees}
            }
        };
    } catch (const std::exception& e) {
        return {
            false,
            "Error getting weather: " + String(e.what()),
            {{"error", e.what()}}
        };
    }
}
int main(int argc, char* argv[]) {
String api_key;
api_key = config.get("GEMINI_API_KEY", "");
if (api_key.empty() && argc > 1) {
api_key = argv[1];
}
if (api_key.empty()) {
Logger::error(
"1. Create a .env file with GEMINI_API_KEY=your_key, or");
Logger::error(
"2. Set the GEMINI_API_KEY environment variable, or");
Logger::error(
"3. Provide an API key as a command line argument");
return 1;
}
try {
auto llm = createLLM("google", api_key, "gemini-1.5-flash");
llm->setOptions(options);
auto calculator = createTool(
"calculator",
"Calculate mathematical expressions",
{
{"expression", "string", "The mathematical expression to calculate", true}
},
calculatorTool
);
auto weather = createTool(
"weather",
"Get weather information for a location",
{
{"location", "string", "The location to get weather for", true}
},
weatherTool
);
auto context = std::make_shared<AgentContext>();
context->setLLM(llm);
context->registerTool(calculator);
context->registerTool(weather);
std::cout << "\n=== Example 1: Prompt Chaining Workflow ===\n\n";
auto chaining_workflow = std::make_shared<PromptChainingWorkflow>(context);
chaining_workflow->addStep(
"brainstorm",
"Brainstorm 3 creative ideas for a short story about space exploration. Return them as a JSON array."
);
chaining_workflow->addStep(
"select",
"From these ideas, select the most interesting one and explain why you chose it:\n{{response}}"
);
chaining_workflow->addStep(
"outline",
"Create a brief outline for a story based on this idea:\n{{response}}"
);
auto result = chaining_workflow->run();
std::cout << "Prompt chaining result: " << result.dump(2) << "\n\n";
std::cout << "\n=== Example 2: Parallelization Workflow (Sectioning) ===\n\n";
auto parallel_workflow = std::make_shared<ParallelizationWorkflow>(
context, ParallelizationWorkflow::Strategy::SECTIONING
);
parallel_workflow->addTask(
"characters",
"Create 2 interesting characters for a sci-fi story set on Mars."
);
parallel_workflow->addTask(
"setting",
"Describe the environment and setting of a Mars colony in the year 2150."
);
parallel_workflow->addTask(
"plot",
"Create a plot outline for a mystery story set on Mars."
);
parallel_workflow->init();
result = parallel_workflow->run();
std::cout << "Parallelization result: " << result.dump(2) << "\n\n";
std::cout << "\n=== Example 3: Actor Agent with Tools ===\n\n";
auto agent = std::make_shared<ActorAgent>(context);
agent->setSystemPrompt(
"You are a helpful assistant that can answer questions and use tools to get information. "
"When using tools, make sure to include all necessary parameters."
);
agent->setOptions(agent_options);
agent->setStatusCallback([](const String& status) {
std::cout << "Agent status: " << status << "\n";
});
agent->init();
std::vector<String> tasks = {
"What is 1+1?",
"What's the weather like in New York?",
"Tell me a short story about a robot learning to feel emotions."
};
for (const auto& task : tasks) {
std::cout << "\nTask: " << task << "\n";
result = blockingWait(agent->run(task));
std::cout << "Result: " << result.dump(2) << "\n";
std::this_thread::sleep_for(std::chrono::seconds(1));
}
return 0;
} catch (const std::exception& e) {
std::cerr << "Error: " << e.what() << "\n";
return 1;
}
}
static ConfigLoader & getInstance()
Get the singleton instance of ConfigLoader.
static void error(fmt::format_string< Args... > fmt, Args &&... args)
Log a message at error level.
Definition logger.h:124
static void setLevel(Level level)
Set the log level.
Actor-based agent implementation.
Definition actor_agent.h:31
Context for an agent, containing tools, LLM, and memory.
Definition agent_context.h:30
Interface for agents.
Definition agent.h:26
A utility class to load and access configuration values from .env files.
Definition config_loader.h:24
Interface for language model providers (OpenAI, Anthropic, Google, Ollama)
Definition llm_interface.h:68
Logger utility class that wraps spdlog functionality.
Definition logger.h:23
A workflow that runs multiple tasks in parallel.
Definition parallelization_workflow.h:27
A workflow that chains multiple prompts together.
Definition prompt_chaining_workflow.h:26
nlohmann::json JsonObject
JSON object type.
Definition types.h:39
std::shared_ptr< LLMInterface > createLLM(const String &provider, const String &api_key, const String &model="")
Factory function to create a specific LLM provider.
std::string String
String type.
Definition types.h:27
std::shared_ptr< Tool > createTool(const String &name, const String &description, const std::vector< Parameter > &parameters, ToolCallback callback)
Create a custom tool with a name, description, parameters, and callback.
T blockingWait(Task< T > &&task)
Helper to run a task and get the result synchronously.
Definition coroutine_utils.h:169
Agent execution options.
Definition agent.h:61
bool human_feedback_enabled
Whether human feedback is enabled.
Definition agent.h:75
int max_iterations
The maximum number of iterations.
Definition agent.h:65
Options for LLM API calls.
Definition llm_interface.h:25
double temperature
The temperature of the LLM.
Definition llm_interface.h:29
int max_tokens
The maximum number of tokens.
Definition llm_interface.h:33
Options for LLM API calls.
Definition llm_interface.h:25
Response from an LLM.
Definition types.h:85
Message in a conversation.
Definition types.h:105