Routing Example — demonstrates a RoutingWorkflow that classifies each user query into a named route (factual, opinion, technical) and dispatches it to the matching handler, with an LLM fallback for unclassified input.
#include <agents-cpp/agent_context.h>
#include <agents-cpp/config_loader.h>
#include <agents-cpp/llm_interface.h>
#include <agents-cpp/logger.h>
#include <agents-cpp/tools/tool_registry.h>
#include <agents-cpp/types.h>
#include <agents-cpp/workflows/routing_workflow.h>
#include <iostream>
#include <string>
int main(int argc, char* argv[]) {
String api_key;
api_key = config.get("GEMINI_API_KEY", "");
if (api_key.empty() && argc > 1) {
api_key = argv[1];
}
if (api_key.empty()) {
Logger::error(
"1. Create a .env file with GEMINI_API_KEY=your_key, or");
Logger::error(
"2. Set the GEMINI_API_KEY environment variable, or");
Logger::error(
"3. Provide an API key as a command line argument");
return 1;
}
auto llm = createLLM("google", api_key, "gemini-1.5-flash");
llm->setOptions(options);
auto context = std::make_shared<AgentContext>();
context->setLLM(llm);
"You are a routing assistant that examines user queries and classifies them into appropriate categories. "
"Determine the most suitable category for handling the user's query based on the available routes."
);
"factual_query",
"Questions about facts, events, statistics, or general knowledge",
[context](const String& input, const JsonObject& routing_info) -> JsonObject {
ToolResult result = wiki_tool->execute({{
"query", input}});
JsonObject response;
response[
"answer"] =
"Based on research: " + result.
content;
return response;
}
);
"opinion_query",
"Questions seeking opinions, evaluations, or judgments on topics",
[context](const String& input, const JsonObject& routing_info) -> JsonObject {
auto opinion_context = std::make_shared<AgentContext>(*context);
opinion_context->setSystemPrompt(
"You are a balanced and thoughtful assistant that provides nuanced perspectives on complex topics. "
"Consider multiple viewpoints and provide balanced opinions."
);
LLMResponse llm_response = opinion_context->getLLM()->complete(input);
String response = llm_response.
content;
JsonObject result;
result["answer"] = "Opinion analysis: " + response;
return result;
}
);
"technical_query",
"Questions about technical topics, programming, or specialized domains",
[context](const String& input, const JsonObject& routing_info) -> JsonObject {
auto technical_context = std::make_shared<AgentContext>(*context);
technical_context->setSystemPrompt(
"You are a technical expert assistant that provides accurate and detailed information on technical topics. "
"Focus on clarity, precision, and correctness."
);
LLMResponse llm_response = technical_context->getLLM()->complete(input);
String response = llm_response.
content;
JsonObject result;
result["answer"] = "Technical explanation: " + response;
return result;
}
);
router.
setDefaultRoute([context](
const String& input,
const JsonObject& routing_info) -> JsonObject {
LLMResponse llm_response = context->getLLM()->complete(input);
String response = llm_response.
content;
JsonObject result;
result["answer"] = "General response: " + response;
return result;
});
String user_input;
while (true) {
std::getline(std::cin, user_input);
if (user_input == "exit" || user_input == "quit" || user_input == "q") {
break;
}
if (user_input.empty()) {
continue;
}
try {
JsonObject result = router.
run(user_input);
Logger::info(
"\nResponse: {}", result[
"answer"].get<String>());
} catch (const std::exception& e) {
}
}
return 0;
}
static ConfigLoader & getInstance()
Get the singleton instance of ConfigLoader.
static void error(fmt::format_string< Args... > fmt, Args &&... args)
Log a message at error level.
Definition logger.h:124
static void init(Level level=Level::INFO)
Initialize the logger.
static void info(fmt::format_string< Args... > fmt, Args &&... args)
Log a message at info level.
Definition logger.h:104
@ INFO
Info logging level.
Definition logger.h:40
Routing workflow using the actor model.
Definition routing_workflow.h:26
void addRoute(const String &name, const String &description, std::function< JsonObject(const String &, const JsonObject &)> handler)
Add a route with a direct function handler.
void setDefaultRoute(std::function< JsonObject(const String &, const JsonObject &)> handler)
Set default route.
void setRouterPrompt(const String &prompt_template)
Set the router prompt.
Definition routing_workflow.h:132
JsonObject run(const String &input) override
Execute the workflow with input.
Framework Namespace.
Definition agent.h:18
Options for LLM API calls.
Definition llm_interface.h:25
double temperature
The temperature of the LLM.
Definition llm_interface.h:29
int max_tokens
The maximum number of tokens.
Definition llm_interface.h:33
Response from an LLM.
Definition types.h:85
String content
The content of the response.
Definition types.h:89