routing_example.cpp

Routing Example

Version: 0.1
Date: 2025-07-20

This example builds a RoutingWorkflow that uses an LLM router to classify each user query and dispatch it to a matching handler: factual queries go to a Wikipedia-backed route, opinion and technical queries go to routes with specialized system prompts, and anything else falls through to a default route.
#include <agents-cpp/agent_context.h>
#include <agents-cpp/config_loader.h>
#include <agents-cpp/llm_interface.h>
#include <agents-cpp/logger.h>
#include <agents-cpp/tools/tool_registry.h>
#include <agents-cpp/types.h>
#include <agents-cpp/workflows/routing_workflow.h>
#include <iostream>
#include <string>
using namespace agents;
int main(int argc, char* argv[]) {
    // Initialize the logger
    Logger::init();

    // Get API key from .env, environment, or command line
    String api_key;
    auto& config = ConfigLoader::getInstance();

    // Try to get API key from config or environment
    api_key = config.get("GEMINI_API_KEY", "");

    // If not found, check command line
    if (api_key.empty() && argc > 1) {
        api_key = argv[1];
    }

    // Still not found, show error and exit
    if (api_key.empty()) {
        Logger::error("API key not found. Please:");
        Logger::error("1. Create a .env file with GEMINI_API_KEY=your_key, or");
        Logger::error("2. Set the GEMINI_API_KEY environment variable, or");
        Logger::error("3. Provide an API key as a command line argument");
        return 1;
    }
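
    // A minimal .env file for this example would hold one line:
    //
    //   GEMINI_API_KEY=your_key_here
    //
    // (Assumption: ConfigLoader looks for .env in the working directory;
    // only the variable name is confirmed by the error messages above.)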

    // Create LLM
    auto llm = createLLM("google", api_key, "gemini-1.5-flash");

    // Configure LLM options
    LLMOptions options;
    options.temperature = 0.2;
    options.max_tokens = 2048;
    llm->setOptions(options);

    // Create agent context
    auto context = std::make_shared<AgentContext>();
    context->setLLM(llm);

    // Register some tools
    context->registerTool(tools::createWebSearchTool());
    context->registerTool(tools::createWikipediaTool());
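
    // Note: the opinion and technical routes below copy this context via
    // AgentContext(*context), so they presumably inherit the LLM and the
    // tools registered here (an assumption about AgentContext's copy
    // semantics, not something this example verifies).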

    // Create routing workflow
    workflows::RoutingWorkflow router(context);

    // Set router prompt
    router.setRouterPrompt(
        "You are a routing assistant that examines user queries and classifies them into appropriate categories. "
        "Determine the most suitable category for handling the user's query based on the available routes."
    );

    // Add routes for different query types
    router.addRoute(
        "factual_query",
        "Questions about facts, events, statistics, or general knowledge",
        [context](const String& input, const JsonObject& routing_info) -> JsonObject {
            Logger::info("Handling factual query: {}", input);
            auto wiki_tool = tools::createWikipediaTool();
            ToolResult result = wiki_tool->execute({{"query", input}});
            JsonObject response;
            response["answer"] = "Based on research: " + result.content;
            return response;
        }
    );

    router.addRoute(
        "opinion_query",
        "Questions seeking opinions, evaluations, or judgments on topics",
        [context](const String& input, const JsonObject& routing_info) -> JsonObject {
            Logger::info("Handling opinion query: {}", input);

            // Create specific context for opinion handling
            auto opinion_context = std::make_shared<AgentContext>(*context);
            opinion_context->setSystemPrompt(
                "You are a balanced and thoughtful assistant that provides nuanced perspectives on complex topics. "
                "Consider multiple viewpoints and provide balanced opinions."
            );

            // Get response from LLM
            LLMResponse llm_response = opinion_context->getLLM()->complete(input);
            String response = llm_response.content;

            JsonObject result;
            result["answer"] = "Opinion analysis: " + response;
            return result;
        }
    );

    router.addRoute(
        "technical_query",
        "Questions about technical topics, programming, or specialized domains",
        [context](const String& input, const JsonObject& routing_info) -> JsonObject {
            Logger::info("Handling technical query: {}", input);

            // Create specific context for technical handling
            auto technical_context = std::make_shared<AgentContext>(*context);
            technical_context->setSystemPrompt(
                "You are a technical expert assistant that provides accurate and detailed information on technical topics. "
                "Focus on clarity, precision, and correctness."
            );

            // Get response from LLM
            LLMResponse llm_response = technical_context->getLLM()->complete(input);
            String response = llm_response.content;

            JsonObject result;
            result["answer"] = "Technical explanation: " + response;
            return result;
        }
    );
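
    // router.run() asks the router LLM to match each input against the
    // route descriptions above and dispatches to the chosen handler;
    // queries that fit none of the routes fall through to the default
    // route set below.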

    // Set default route
    router.setDefaultRoute([context](const String& input, const JsonObject& routing_info) -> JsonObject {
        Logger::info("Handling with default route: {}", input);

        // Get response from LLM
        LLMResponse llm_response = context->getLLM()->complete(input);
        String response = llm_response.content;

        JsonObject result;
        result["answer"] = "General response: " + response;
        return result;
    });

    // Process user inputs until exit
    Logger::info("Enter queries (or 'exit' to quit):");
    String user_input;

    while (true) {
        Logger::info("> ");
        std::getline(std::cin, user_input);

        if (user_input == "exit" || user_input == "quit" || user_input == "q") {
            break;
        }
        if (user_input.empty()) {
            continue;
        }

        try {
            // Run the routing workflow
            JsonObject result = router.run(user_input);

            // Display the result
            Logger::info("\nResponse: {}", result["answer"].get<String>());
            Logger::info("--------------------------------------");
        } catch (const std::exception& e) {
            Logger::error("Error: {}", e.what());
        }
    }

    return 0;
}
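
None of the three routes uses the web-search tool registered at the top of the example. A fourth route, added alongside the other addRoute calls, could exercise it. The sketch below mirrors the factual route and assumes the web-search tool accepts a "query" argument the way the Wikipedia tool does; the route name and description are illustrative, not part of the SDK.

    router.addRoute(
        "current_events_query",
        "Questions about recent news, headlines, or ongoing events",
        [context](const String& input, const JsonObject& routing_info) -> JsonObject {
            Logger::info("Handling current events query: {}", input);
            // Assumption: the web search tool takes a "query" parameter,
            // mirroring the Wikipedia tool used in the factual route.
            auto search_tool = tools::createWebSearchTool();
            ToolResult result = search_tool->execute({{"query", input}});
            JsonObject response;
            response["answer"] = "From a web search: " + result.content;
            return response;
        }
    );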