// Coroutine Example.
#include <agents-cpp/agent_context.h>
#include <agents-cpp/agents/autonomous_agent.h>
#include <agents-cpp/config_loader.h>
#include <agents-cpp/coroutine_utils.h>
#include <agents-cpp/llm_interface.h>
#include <agents-cpp/logger.h>
#include <agents-cpp/tools/tool_registry.h>
#include <agents-cpp/types.h>
#include <iostream>
#include <string>
#include <chrono>
#include <coroutine>
#include <thread>
} else {
}
}
// Ask a human operator to approve or reject a proposed agent action.
//
// @param message  Question to show the operator before reading their answer.
//                 (Previously this was never printed, so the operator was
//                 prompted blind.)
// @param context  Extra agent state for richer approval UIs; currently unused.
// @return true iff the operator answers 'y' or 'Y'. Returns false on stream
//         failure or EOF (fail closed: no input means no approval).
bool humanApproval(const String& message, const JsonObject& context) {
    (void)context;  // reserved for future use

    // Show the operator what they are being asked to approve.
    std::cout << message << " (y/n): " << std::flush;

    char response = 'n';
    if (!(std::cin >> response)) {
        // EOF or broken stream: previously `response` was read while
        // uninitialized (UB). Treat as a rejection instead.
        return false;
    }
    return (response == 'y' || response == 'Y');
}
// Research `topic` by combining a web search with a Wikipedia lookup, then
// have the LLM distill and merge the two sources into one summary.
//
// Pipeline (each step awaited in turn):
//   1. "web_search" tool  -> raw search results
//   2. LLM extraction     -> key points from those results
//   3. "wikipedia" tool   -> encyclopedia text
//   4. LLM synthesis      -> final summary
//
// @param context Shared agent context providing executeTool() and chat().
// @param topic   Subject to research.
// @return JSON with "topic", "summary", "search_results", "wiki_results".
Task<JsonObject> performResearchTask(std::shared_ptr<AgentContext> context,
                                     const String& topic) {
    const auto web = co_await context->executeTool("web_search", {{"query", topic}});

    Logger::info("Extracting key points from search results...");
    const auto key_points = co_await context->chat(
        "Extract the key points from this search result about " + topic + ":\n\n" + web.content);

    const auto wiki = co_await context->executeTool("wikipedia", {{"query", topic}});

    // Merge both sources into a single synthesis request.
    const String synthesis_prompt =
        "Synthesize and summarize the following information about " + topic + ":\n\n" +
        "Key Points:\n" + key_points.content + "\n\n" +
        "Wikipedia Information:\n" + wiki.content;
    const auto summary = co_await context->chat(synthesis_prompt);

    co_return JsonObject{
        {"topic", topic},
        {"summary", summary.content},
        {"search_results", web.content},
        {"wiki_results", wiki.content}
    };
}
// Generate an article about `topic` in three sections, then title it.
//
// The three section tasks are created up-front and awaited only afterwards.
// NOTE(review): whether they actually run concurrently depends on whether
// Task starts eagerly at creation or lazily at first co_await —
// coroutine_utils.h is not visible here; confirm before relying on the
// parallelism implied by this function's name.
//
// @param context Shared agent context providing chat().
// @param topic   Subject of the article.
// @return JSON with "title", "introduction", "body", "conclusion",
//         and "full_article" (the three sections joined by blank lines).
Task<JsonObject> generateContentInParallel(std::shared_ptr<AgentContext> context,
const String& topic) {
// Section prompts; each is independent of the others.
String intro_prompt = "Write an introduction paragraph for an article about " + topic + ".";
String body_prompt = "Write three key points about " + topic + " with detailed explanations.";
String conclusion_prompt = "Write a conclusion paragraph for an article about " + topic + ".";
// Create all three chat tasks before awaiting any of them.
auto intro_task = context->chat(intro_prompt);
auto body_task = context->chat(body_prompt);
auto conclusion_task = context->chat(conclusion_prompt);
// Collect the results (awaited in fixed order: intro, body, conclusion).
auto intro_response = co_await std::move(intro_task);
auto body_response = co_await std::move(body_task);
auto conclusion_response = co_await std::move(conclusion_task);
String article = intro_response.content + "\n\n" + body_response.content + "\n\n" + conclusion_response.content;
// The title is generated last because it depends on the assembled article.
String title_prompt = "Create a catchy title for this article:\n\n" + article;
auto title_response = co_await context->chat(title_prompt);
JsonObject result = {
{"title", title_response.content},
{"introduction", intro_response.content},
{"body", body_response.content},
{"conclusion", conclusion_response.content},
{"full_article", article}
};
co_return result;
}
// Stream an LLM reply to stdout chunk-by-chunk as it arrives, flushing
// after every chunk so the user sees output immediately.
//
// @param context Shared agent context providing streamChat().
// @param prompt  Prompt sent to the model.
Task<void> streamText(std::shared_ptr<AgentContext> context,
                      const String& prompt) {
    auto stream = context->streamChat(prompt);
    std::cout << "Response: ";
    for (;;) {
        auto piece = co_await stream.next();
        if (!piece) {
            break;  // generator exhausted
        }
        std::cout << *piece << std::flush;
    }
    std::cout << std::endl;
    co_return;
}
int main(int argc, char* argv[]) {
String api_key;
api_key = config.get("GEMINI_API_KEY", "");
if (api_key.empty() && argc > 1) {
api_key = argv[1];
}
if (api_key.empty()) {
Logger::error(
"1. Create a .env file with GEMINI_API_KEY=your_key, or");
Logger::error(
"2. Set the GEMINI_API_KEY environment variable, or");
Logger::error(
"3. Provide an API key as a command line argument");
return 1;
}
auto llm = createLLM("google", api_key, "gemini-1.5-flash");
llm->setOptions(options);
auto context = std::make_shared<AgentContext>();
context->setLLM(llm);
while (true) {
std::cout << "\n========== COROUTINE EXAMPLES ==========\n";
std::cout << "1. Run autonomous agent with coroutines\n";
std::cout << "2. Perform research with parallel tool use\n";
std::cout << "3. Generate content in parallel\n";
std::cout << "4. Stream text example\n";
std::cout << "5. Exit\n";
std::cout << "Enter your choice: ";
int choice;
std::cin >> choice;
std::cin.ignore();
if (choice == 5) {
break;
}
std::string topic;
if (choice >= 1 && choice <= 4) {
std::cout << "Enter a topic: ";
std::getline(std::cin, topic);
}
try {
switch (choice) {
case 1: {
agent.setSystemPrompt("You are a helpful assistant that can use tools to perform tasks.");
agent.setStepCallback(stepCallback);
agent.setOptions(agent_options);
agent.init();
auto result = blockingWait(agent.run(topic));
if (result.contains("answer")) {
Logger::info(
"\nFinal Answer: {}", result[
"answer"].get<String>());
} else {
}
break;
}
case 2: {
auto result = blockingWait(performResearchTask(context, topic));
Logger::info(
"\nResearch Summary: {}", result[
"summary"].get<String>());
break;
}
case 3: {
auto result = blockingWait(generateContentInParallel(context, topic));
Logger::info(
"\nFull Article:\n{}", result[
"full_article"].get<String>());
break;
}
case 4: {
blockingWait(streamText(context, "Write a short story about " + topic));
break;
}
default:
}
} catch (const std::exception& e) {
}
}
return 0;
}
/*
 * NOTE: the text below is Doxygen cross-reference residue left over from
 * documentation extraction of this example; it is not part of the program.
 *
 * static ConfigLoader & getInstance()
 * Get the singleton instance of ConfigLoader.
 * static void error(fmt::format_string< Args... > fmt, Args &&... args)
 * Log a message at error level.
 * Definition logger.h:124
 * static void init(Level level=Level::INFO)
 * Initialize the logger.
 * static void info(fmt::format_string< Args... > fmt, Args &&... args)
 * Log a message at info level.
 * Definition logger.h:104
 * An agent that operates autonomously to complete a task.
 * Definition autonomous_agent.h:28
 * @ REACT
 * Reasoning and acting.
 * Definition autonomous_agent.h:75
 * @ INFO
 * Info logging level.
 * Definition logger.h:40
 * Provide a future-based fallback for Task.
 * Definition coroutine_utils.h:115
 * Framework Namespace.
 * Definition agent.h:18
 * Options for LLM API calls.
 * Definition llm_interface.h:25
 * double temperature
 * The temperature of the LLM.
 * Definition llm_interface.h:29
 * int max_tokens
 * The maximum number of tokens.
 * Definition llm_interface.h:33
 * Agent execution options.
 * Definition agent.h:61
 * bool human_feedback_enabled
 * Whether human feedback is enabled.
 * Definition agent.h:75
 * int max_iterations
 * The maximum number of iterations.
 * Definition agent.h:65
 * std::function< bool(const String &, const JsonObject &)> human_in_the_loop
 * The human in the loop function.
 * Definition agent.h:80
 * Step in the agent's execution.
 * Definition autonomous_agent.h:33
 * JsonObject result
 * The result of the step.
 * Definition autonomous_agent.h:45
 * bool success
 * Whether the step was successful.
 * Definition autonomous_agent.h:49
 * String description
 * The description of the step.
 * Definition autonomous_agent.h:37
 * String status
 * The status of the step.
 * Definition autonomous_agent.h:41
 */