#include <agents-cpp/llm_interface.h>

Declaration excerpt from the OllamaLLM header:

    LLMResponse chatWithTools(
        const std::vector<Message>& messages,
        const std::vector<std::shared_ptr<Tool>>& tools) override;

    void streamChat(
        const std::vector<Message>& messages,
        std::function<void(const String&, bool)> callback) override;

    // Default endpoint of a local Ollama server.
    String api_base_ = "http://localhost:11434/api";

    // Helpers for message formatting and capability checks:
    JsonObject messagesToOllamaFormat(const std::vector<Message>& messages);
    String formatMessagesAsPrompt(const std::vector<Message>& messages);
    bool modelSupportsChatFormat() const;
    bool modelSupportsToolCalls() const;
OllamaLLM implements LLMInterface, the interface for language model providers (OpenAI, Anthropic, Google, Ollama), defined at llm_interface.h:68. Its public members:
void streamChat(const std::vector<Message> &messages, std::function<void(const String &, bool)> callback) override
Stream results with callback.
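For illustration, a minimal streaming sketch. It assumes the header path agents-cpp/ollama_llm.h, that using-directives for the framework and LLM namespaces are in effect, and that the callback's bool flag marks the final chunk; none of these details are confirmed by the listing itself.

    #include <agents-cpp/ollama_llm.h>   // assumed header path
    #include <iostream>

    // Sketch: stream output from a local Ollama model and print chunks as they arrive.
    // (Namespace qualifiers omitted; using-directives are assumed.)
    void streamDemo(const std::vector<Message>& messages) {
        OllamaLLM llm("llama3");
        llm.streamChat(messages, [](const String& chunk, bool done) {
            std::cout << chunk;          // partial output
            if (done) {                  // assumption: true on the final chunk
                std::cout << std::endl;
            }
        });
    }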
OllamaLLM(const String &model="llama3")
Constructor.
String getModel() const override
Get current model.
void setModel(const String &model) override
Set the model to use.
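A sketch of construction and model selection, under the same header-path and namespace assumptions as above; the model name "mistral" is an example, not a value taken from this listing.

    #include <agents-cpp/ollama_llm.h>   // assumed header path
    #include <iostream>

    void modelDemo() {
        OllamaLLM llm;                        // uses the default model, "llama3"
        std::cout << llm.getModel() << "\n";  // prints the current model name
        llm.setModel("mistral");              // example: switch to another locally pulled model
    }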
std::vector<String> getAvailableModels() override
Get available models from Ollama.
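A short sketch that lists the models reported by the Ollama server, with the same assumed header path:

    #include <agents-cpp/ollama_llm.h>   // assumed header path
    #include <iostream>

    void listModelsDemo() {
        OllamaLLM llm;
        for (const String& name : llm.getAvailableModels()) {
            std::cout << name << "\n";   // one entry per model known to the server
        }
    }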
void setApiKey(const String &api_key) override
Set API key (not used for Ollama, but implemented for interface compliance)
~OllamaLLM() override=default
Destructor.
LLMOptions getOptions() const override
Get current options.
void setOptions(const LLMOptions &options) override
Set options for API calls.
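The option fields themselves live in LLMOptions (llm_interface.h:25) and are not shown in this listing, so the sketch below only demonstrates the read-modify-write pattern:

    #include <agents-cpp/ollama_llm.h>   // assumed header path

    void optionsDemo() {
        OllamaLLM llm;
        LLMOptions options = llm.getOptions();
        // Adjust fields of `options` here; their names are defined in
        // llm_interface.h and are not part of this listing.
        llm.setOptions(options);
    }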
LLMResponse chat(const std::vector<Message> &messages) override
Generate completion from a list of messages.
LLMResponse complete(const String &prompt) override
Generate completion from a prompt.
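A sketch of the two completion entry points. Constructing Message objects depends on the Message type in types.h, which is not shown here, so the conversation is passed in from elsewhere; the prompt text is only an example.

    #include <agents-cpp/ollama_llm.h>   // assumed header path

    void completionDemo(const std::vector<Message>& conversation) {
        OllamaLLM llm("llama3");

        // Single-prompt completion.
        LLMResponse from_prompt = llm.complete("Explain RAII in one sentence.");

        // Multi-message chat completion; the conversation is built elsewhere.
        LLMResponse from_chat = llm.chat(conversation);

        (void)from_prompt;   // LLMResponse fields (types.h:85) are not shown in this listing
        (void)from_chat;
    }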
void setApiBase(const String &api_base) override
Set API base URL for Ollama server.
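By default the client talks to http://localhost:11434/api (the api_base_ initializer above). A sketch of pointing it at a different server, with a hypothetical host name:

    #include <agents-cpp/ollama_llm.h>   // assumed header path

    void remoteServerDemo() {
        OllamaLLM llm;
        // Hypothetical remote Ollama instance; the default is http://localhost:11434/api.
        llm.setApiBase("http://ollama.internal:11434/api");
        // setApiKey exists only for interface compliance and is not used by Ollama.
        llm.setApiKey("");
    }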
LLMResponse chatWithTools(const std::vector<Message> &messages, const std::vector<std::shared_ptr<Tool>> &tools) override
Generate completion with available tools.
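A sketch of the tool-calling path. How Tool instances are created is defined elsewhere in agents-cpp and not shown in this listing, so the tool list is passed in by the caller:

    #include <agents-cpp/ollama_llm.h>   // assumed header path

    void toolsDemo(const std::vector<Message>& conversation,
                   const std::vector<std::shared_ptr<Tool>>& tools) {
        OllamaLLM llm;
        // Only meaningful when the selected model supports tool calls
        // (see modelSupportsToolCalls() in the declaration excerpt above).
        LLMResponse response = llm.chatWithTools(conversation, tools);
        (void)response;   // LLMResponse fields are defined in types.h:85
    }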
Referenced types and namespaces:
Large Language Models namespace (defined at anthropic_llm.h:19).
Framework namespace (defined at agent.h:18).
JsonObject: alias for nlohmann::json, the JSON object type (defined at types.h:39).
String: alias for std::string, the string type (defined at types.h:27).
LLMOptions: options for LLM API calls (defined at llm_interface.h:25).
LLMResponse: response from an LLM (defined at types.h:85).