Agents 0.0.2
Edge AI Agents SDK
ollama_llm.h
#pragma once

#include <agents-cpp/llm_interface.h>

namespace agents {
namespace llms {

class OllamaLLM : public LLMInterface {
public:
    OllamaLLM(const String& model = "llama3");

    ~OllamaLLM() override = default;

    std::vector<String> getAvailableModels() override;

    void setModel(const String& model) override;

    String getModel() const override;

    void setApiKey(const String& api_key) override;

    void setApiBase(const String& api_base) override;

    void setOptions(const LLMOptions& options) override;

    LLMOptions getOptions() const override;

    LLMResponse complete(const String& prompt) override;

    LLMResponse chat(const std::vector<Message>& messages) override;

    LLMResponse chatWithTools(
        const std::vector<Message>& messages,
        const std::vector<std::shared_ptr<Tool>>& tools
    ) override;

    void streamChat(
        const std::vector<Message>& messages,
        std::function<void(const String&, bool)> callback
    ) override;

private:
    String api_base_ = "http://localhost:11434/api";
    String model_;
    LLMOptions options_;

    JsonObject messagesToOllamaFormat(const std::vector<Message>& messages);

    LLMResponse parseOllamaResponse(const JsonObject& response);

    JsonObject makeApiCall(const JsonObject& request_body, bool stream = false);

    String formatMessagesAsPrompt(const std::vector<Message>& messages);

    bool modelSupportsChatFormat() const;

    bool modelSupportsToolCalls() const;
};

} // namespace llms
} // namespace agents
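For orientation, a minimal usage sketch against the interface declared above. The include path is assumed from the SDK layout, and the sketch assumes complete() blocks until the local Ollama server answers; neither is documented in this header.

#include <agents-cpp/llms/ollama_llm.h>  // assumed install path for this header

int main() {
    using namespace agents;

    // Talks to a local Ollama instance; the default base URL is
    // http://localhost:11434/api, as declared above.
    llms::OllamaLLM llm("llama3");

    // Single-prompt completion. LLMResponse is declared in types.h; its fields
    // are not shown in this header, so inspect types.h before using the result.
    LLMResponse response = llm.complete("Summarise what an edge AI agent is.");
    (void)response;
    return 0;
}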
LLMInterface: Interface for language model providers (OpenAI, Anthropic, Google, Ollama).
Definition llm_interface.h:68

void streamChat(const std::vector<Message>& messages, std::function<void(const String&, bool)> callback) override
Stream results with callback.
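Continuing the sketch above (same assumed include), streamed output can be consumed with a lambda. The header does not spell out the meaning of the bool argument; it is assumed here to flag the final chunk.

#include <iostream>

void streamExample(agents::llms::OllamaLLM& llm,
                   const std::vector<agents::Message>& history) {
    llm.streamChat(history, [](const agents::String& chunk, bool done) {
        std::cout << chunk;           // print each streamed chunk as it arrives
        if (done) std::cout << "\n";  // 'done' assumed to mark the final chunk
    });
}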
OllamaLLM(const String& model = "llama3")
Constructor.

String getModel() const override
Get current model.

void setModel(const String& model) override
Set the model to use.

std::vector<String> getAvailableModels() override
Get available models from Ollama.
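A sketch of model discovery, assuming the Ollama server is reachable and has at least one model pulled.

#include <iostream>

void listModelsExample(agents::llms::OllamaLLM& llm) {
    // Ask the server which models are installed, print them, and switch to
    // the first one returned.
    std::vector<agents::String> models = llm.getAvailableModels();
    for (const auto& name : models) {
        std::cout << name << "\n";
    }
    if (!models.empty()) {
        llm.setModel(models.front());
    }
}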
void setApiKey(const String& api_key) override
Set API key (not used for Ollama, but implemented for interface compliance).

~OllamaLLM() override = default
Destructor.

LLMOptions getOptions() const override
Get current options.

void setOptions(const LLMOptions& options) override
Set options for API calls.

LLMResponse chat(const std::vector<Message>& messages) override
Generate completion from a list of messages.

LLMResponse complete(const String& prompt) override
Generate completion from a prompt.

void setApiBase(const String& api_base) override
Set API base URL for Ollama server.
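When the Ollama server is not on localhost, the base URL can be repointed. The host below is a placeholder; the /api suffix mirrors the default declared in the header.

void remoteServerExample(agents::llms::OllamaLLM& llm) {
    // Placeholder address for an Ollama instance elsewhere on the network.
    llm.setApiBase("http://192.168.0.10:11434/api");
    // Ollama ignores the API key; the call exists only for interface compliance.
    llm.setApiKey("");
}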
LLMResponse chatWithTools(const std::vector<Message>& messages, const std::vector<std::shared_ptr<Tool>>& tools) override
Generate completion with available tools.
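A sketch of a tool-assisted call. How Tool instances are constructed (for example the file tool in file_tool.h) is outside this header, and Tool is assumed here to be visible as agents::Tool.

#include <memory>
#include <vector>

agents::LLMResponse toolsExample(
    agents::llms::OllamaLLM& llm,
    const std::vector<agents::Message>& history,
    const std::vector<std::shared_ptr<agents::Tool>>& tools) {
    // Hand the tool set to the model alongside the conversation so it can
    // request tool calls in its reply.
    return llm.chatWithTools(history, tools);
}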
agents::llms: Large Language Models Namespace.
Definition anthropic_llm.h:19

agents::tools: Tools Namespace.
Definition file_tool.h:15

agents: Framework Namespace.
Definition agent.h:18

nlohmann::json JsonObject
JSON object type.
Definition types.h:39

std::string String
String type.
Definition types.h:27

LLMOptions: Options for LLM API calls.
Definition llm_interface.h:25
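Options follow a get/adjust/set round trip. The fields of LLMOptions are declared in llm_interface.h and not shown here, so this sketch leaves them untouched.

void optionsExample(agents::llms::OllamaLLM& llm) {
    // Read the current options, adjust whichever fields llm_interface.h
    // declares (not visible in this header), and re-apply them.
    agents::LLMOptions opts = llm.getOptions();
    llm.setOptions(opts);
}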
LLMResponse: Response from an LLM.
Definition types.h:85