Agents 0.0.2
Edge AI Agents SDK
|
Implementation of LLMInterface for Ollama models. More...
#include <ollama_llm.h>
Public Member Functions | |
OllamaLLM (const String &model="llama3") | |
Constructor. | |
~OllamaLLM () override=default | |
Destructor. | |
std::vector< String > | getAvailableModels () override |
Get available models from Ollama. | |
void | setModel (const String &model) override |
Set the model to use. | |
String | getModel () const override |
Get current model. | |
void | setApiKey (const String &api_key) override |
Set API key (not used for Ollama, but implemented for interface compliance) | |
void | setApiBase (const String &api_base) override |
Set API base URL for Ollama server. | |
void | setOptions (const LLMOptions &options) override |
Set options for API calls. | |
LLMOptions | getOptions () const override |
Get current options. | |
LLMResponse | complete (const String &prompt) override |
Generate completion from a prompt. | |
LLMResponse | chat (const std::vector< Message > &messages) override |
Generate completion from a list of messages. | |
LLMResponse | chatWithTools (const std::vector< Message > &messages, const std::vector< std::shared_ptr< Tool > > &tools) override |
Generate completion with available tools. | |
void | streamChat (const std::vector< Message > &messages, std::function< void(const String &, bool)> callback) override |
Stream results with callback. | |
virtual LLMResponse | complete (const std::vector< Message > &messages) |
Generate completion from a list of messages. | |
virtual LLMResponse | completeWithTools (const std::vector< Message > &messages, const std::vector< JsonObject > &tools_schema) |
Generate completion with available tools. | |
virtual Task< LLMResponse > | completeAsync (const String &prompt) |
Async complete from a prompt. | |
virtual Task< LLMResponse > | completeAsync (const std::vector< Message > &messages) |
Async complete from a list of messages. | |
virtual Task< LLMResponse > | chatAsync (const std::vector< Message > &messages) |
Async chat from a list of messages. | |
virtual Task< LLMResponse > | chatWithToolsAsync (const std::vector< Message > &messages, const std::vector< std::shared_ptr< Tool > > &tools) |
Async chat with tools. | |
virtual AsyncGenerator< String > | streamChatAsync (const std::vector< Message > &messages) |
Stream chat with AsyncGenerator. | |
Implementation of LLMInterface for Ollama models.
agents::llms::OllamaLLM::OllamaLLM(const String &model = "llama3")
Constructor.
model | The model to use |
|
override, virtual |
Generate completion from a list of messages.
messages | The messages |
Implements agents::LLMInterface.
|
virtual, inherited |
Async chat from a list of messages.
messages | The messages to generate completion from |
|
override, virtual |
Generate completion with available tools.
messages | The messages |
tools | The tools |
Implements agents::LLMInterface.
|
virtual, inherited |
Async chat with tools.
messages | The messages to generate completion from |
tools | The tools to use |
|
virtual, inherited |
Generate completion from a list of messages.
messages | The messages to generate completion from |
|
override, virtual |
Generate completion from a prompt.
prompt | The prompt |
Reimplemented from agents::LLMInterface.
|
virtual, inherited |
Async complete from a list of messages.
messages | The messages to generate completion from |
|
virtual, inherited |
Async complete from a prompt.
prompt | The prompt to generate completion from |
|
virtual, inherited |
Generate completion with available tools.
messages | The messages to generate completion from |
tools_schema | The tools schema to use |
|
override, virtual |
|
override, virtual |
|
override, virtual |
|
override, virtual |
Set API base URL for Ollama server.
api_base | The API base URL |
Implements agents::LLMInterface.
|
override, virtual |
Set API key (not used for Ollama, but implemented for interface compliance)
api_key | The API key |
Implements agents::LLMInterface.
|
override, virtual |
|
override, virtual |
|
override, virtual |
Stream results with callback.
messages | The messages |
callback | The callback |
Implements agents::LLMInterface.
|
virtual, inherited |
Stream chat with AsyncGenerator.
messages | The messages to generate completion from |