From 69d898e7eac40f84ba2fc33501636b919dce2336 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Krzysztof=20Hasi=C5=84ski?=
Date: Thu, 17 Apr 2025 08:30:50 +0900
Subject: [PATCH] Add Ollama support

---
 README.md                                           |  4 ++++
 .../generation_provider/ollama_provider.rb          | 18 ++++++++++++++++++
 2 files changed, 22 insertions(+)
 create mode 100644 lib/active_agent/generation_provider/ollama_provider.rb

diff --git a/README.md b/README.md
index d49966be..e66cb0b1 100644
--- a/README.md
+++ b/README.md
@@ -33,6 +33,10 @@ development:
     api_key: <%= Rails.application.credentials.dig(:openai, :api_key) %>
     model: "gpt-3.5-turbo"
     temperature: 0.7
+  ollama:
+    service: "Local Ollama"
+    model: "llama3.2"
+    temperature: 0.7
 
 production:
   openai:
diff --git a/lib/active_agent/generation_provider/ollama_provider.rb b/lib/active_agent/generation_provider/ollama_provider.rb
new file mode 100644
index 00000000..5f7b3a37
--- /dev/null
+++ b/lib/active_agent/generation_provider/ollama_provider.rb
@@ -0,0 +1,18 @@
+# lib/active_agent/generation_provider/ollama_provider.rb
+
+require "openai"
+require_relative "open_ai_provider"
+
+module ActiveAgent
+  module GenerationProvider
+    class OllamaProvider < OpenAIProvider
+      def initialize(config)
+        @config = config
+        @api_key = config["api_key"]
+        @model_name = config["model"]
+        @host = config["host"] || "http://localhost:11434"
+        @client = OpenAI::Client.new(uri_base: @host, access_token: @api_key, log_errors: true)
+      end
+    end
+  end
+end
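
Usage sketch (not part of the patch): assuming ActiveAgent's generate_with macro resolves the new "ollama" entry from the YAML config shown in the README diff above, an agent could opt into the local model as below. The agent class name and the option values are illustrative only.

# Hypothetical example, not included in this patch.
# Assumes `generate_with :ollama` maps to the OllamaProvider added above
# via the `ollama` block in the application's ActiveAgent config.
class TranslationAgent < ApplicationAgent
  generate_with :ollama, model: "llama3.2", temperature: 0.7
end

Ollama's local API does not enforce authentication, so api_key can be any placeholder value; set host in the config if the Ollama server is not running on http://localhost:11434.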