From 0bcc029997f5226f22d9ac8b22bace4a537aab24 Mon Sep 17 00:00:00 2001
From: Viswanath129
Date: Thu, 2 Apr 2026 21:07:59 +0530
Subject: [PATCH 1/2] Add Nexa local LLM support

---
 .env.example                    |  6 ++++++
 backend/app/config.py           | 10 +++++++---
 backend/app/utils/llm_client.py |  2 ++
 docker-compose.nexa.yml         | 20 ++++++++++++++++++++
 4 files changed, 35 insertions(+), 3 deletions(-)
 create mode 100644 docker-compose.nexa.yml

diff --git a/.env.example b/.env.example
index 393571b..f7c02bf 100644
--- a/.env.example
+++ b/.env.example
@@ -14,6 +14,12 @@ LLM_MODEL_NAME=claude-sonnet-4-20250514
 # LLM_BASE_URL=https://api.openai.com/v1
 # LLM_MODEL_NAME=gpt-4o-mini
 
+# For local Nexa (OpenAI-compatible):
+# LLM_PROVIDER=nexa
+# LLM_BASE_URL=http://127.0.0.1:11434/v1
+# LLM_MODEL_NAME=NexaAI/Llama3.2-3B-NPU-Turbo
+# NEXA_API_KEY=nexa
+
 # For OpenAI-compatible providers (e.g., OpenRouter):
 # LLM_API_KEY=sk-or-your-key
 # LLM_BASE_URL=https://openrouter.ai/api/v1

diff --git a/backend/app/config.py b/backend/app/config.py
index fe02836..38b06f8 100644
--- a/backend/app/config.py
+++ b/backend/app/config.py
@@ -63,6 +63,9 @@ def _get_llm_api_key() -> str:
         return explicit
 
     provider = (os.environ.get('LLM_PROVIDER', '') or '').strip().lower()
+    if provider == 'nexa':
+        # The Nexa local server does not require a real key, but the OpenAI SDK needs a non-empty string
+        return os.environ.get('NEXA_API_KEY', '') or 'nexa'
     if provider == 'anthropic':
         return os.environ.get('ANTHROPIC_API_KEY', '')
 
@@ -89,7 +92,8 @@ class Config:
     LLM_API_KEY = _get_llm_api_key()
     LLM_BASE_URL = _get_env_or_default('LLM_BASE_URL', 'https://api.openai.com/v1')
     LLM_MODEL_NAME = _get_env_or_default('LLM_MODEL_NAME', 'gpt-4o-mini')
-    LLM_PROVIDER = os.environ.get('LLM_PROVIDER', '')  # 'openai', 'anthropic', 'claude-cli', 'codex-cli'
+    # Providers: openai | anthropic | claude-cli | codex-cli | nexa (OpenAI-compatible local server)
+    LLM_PROVIDER = os.environ.get('LLM_PROVIDER', '')
 
     # Graph storage config
     GRAPH_BACKEND = os.environ.get("GRAPH_BACKEND", "kuzu").lower()
@@ -129,8 +133,8 @@ class Config:
     def validate(cls):
         """Validate required configuration."""
         errors = []
-        if cls.LLM_PROVIDER not in ("claude-cli", "codex-cli") and not cls.LLM_API_KEY:
-            errors.append("LLM_API_KEY not configured (set LLM_PROVIDER=claude-cli or codex-cli to use CLI instead)")
+        if cls.LLM_PROVIDER not in ("claude-cli", "codex-cli", "nexa") and not cls.LLM_API_KEY:
+            errors.append("LLM_API_KEY not configured (set LLM_PROVIDER=claude-cli, codex-cli, or nexa to run without an API key)")
         if cls.GRAPH_BACKEND not in {"kuzu", "json"}:
             errors.append("GRAPH_BACKEND must be either 'kuzu' or 'json'")
         return errors

diff --git a/backend/app/utils/llm_client.py b/backend/app/utils/llm_client.py
index 710ae88..7d30116 100644
--- a/backend/app/utils/llm_client.py
+++ b/backend/app/utils/llm_client.py
@@ -63,6 +63,8 @@ def _detect_provider(self) -> str:
         model_lower = (self.model or "").lower()
         base_lower = (self.base_url or "").lower()
 
+        if "nexa" in model_lower or "nexa" in base_lower or "11434" in base_lower:
+            return "nexa"
         if any(k in model_lower for k in ["claude", "anthropic"]):
             return "anthropic"
         if "anthropic" in base_lower:

diff --git a/docker-compose.nexa.yml b/docker-compose.nexa.yml
new file mode 100644
index 0000000..21c960e
--- /dev/null
+++ b/docker-compose.nexa.yml
@@ -0,0 +1,20 @@
+services:
+  mirofish:
+    depends_on: {}
+    networks:
+      - default
+    environment:
+      LLM_PROVIDER: openai
+      LLM_BASE_URL: http://host.docker.internal:11434/v1
+      LLM_API_KEY: nexa
+      LLM_MODEL_NAME: NexaAI/Llama3.2-3B-NPU-Turbo
+    ports:
+      - "5001:5001"
+    volumes:
+      - ./backend/uploads:/app/backend/uploads
+      - ./backend/data:/app/backend/data
+    labels: {}
+
+networks:
+  default:
+    name: mirofish-nexa
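Note on verifying patch 1: the endpoint that .env.example and docker-compose.nexa.yml point at can be probed directly with the `openai` Python SDK before starting the app. This is a minimal sketch, assuming the `openai` package is installed and a Nexa server is already listening on 127.0.0.1:11434 (starting that server is outside the scope of this patch):

    # Probe the local Nexa endpoint using the same values .env.example suggests.
    from openai import OpenAI

    client = OpenAI(
        base_url="http://127.0.0.1:11434/v1",  # Nexa's OpenAI-compatible endpoint
        api_key="nexa",  # placeholder; the patch assumes the local server accepts any string
    )

    resp = client.chat.completions.create(
        model="NexaAI/Llama3.2-3B-NPU-Turbo",
        messages=[{"role": "user", "content": "Reply with the single word: ok"}],
        max_tokens=8,
    )
    print(resp.choices[0].message.content)

With this base URL and model name, the new `_detect_provider()` checks would also classify the client as "nexa", since both the model name and the port 11434 in the base URL contain matching markers.
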
From 8650c2f351aa5645158f50e45cd71c65d529f31e Mon Sep 17 00:00:00 2001
From: Viswanath129
Date: Thu, 2 Apr 2026 21:16:27 +0530
Subject: [PATCH 2/2] modified by viswa

---
 README.md | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/README.md b/README.md
index 2662574..e4af43f 100644
--- a/README.md
+++ b/README.md
@@ -4,6 +4,8 @@ A swarm intelligence prediction engine. Upload documents describing any scenario
 
 **Live:** [synth.scty.org](https://synth.scty.org)
 
+Modified by Viswa.
+
 > Fork of [666ghj/MiroFish](https://github.com/666ghj/MiroFish) — fully translated to English, local graph storage with embedded KuzuDB by default, Claude/Codex CLI support added.
 
 ## What it does
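
Usage note for docker-compose.nexa.yml from patch 1: the file looks like a Compose override and, assuming the repository's base file is named docker-compose.yml, would be layered with Compose's standard multi-file merge:

    docker compose -f docker-compose.yml -f docker-compose.nexa.yml up -d

host.docker.internal lets the container reach a Nexa server running on the Docker host; on Linux hosts this name may additionally need to be mapped via extra_hosts: ["host.docker.internal:host-gateway"] in the service definition.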