From 064b9221a90138f218198db3f4b4446ab4d28fb2 Mon Sep 17 00:00:00 2001 From: Mars Date: Mon, 23 Feb 2026 23:32:13 -0500 Subject: [PATCH 1/2] add kilo gateway provider --- README.md | 4 +- docs/content/docs/(configuration)/config.mdx | 5 ++- docs/content/docs/(deployment)/roadmap.mdx | 2 +- .../docs/(getting-started)/quickstart.mdx | 3 +- interface/src/api/client.ts | 1 + interface/src/components/ModelSelect.tsx | 2 + interface/src/lib/providerIcons.tsx | 21 +++++++++ interface/src/routes/Settings.tsx | 8 ++++ src/api/models.rs | 4 ++ src/api/providers.rs | 14 ++++++ src/config.rs | 45 ++++++++++++++++++- src/llm/model.rs | 35 ++++++++++----- src/llm/providers.rs | 4 ++ src/llm/routing.rs | 17 +++++++ 14 files changed, 148 insertions(+), 17 deletions(-) diff --git a/README.md b/README.md index 0387d04ec..c35b4b027 100644 --- a/README.md +++ b/README.md @@ -192,7 +192,7 @@ api_key = "env:MY_PROVIDER_KEY" channel = "my-provider/my-model" ``` -Additional built-in providers include **NVIDIA**, **MiniMax**, **Moonshot AI (Kimi)**, and **Z.AI Coding Plan** — configure with `nvidia_key`, `minimax_key`, `moonshot_key`, or `zai_coding_plan_key` in `[llm]`. +Additional built-in providers include **Kilo Gateway**, **NVIDIA**, **MiniMax**, **Moonshot AI (Kimi)**, and **Z.AI Coding Plan** — configure with `kilo_key`, `nvidia_key`, `minimax_key`, `moonshot_key`, or `zai_coding_plan_key` in `[llm]`. ### Skills @@ -381,7 +381,7 @@ Read the full vision in the [roadmap](docs/content/docs/(deployment)/roadmap.mdx ### Prerequisites - **Rust** 1.85+ ([rustup](https://rustup.rs/)) -- An LLM API key from any supported provider (Anthropic, OpenAI, OpenRouter, Z.ai, Groq, Together, Fireworks, DeepSeek, xAI, Mistral, NVIDIA, MiniMax, Moonshot AI, OpenCode Zen) — or use `spacebot auth login` for Anthropic OAuth +- An LLM API key from any supported provider (Anthropic, OpenAI, OpenRouter, Kilo Gateway, Z.ai, Groq, Together, Fireworks, DeepSeek, xAI, Mistral, NVIDIA, MiniMax, Moonshot AI, OpenCode Zen) — or use `spacebot auth login` for Anthropic OAuth ### Build and Run diff --git a/docs/content/docs/(configuration)/config.mdx b/docs/content/docs/(configuration)/config.mdx index acba06dfc..854ad7b0e 100644 --- a/docs/content/docs/(configuration)/config.mdx +++ b/docs/content/docs/(configuration)/config.mdx @@ -24,6 +24,7 @@ spacebot --config /path/to.toml # CLI override anthropic_key = "env:ANTHROPIC_API_KEY" openai_key = "env:OPENAI_API_KEY" openrouter_key = "env:OPENROUTER_API_KEY" +kilo_key = "env:KILO_API_KEY" zhipu_key = "env:ZHIPU_API_KEY" groq_key = "env:GROQ_API_KEY" together_key = "env:TOGETHER_API_KEY" @@ -168,7 +169,7 @@ anthropic_key = "env:ANTHROPIC_API_KEY" This reads `ANTHROPIC_API_KEY` from the environment at startup. If the variable is unset, the value is treated as missing. -LLM keys also have implicit env fallbacks — if no key is set in the TOML, Spacebot checks `ANTHROPIC_API_KEY`, `OPENAI_API_KEY`, and `OPENROUTER_API_KEY` automatically. +LLM keys also have implicit env fallbacks — if no key is set in the TOML, Spacebot checks `ANTHROPIC_API_KEY`, `OPENAI_API_KEY`, `OPENROUTER_API_KEY`, and `KILO_API_KEY` automatically. 
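For the Kilo Gateway key this patch adds, either form works; a minimal sketch of the explicit reference, mirroring the other keys in this file:

```toml
[llm]
# Reads KILO_API_KEY from the environment at startup
kilo_key = "env:KILO_API_KEY"
```

Omitting `kilo_key` and simply exporting `KILO_API_KEY` also works via the implicit fallback. Models are then addressed with the `kilo/` prefix, e.g. `kilo/anthropic/claude-sonnet-4.5`, the default channel model in this patch's routing defaults.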
## Env-Only Mode @@ -203,6 +204,7 @@ Model names include the provider as a prefix: | Anthropic | `anthropic/` | `anthropic/claude-sonnet-4-20250514` | | OpenAI | `openai/` | `openai/gpt-4o` | | OpenRouter | `openrouter//` | `openrouter/anthropic/claude-sonnet-4-20250514` | +| Kilo Gateway | `kilo//` | `kilo/anthropic/claude-sonnet-4.5` | | Custom provider | `/` | `my_openai/gpt-4o-mini` | You can mix providers across process types. See [Routing](/docs/routing) for the full routing system. @@ -324,6 +326,7 @@ If you define a custom provider with the same ID as a legacy key, your custom co | `anthropic_key` | string | None | Anthropic API key (or `env:VAR_NAME`) | | `openai_key` | string | None | OpenAI API key (or `env:VAR_NAME`) | | `openrouter_key` | string | None | OpenRouter API key (or `env:VAR_NAME`) | +| `kilo_key` | string | None | Kilo Gateway API key (or `env:VAR_NAME`) | | `zhipu_key` | string | None | Zhipu AI (GLM) API key (or `env:VAR_NAME`) | | `groq_key` | string | None | Groq API key (or `env:VAR_NAME`) | | `together_key` | string | None | Together AI API key (or `env:VAR_NAME`) | diff --git a/docs/content/docs/(deployment)/roadmap.mdx b/docs/content/docs/(deployment)/roadmap.mdx index 6711bdd2d..d087ef54d 100644 --- a/docs/content/docs/(deployment)/roadmap.mdx +++ b/docs/content/docs/(deployment)/roadmap.mdx @@ -18,7 +18,7 @@ The full message-in → LLM → response-out pipeline is wired end-to-end across - **Config** — hierarchical TOML with `Config`, `AgentConfig`, `ResolvedAgentConfig`, `Binding`, `MessagingConfig`. File watcher with event filtering and content hash debounce for hot-reload. - **Multi-agent** — per-agent database isolation, `Agent` struct bundles all dependencies - **Database connections** — SQLite + LanceDB + redb per-agent, migrations for all tables -- **LLM** — `SpacebotModel` implements Rig's `CompletionModel`, routes through `LlmManager` via HTTP with retries and fallback chains across 11 providers (Anthropic, OpenAI, OpenRouter, Z.ai, Groq, Together, Fireworks, DeepSeek, xAI, Mistral, OpenCode Zen) +- **LLM** — `SpacebotModel` implements Rig's `CompletionModel`, routes through `LlmManager` via HTTP with retries and fallback chains across 12 providers (Anthropic, OpenAI, OpenRouter, Kilo Gateway, Z.ai, Groq, Together, Fireworks, DeepSeek, xAI, Mistral, OpenCode Zen) - **Model routing** — `RoutingConfig` with process-type defaults, task overrides, fallback chains - **Memory** — full stack: types, SQLite store (CRUD + graph), LanceDB (embeddings + vector + FTS), fastembed, hybrid search (RRF fusion). `memory_type` filter wired end-to-end through SearchConfig. `total_cmp` for safe sorting. - **Memory maintenance** — decay + prune implemented diff --git a/docs/content/docs/(getting-started)/quickstart.mdx b/docs/content/docs/(getting-started)/quickstart.mdx index 0cfb813ce..f775832a4 100644 --- a/docs/content/docs/(getting-started)/quickstart.mdx +++ b/docs/content/docs/(getting-started)/quickstart.mdx @@ -36,7 +36,7 @@ See [Docker deployment](/docs/docker) for image variants, compose files, and con - **Rust 1.85+** — `rustup update stable` - **Bun** (optional, for the web UI) — `curl -fsSL https://bun.sh/install | bash` -- **An LLM API key** — Anthropic, OpenAI, or OpenRouter +- **An LLM API key** — Anthropic, OpenAI, OpenRouter, or Kilo Gateway ### Install @@ -77,6 +77,7 @@ Create `~/.spacebot/config.toml`: [llm] anthropic_key = "sk-ant-..." # or: openrouter_key = "sk-or-..." +# or: kilo_key = "sk-..." # or: openai_key = "sk-..." 
# Keys also support env references: anthropic_key = "env:ANTHROPIC_API_KEY" diff --git a/interface/src/api/client.ts b/interface/src/api/client.ts index a2e020545..e743b9043 100644 --- a/interface/src/api/client.ts +++ b/interface/src/api/client.ts @@ -663,6 +663,7 @@ export interface ProviderStatus { openai: boolean; openai_chatgpt: boolean; openrouter: boolean; + kilo: boolean; zhipu: boolean; groq: boolean; together: boolean; diff --git a/interface/src/components/ModelSelect.tsx b/interface/src/components/ModelSelect.tsx index 8abab4391..96f0c00c2 100644 --- a/interface/src/components/ModelSelect.tsx +++ b/interface/src/components/ModelSelect.tsx @@ -15,6 +15,7 @@ interface ModelSelectProps { const PROVIDER_LABELS: Record = { anthropic: "Anthropic", openrouter: "OpenRouter", + kilo: "Kilo Gateway", openai: "OpenAI", "openai-chatgpt": "ChatGPT Plus (OAuth)", deepseek: "DeepSeek", @@ -128,6 +129,7 @@ export function ModelSelect({ const providerOrder = [ "openrouter", + "kilo", "anthropic", "openai", "openai-chatgpt", diff --git a/interface/src/lib/providerIcons.tsx b/interface/src/lib/providerIcons.tsx index 29487961a..1a47725f5 100644 --- a/interface/src/lib/providerIcons.tsx +++ b/interface/src/lib/providerIcons.tsx @@ -90,6 +90,26 @@ function OllamaIcon({ size = 24, className }: IconProps) { ); } +function KiloIcon({ size = 24, className }: IconProps) { + return ( + + ); +} + export function ProviderIcon({ provider, className = "text-ink-faint", size = 24 }: ProviderIconProps) { const iconProps: Partial = { size, @@ -101,6 +121,7 @@ export function ProviderIcon({ provider, className = "text-ink-faint", size = 24 openai: OpenAI, "openai-chatgpt": OpenAI, openrouter: OpenRouter, + kilo: KiloIcon, groq: Groq, mistral: Mistral, gemini: Google, diff --git a/interface/src/routes/Settings.tsx b/interface/src/routes/Settings.tsx index fba2026e9..a216b9122 100644 --- a/interface/src/routes/Settings.tsx +++ b/interface/src/routes/Settings.tsx @@ -72,6 +72,14 @@ const PROVIDERS = [ envVar: "OPENROUTER_API_KEY", defaultModel: "openrouter/anthropic/claude-sonnet-4", }, + { + id: "kilo", + name: "Kilo Gateway", + description: "OpenAI-compatible multi-provider gateway", + placeholder: "sk-...", + envVar: "KILO_API_KEY", + defaultModel: "kilo/anthropic/claude-sonnet-4.5", + }, { id: "opencode-zen", name: "OpenCode Zen", diff --git a/src/api/models.rs b/src/api/models.rs index aa26da052..4b0244e06 100644 --- a/src/api/models.rs +++ b/src/api/models.rs @@ -105,6 +105,7 @@ fn direct_provider_mapping(models_dev_id: &str) -> Option<&'static str> { match models_dev_id { "anthropic" => Some("anthropic"), "openai" => Some("openai"), + "kilo" => Some("kilo"), "deepseek" => Some("deepseek"), "xai" => Some("xai"), "mistral" => Some("mistral"), @@ -406,6 +407,9 @@ pub(super) async fn configured_providers(config_path: &std::path::Path) -> Vec<& if has_key("openrouter_key", "OPENROUTER_API_KEY") { providers.push("openrouter"); } + if has_key("kilo_key", "KILO_API_KEY") { + providers.push("kilo"); + } if has_key("zhipu_key", "ZHIPU_API_KEY") { providers.push("zhipu"); } diff --git a/src/api/providers.rs b/src/api/providers.rs index 8fb5e54d6..4b905f7ce 100644 --- a/src/api/providers.rs +++ b/src/api/providers.rs @@ -41,6 +41,7 @@ pub(super) struct ProviderStatus { openai: bool, openai_chatgpt: bool, openrouter: bool, + kilo: bool, zhipu: bool, groq: bool, together: bool, @@ -132,6 +133,7 @@ fn provider_toml_key(provider: &str) -> Option<&'static str> { "anthropic" => Some("anthropic_key"), "openai" => 
Some("openai_key"), "openrouter" => Some("openrouter_key"), + "kilo" => Some("kilo_key"), "zhipu" => Some("zhipu_key"), "groq" => Some("groq_key"), "together" => Some("together_key"), @@ -192,6 +194,12 @@ fn build_test_llm_config(provider: &str, credential: &str) -> crate::config::Llm api_key: credential.to_string(), name: None, }), + "kilo" => Some(ProviderConfig { + api_type: ApiType::OpenAiCompletions, + base_url: "https://api.kilo.ai/api/gateway".to_string(), + api_key: credential.to_string(), + name: None, + }), "zhipu" => Some(ProviderConfig { api_type: ApiType::OpenAiCompletions, base_url: "https://api.z.ai/api/paas/v4".to_string(), @@ -287,6 +295,7 @@ fn build_test_llm_config(provider: &str, credential: &str) -> crate::config::Llm anthropic_key: (provider == "anthropic").then(|| credential.to_string()), openai_key: (provider == "openai").then(|| credential.to_string()), openrouter_key: (provider == "openrouter").then(|| credential.to_string()), + kilo_key: (provider == "kilo").then(|| credential.to_string()), zhipu_key: (provider == "zhipu").then(|| credential.to_string()), groq_key: (provider == "groq").then(|| credential.to_string()), together_key: (provider == "together").then(|| credential.to_string()), @@ -490,6 +499,7 @@ pub(super) async fn get_providers( openai, openai_chatgpt, openrouter, + kilo, zhipu, groq, together, @@ -531,6 +541,7 @@ pub(super) async fn get_providers( has_value("openai_key", "OPENAI_API_KEY"), openai_oauth_configured, has_value("openrouter_key", "OPENROUTER_API_KEY"), + has_value("kilo_key", "KILO_API_KEY"), has_value("zhipu_key", "ZHIPU_API_KEY"), has_value("groq_key", "GROQ_API_KEY"), has_value("together_key", "TOGETHER_API_KEY"), @@ -554,6 +565,7 @@ pub(super) async fn get_providers( std::env::var("OPENAI_API_KEY").is_ok(), openai_oauth_configured, std::env::var("OPENROUTER_API_KEY").is_ok(), + std::env::var("KILO_API_KEY").is_ok(), std::env::var("ZHIPU_API_KEY").is_ok(), std::env::var("GROQ_API_KEY").is_ok(), std::env::var("TOGETHER_API_KEY").is_ok(), @@ -577,6 +589,7 @@ pub(super) async fn get_providers( openai, openai_chatgpt, openrouter, + kilo, zhipu, groq, together, @@ -597,6 +610,7 @@ pub(super) async fn get_providers( || providers.openai || providers.openai_chatgpt || providers.openrouter + || providers.kilo || providers.zhipu || providers.groq || providers.together diff --git a/src/config.rs b/src/config.rs index 3d3f79349..d988255a2 100644 --- a/src/config.rs +++ b/src/config.rs @@ -155,6 +155,7 @@ pub struct LlmConfig { pub anthropic_key: Option, pub openai_key: Option, pub openrouter_key: Option, + pub kilo_key: Option, pub zhipu_key: Option, pub groq_key: Option, pub together_key: Option, @@ -189,6 +190,7 @@ impl std::fmt::Debug for LlmConfig { "openrouter_key", &self.openrouter_key.as_ref().map(|_| "[REDACTED]"), ) + .field("kilo_key", &self.kilo_key.as_ref().map(|_| "[REDACTED]")) .field("zhipu_key", &self.zhipu_key.as_ref().map(|_| "[REDACTED]")) .field("groq_key", &self.groq_key.as_ref().map(|_| "[REDACTED]")) .field( @@ -248,6 +250,7 @@ impl LlmConfig { self.anthropic_key.is_some() || self.openai_key.is_some() || self.openrouter_key.is_some() + || self.kilo_key.is_some() || self.zhipu_key.is_some() || self.groq_key.is_some() || self.together_key.is_some() @@ -271,6 +274,7 @@ impl LlmConfig { const ANTHROPIC_PROVIDER_BASE_URL: &str = "https://api.anthropic.com"; const OPENAI_PROVIDER_BASE_URL: &str = "https://api.openai.com"; const OPENROUTER_PROVIDER_BASE_URL: &str = "https://openrouter.ai/api"; +const KILO_PROVIDER_BASE_URL: &str 
= "https://api.kilo.ai/api/gateway"; const OPENCODE_ZEN_PROVIDER_BASE_URL: &str = "https://opencode.ai/zen"; const MINIMAX_PROVIDER_BASE_URL: &str = "https://api.minimax.io/anthropic"; const MINIMAX_CN_PROVIDER_BASE_URL: &str = "https://api.minimaxi.com/anthropic"; @@ -1506,6 +1510,7 @@ struct TomlLlmConfigFields { anthropic_key: Option, openai_key: Option, openrouter_key: Option, + kilo_key: Option, zhipu_key: Option, groq_key: Option, together_key: Option, @@ -1534,6 +1539,7 @@ struct TomlLlmConfig { anthropic_key: Option, openai_key: Option, openrouter_key: Option, + kilo_key: Option, zhipu_key: Option, groq_key: Option, together_key: Option, @@ -1587,6 +1593,7 @@ impl<'de> Deserialize<'de> for TomlLlmConfig { anthropic_key: fields.anthropic_key, openai_key: fields.openai_key, openrouter_key: fields.openrouter_key, + kilo_key: fields.kilo_key, zhipu_key: fields.zhipu_key, groq_key: fields.groq_key, together_key: fields.together_key, @@ -2109,6 +2116,7 @@ impl Config { let has_legacy_keys = std::env::var("ANTHROPIC_API_KEY").is_ok() || std::env::var("OPENAI_API_KEY").is_ok() || std::env::var("OPENROUTER_API_KEY").is_ok() + || std::env::var("KILO_API_KEY").is_ok() || std::env::var("ZHIPU_API_KEY").is_ok() || std::env::var("GROQ_API_KEY").is_ok() || std::env::var("TOGETHER_API_KEY").is_ok() @@ -2141,6 +2149,7 @@ impl Config { || std::env::var("ANTHROPIC_OAUTH_TOKEN").is_ok() || std::env::var("OPENAI_API_KEY").is_ok() || std::env::var("OPENROUTER_API_KEY").is_ok() + || std::env::var("KILO_API_KEY").is_ok() || std::env::var("OPENCODE_ZEN_API_KEY").is_ok(); !has_provider_env_vars && !has_legacy_bootstrap_vars @@ -2182,6 +2191,7 @@ impl Config { .or_else(|| std::env::var("ANTHROPIC_AUTH_TOKEN").ok()), openai_key: std::env::var("OPENAI_API_KEY").ok(), openrouter_key: std::env::var("OPENROUTER_API_KEY").ok(), + kilo_key: std::env::var("KILO_API_KEY").ok(), zhipu_key: std::env::var("ZHIPU_API_KEY").ok(), groq_key: std::env::var("GROQ_API_KEY").ok(), together_key: std::env::var("TOGETHER_API_KEY").ok(), @@ -2237,6 +2247,17 @@ impl Config { }); } + if let Some(kilo_key) = llm.kilo_key.clone() { + llm.providers + .entry("kilo".to_string()) + .or_insert_with(|| ProviderConfig { + api_type: ApiType::OpenAiCompletions, + base_url: KILO_PROVIDER_BASE_URL.to_string(), + api_key: kilo_key, + name: None, + }); + } + if let Some(zhipu_key) = llm.zhipu_key.clone() { llm.providers .entry("zhipu".to_string()) @@ -2533,6 +2554,12 @@ impl Config { .as_deref() .and_then(resolve_env_value) .or_else(|| std::env::var("OPENROUTER_API_KEY").ok()), + kilo_key: toml + .llm + .kilo_key + .as_deref() + .and_then(resolve_env_value) + .or_else(|| std::env::var("KILO_API_KEY").ok()), zhipu_key: toml .llm .zhipu_key @@ -2685,6 +2712,17 @@ impl Config { }); } + if let Some(kilo_key) = llm.kilo_key.clone() { + llm.providers + .entry("kilo".to_string()) + .or_insert_with(|| ProviderConfig { + api_type: ApiType::OpenAiCompletions, + base_url: KILO_PROVIDER_BASE_URL.to_string(), + api_key: kilo_key, + name: None, + }); + } + if let Some(zhipu_key) = llm.zhipu_key.clone() { llm.providers .entry("zhipu".to_string()) @@ -4012,6 +4050,7 @@ pub fn run_onboarding() -> anyhow::Result> { "MiniMax", "Moonshot AI (Kimi)", "Z.AI Coding Plan", + "Kilo Gateway", ]; let provider_idx = Select::new() .with_prompt("Which LLM provider do you want to use?") @@ -4075,6 +4114,7 @@ pub fn run_onboarding() -> anyhow::Result> { "zai_coding_plan_key", "zai-coding-plan", ), + 16 => ("Kilo Gateway API key", "kilo_key", "kilo"), _ => unreachable!(), }; let 
is_secret = provider_id != "ollama"; @@ -4287,7 +4327,7 @@ mod tests { impl EnvGuard { fn new() -> Self { - const KEYS: [&str; 22] = [ + const KEYS: [&str; 23] = [ "SPACEBOT_DIR", "SPACEBOT_DEPLOYMENT", "SPACEBOT_CRON_TIMEZONE", @@ -4295,6 +4335,7 @@ mod tests { "ANTHROPIC_OAUTH_TOKEN", "OPENAI_API_KEY", "OPENROUTER_API_KEY", + "KILO_API_KEY", "ZHIPU_API_KEY", "GROQ_API_KEY", "TOGETHER_API_KEY", @@ -5111,6 +5152,7 @@ startup_delay_secs = 2 ("anthropic_key", "test-key", "anthropic", "anthropic.com"), ("openai_key", "test-key", "openai", "openai.com"), ("openrouter_key", "test-key", "openrouter", "openrouter.ai"), + ("kilo_key", "test-key", "kilo", "api.kilo.ai"), ("deepseek_key", "test-key", "deepseek", "deepseek.com"), ("minimax_key", "test-key", "minimax", "minimax.io"), ("minimax_cn_key", "test-key", "minimax-cn", "minimaxi.com"), @@ -5174,6 +5216,7 @@ startup_delay_secs = 2 "openrouter", "openrouter.ai", ), + ("KILO_API_KEY", "test-key", "kilo", "api.kilo.ai"), ("DEEPSEEK_API_KEY", "test-key", "deepseek", "deepseek.com"), ("MINIMAX_API_KEY", "test-key", "minimax", "minimax.io"), ("NVIDIA_API_KEY", "test-key", "nvidia", "nvidia.com"), diff --git a/src/llm/model.rs b/src/llm/model.rs index c2a166dd0..9819492e5 100644 --- a/src/llm/model.rs +++ b/src/llm/model.rs @@ -111,11 +111,20 @@ impl SpacebotModel { .map_err(|e| CompletionError::ProviderError(e.to_string()))?, }; - if provider_id == "zai-coding-plan" || provider_id == "zhipu" { - let display_name = if provider_id == "zhipu" { - "Z.AI (GLM)" + if provider_id == "zai-coding-plan" || provider_id == "zhipu" || provider_id == "kilo" { + let (display_name, extra_headers): (&str, &[(&str, &str)]) = if provider_id == "zhipu" + { + ("Z.AI (GLM)", &[]) + } else if provider_id == "zai-coding-plan" { + ("Z.AI Coding Plan", &[]) } else { - "Z.AI Coding Plan" + ( + "Kilo Gateway", + &[ + ("HTTP-Referer", "https://github.com/spacedriveapp/spacebot"), + ("X-Title", "spacebot"), + ], + ) }; let endpoint = format!( "{}/chat/completions", @@ -127,6 +136,7 @@ impl SpacebotModel { display_name, &endpoint, Some(provider_config.api_key.clone()), + extra_headers, ) .await; } @@ -826,6 +836,7 @@ impl SpacebotModel { provider_display_name: &str, endpoint: &str, api_key: Option, + extra_headers: &[(&str, &str)], ) -> Result, CompletionError> { let mut messages = Vec::new(); @@ -869,15 +880,17 @@ impl SpacebotModel { body["tools"] = serde_json::json!(tools); } - let response = self.llm_manager.http_client().post(endpoint); + let mut request_builder = self.llm_manager.http_client().post(endpoint); - let response = if let Some(api_key) = api_key { - response.header("authorization", format!("Bearer {api_key}")) - } else { - response - }; + if let Some(api_key) = api_key { + request_builder = request_builder.header("authorization", format!("Bearer {api_key}")); + } - let response = response + for (header_name, header_value) in extra_headers { + request_builder = request_builder.header(*header_name, *header_value); + } + + let response = request_builder .header("content-type", "application/json") .json(&body) .send() diff --git a/src/llm/providers.rs b/src/llm/providers.rs index 6153e6682..c4b4ffc82 100644 --- a/src/llm/providers.rs +++ b/src/llm/providers.rs @@ -17,6 +17,10 @@ pub async fn init_providers(config: &LlmConfig) -> Result<()> { tracing::info!("OpenAI provider configured"); } + if config.kilo_key.is_some() { + tracing::info!("Kilo Gateway provider configured"); + } + if config.ollama_base_url.is_some() || config.ollama_key.is_some() { 
tracing::info!("Ollama provider configured"); } diff --git a/src/llm/routing.rs b/src/llm/routing.rs index 0549692e6..7d5be45da 100644 --- a/src/llm/routing.rs +++ b/src/llm/routing.rs @@ -174,6 +174,22 @@ pub fn defaults_for_provider(provider: &str) -> RoutingConfig { ..RoutingConfig::default() } } + "kilo" => { + let channel: String = "kilo/anthropic/claude-sonnet-4.5".into(); + let worker: String = "kilo/anthropic/claude-haiku-4.5".into(); + RoutingConfig { + channel: channel.clone(), + branch: channel.clone(), + worker: worker.clone(), + compactor: worker.clone(), + cortex: worker, + voice: String::new(), + task_overrides: HashMap::from([("coding".into(), channel)]), + fallbacks: HashMap::new(), + rate_limit_cooldown_secs: 60, + ..RoutingConfig::default() + } + } "openai" => { let channel: String = "openai/gpt-4.1".into(); let worker: String = "openai/gpt-4.1-mini".into(); @@ -367,6 +383,7 @@ pub fn defaults_for_provider(provider: &str) -> RoutingConfig { pub fn provider_to_prefix(provider: &str) -> &str { match provider { "openrouter" => "openrouter/", + "kilo" => "kilo/", "openai" => "openai/", "openai-chatgpt" => "openai-chatgpt/", "anthropic" => "anthropic/", From 64066869ac2ed0c5c6c6a57db842f047ca0c363b Mon Sep 17 00:00:00 2001 From: Mars Date: Tue, 24 Feb 2026 00:08:27 -0500 Subject: [PATCH 2/2] fix(kilo): use explicit API types and clamp settings overscroll --- README.md | 2 +- docs/content/docs/(configuration)/config.mdx | 8 ++- interface/src/routes/Settings.tsx | 8 +-- src/api/providers.rs | 12 ++-- src/config.rs | 68 ++++++++++++++------ src/llm/model.rs | 63 ++++++++++-------- 6 files changed, 98 insertions(+), 63 deletions(-) diff --git a/README.md b/README.md index c35b4b027..ba59680ad 100644 --- a/README.md +++ b/README.md @@ -184,7 +184,7 @@ coding = "ollama/qwen3" ```toml [llm.provider.my-provider] -api_type = "openai_completions" # or "anthropic" +api_type = "openai_completions" # or "openai_chat_completions", "openai_responses", "anthropic" base_url = "https://my-llm-host.example.com" api_key = "env:MY_PROVIDER_KEY" diff --git a/docs/content/docs/(configuration)/config.mdx b/docs/content/docs/(configuration)/config.mdx index 854ad7b0e..e2a2d6f64 100644 --- a/docs/content/docs/(configuration)/config.mdx +++ b/docs/content/docs/(configuration)/config.mdx @@ -342,7 +342,7 @@ Custom providers allow configuring LLM providers with custom endpoints and API t ```toml [llm.provider.] -api_type = "anthropic" # Required - one of: anthropic, openai_completions, openai_responses +api_type = "anthropic" # Required - see supported values below base_url = "https://api..." # Required - valid URL api_key = "env:API_KEY" # Required - API key (supports env:VAR_NAME format) name = "My Provider" # Optional - friendly name for display @@ -350,16 +350,18 @@ name = "My Provider" # Optional - friendly name for display | Field | Type | Required | Description | |-------|------|----------|-------------| -| `api_type` | string | Yes | API protocol type. One of: `anthropic` (Anthropic Messages API), `openai_completions` (OpenAI Chat Completions-compatible API), or `openai_responses` (OpenAI Responses API-compatible) | +| `api_type` | string | Yes | API protocol type. One of: `anthropic`, `openai_completions`, `openai_chat_completions`, `openai_responses`, `gemini`, or `kilo_gateway` | | `base_url` | string | Yes | Base URL of the API endpoint. Must be a valid URL (including protocol) | | `api_key` | string | Yes | API key for authentication. 
Supports `env:VAR_NAME` syntax to reference environment variables | | `name` | string | No | Optional friendly name for the provider (displayed in logs and UI) | > Note: -> - For `openai_completions` and `openai_responses`, configure `base_url` as the provider root URL (usually without a trailing `/v1`). +> - For `openai_completions`, `openai_chat_completions`, and `openai_responses`, configure `base_url` as the provider root URL (usually without a trailing `/v1`). > - Spacebot appends the endpoint path automatically: > - `openai_completions` -> `/v1/chat/completions` +> - `openai_chat_completions` -> `/chat/completions` > - `openai_responses` -> `/v1/responses` +> - `kilo_gateway` -> `/chat/completions` plus Kilo-required `HTTP-Referer` / `X-Title` headers > - If you include `/v1` in `base_url`, requests can end up with duplicated paths such as `/v1/v1/...`. **Provider ID Requirements:** diff --git a/interface/src/routes/Settings.tsx b/interface/src/routes/Settings.tsx index a216b9122..28cd34e94 100644 --- a/interface/src/routes/Settings.tsx +++ b/interface/src/routes/Settings.tsx @@ -453,9 +453,9 @@ export function Settings() { }; return ( -
+
{/* Sidebar */} -
+
Settings @@ -475,13 +475,13 @@ export function Settings() {
{/* Content */} -
+

{SECTIONS.find((s) => s.id === activeSection)?.label}

-
+
{activeSection === "providers" ? (
{/* Section header */} diff --git a/src/api/providers.rs b/src/api/providers.rs index 4b905f7ce..c4efbe4f5 100644 --- a/src/api/providers.rs +++ b/src/api/providers.rs @@ -195,16 +195,16 @@ fn build_test_llm_config(provider: &str, credential: &str) -> crate::config::Llm name: None, }), "kilo" => Some(ProviderConfig { - api_type: ApiType::OpenAiCompletions, + api_type: ApiType::KiloGateway, base_url: "https://api.kilo.ai/api/gateway".to_string(), api_key: credential.to_string(), - name: None, + name: Some("Kilo Gateway".to_string()), }), "zhipu" => Some(ProviderConfig { - api_type: ApiType::OpenAiCompletions, + api_type: ApiType::OpenAiChatCompletions, base_url: "https://api.z.ai/api/paas/v4".to_string(), api_key: credential.to_string(), - name: None, + name: Some("Z.AI (GLM)".to_string()), }), "groq" => Some(ProviderConfig { api_type: ApiType::OpenAiCompletions, @@ -279,10 +279,10 @@ fn build_test_llm_config(provider: &str, credential: &str) -> crate::config::Llm name: None, }), "zai-coding-plan" => Some(ProviderConfig { - api_type: ApiType::OpenAiCompletions, + api_type: ApiType::OpenAiChatCompletions, base_url: "https://api.z.ai/api/coding/paas/v4".to_string(), api_key: credential.to_string(), - name: None, + name: Some("Z.AI Coding Plan".to_string()), }), _ => None, }; diff --git a/src/config.rs b/src/config.rs index d988255a2..e5a376693 100644 --- a/src/config.rs +++ b/src/config.rs @@ -101,9 +101,13 @@ impl Default for MetricsConfig { /// API types supported by LLM providers. #[derive(Debug, Clone, PartialEq, Eq)] pub enum ApiType { - /// OpenAI Completions API (https://api.openai.com/v1/completions) + /// OpenAI Chat Completions API (`/v1/chat/completions`) OpenAiCompletions, - /// OpenAI Responses API (https://api.openai.com/v1/chat/completions) + /// OpenAI-compatible Chat Completions API (`/chat/completions`) + OpenAiChatCompletions, + /// Kilo Gateway API (`/chat/completions`) with required gateway headers + KiloGateway, + /// OpenAI Responses API (`/v1/responses`) OpenAiResponses, /// Anthropic Messages API (https://api.anthropic.com/v1/messages) Anthropic, @@ -118,12 +122,14 @@ impl<'de> serde::Deserialize<'de> for ApiType { let s = String::deserialize(deserializer)?; match s.as_str() { "openai_completions" => Ok(Self::OpenAiCompletions), + "openai_chat_completions" => Ok(Self::OpenAiChatCompletions), + "kilo_gateway" => Ok(Self::KiloGateway), "openai_responses" => Ok(Self::OpenAiResponses), "anthropic" => Ok(Self::Anthropic), "gemini" => Ok(Self::Gemini), other => Err(serde::de::Error::invalid_value( serde::de::Unexpected::Str(other), - &"one of \"openai_completions\", \"openai_responses\", \"anthropic\", or \"gemini\"", + &"one of \"openai_completions\", \"openai_chat_completions\", \"kilo_gateway\", \"openai_responses\", \"anthropic\", or \"gemini\"", )), } } @@ -2251,10 +2257,10 @@ impl Config { llm.providers .entry("kilo".to_string()) .or_insert_with(|| ProviderConfig { - api_type: ApiType::OpenAiCompletions, + api_type: ApiType::KiloGateway, base_url: KILO_PROVIDER_BASE_URL.to_string(), api_key: kilo_key, - name: None, + name: Some("Kilo Gateway".to_string()), }); } @@ -2262,10 +2268,10 @@ impl Config { llm.providers .entry("zhipu".to_string()) .or_insert_with(|| ProviderConfig { - api_type: ApiType::OpenAiCompletions, + api_type: ApiType::OpenAiChatCompletions, base_url: ZHIPU_PROVIDER_BASE_URL.to_string(), api_key: zhipu_key, - name: None, + name: Some("Z.AI (GLM)".to_string()), }); } @@ -2273,10 +2279,10 @@ impl Config { llm.providers 
.entry("zai-coding-plan".to_string()) .or_insert_with(|| ProviderConfig { - api_type: ApiType::OpenAiCompletions, + api_type: ApiType::OpenAiChatCompletions, base_url: ZAI_CODING_PLAN_BASE_URL.to_string(), api_key: zai_coding_plan_key, - name: None, + name: Some("Z.AI Coding Plan".to_string()), }); } @@ -2716,10 +2722,10 @@ impl Config { llm.providers .entry("kilo".to_string()) .or_insert_with(|| ProviderConfig { - api_type: ApiType::OpenAiCompletions, + api_type: ApiType::KiloGateway, base_url: KILO_PROVIDER_BASE_URL.to_string(), api_key: kilo_key, - name: None, + name: Some("Kilo Gateway".to_string()), }); } @@ -2727,10 +2733,10 @@ impl Config { llm.providers .entry("zhipu".to_string()) .or_insert_with(|| ProviderConfig { - api_type: ApiType::OpenAiCompletions, + api_type: ApiType::OpenAiChatCompletions, base_url: ZHIPU_PROVIDER_BASE_URL.to_string(), api_key: zhipu_key, - name: None, + name: Some("Z.AI (GLM)".to_string()), }); } @@ -2738,10 +2744,10 @@ impl Config { llm.providers .entry("zai-coding-plan".to_string()) .or_insert_with(|| ProviderConfig { - api_type: ApiType::OpenAiCompletions, + api_type: ApiType::OpenAiChatCompletions, base_url: ZAI_CODING_PLAN_BASE_URL.to_string(), api_key: zai_coding_plan_key, - name: None, + name: Some("Z.AI Coding Plan".to_string()), }); } @@ -4407,22 +4413,40 @@ api_key = "test-key" assert_eq!(result1.unwrap().api_type, ApiType::OpenAiCompletions); let toml2 = r#" -api_type = "openai_responses" -base_url = "https://api.openai.com" +api_type = "openai_chat_completions" +base_url = "https://api.example.com" api_key = "test-key" "#; let result2: StdResult = toml::from_str(toml2); assert!(result2.is_ok(), "Error: {:?}", result2.err()); - assert_eq!(result2.unwrap().api_type, ApiType::OpenAiResponses); + assert_eq!(result2.unwrap().api_type, ApiType::OpenAiChatCompletions); let toml3 = r#" -api_type = "anthropic" -base_url = "https://api.anthropic.com" +api_type = "kilo_gateway" +base_url = "https://api.kilo.ai/api/gateway" api_key = "test-key" "#; let result3: StdResult = toml::from_str(toml3); assert!(result3.is_ok(), "Error: {:?}", result3.err()); - assert_eq!(result3.unwrap().api_type, ApiType::Anthropic); + assert_eq!(result3.unwrap().api_type, ApiType::KiloGateway); + + let toml4 = r#" +api_type = "openai_responses" +base_url = "https://api.openai.com" +api_key = "test-key" +"#; + let result4: StdResult = toml::from_str(toml4); + assert!(result4.is_ok(), "Error: {:?}", result4.err()); + assert_eq!(result4.unwrap().api_type, ApiType::OpenAiResponses); + + let toml5 = r#" +api_type = "anthropic" +base_url = "https://api.anthropic.com" +api_key = "test-key" +"#; + let result5: StdResult = toml::from_str(toml5); + assert!(result5.is_ok(), "Error: {:?}", result5.err()); + assert_eq!(result5.unwrap().api_type, ApiType::Anthropic); } #[test] @@ -4433,6 +4457,8 @@ api_key = "test-key" let error = result.unwrap_err(); assert!(error.to_string().contains("invalid value")); assert!(error.to_string().contains("openai_completions")); + assert!(error.to_string().contains("openai_chat_completions")); + assert!(error.to_string().contains("kilo_gateway")); assert!(error.to_string().contains("openai_responses")); assert!(error.to_string().contains("anthropic")); } diff --git a/src/llm/model.rs b/src/llm/model.rs index 9819492e5..e0770b42e 100644 --- a/src/llm/model.rs +++ b/src/llm/model.rs @@ -111,39 +111,44 @@ impl SpacebotModel { .map_err(|e| CompletionError::ProviderError(e.to_string()))?, }; - if provider_id == "zai-coding-plan" || provider_id == "zhipu" || 
provider_id == "kilo" { - let (display_name, extra_headers): (&str, &[(&str, &str)]) = if provider_id == "zhipu" - { - ("Z.AI (GLM)", &[]) - } else if provider_id == "zai-coding-plan" { - ("Z.AI Coding Plan", &[]) - } else { - ( + match provider_config.api_type { + ApiType::Anthropic => self.call_anthropic(request, &provider_config).await, + ApiType::OpenAiCompletions => self.call_openai(request, &provider_config).await, + ApiType::OpenAiChatCompletions => { + let endpoint = format!( + "{}/chat/completions", + provider_config.base_url.trim_end_matches('/') + ); + let display_name = provider_config + .name + .as_deref() + .unwrap_or("OpenAI-compatible provider"); + self.call_openai_compatible_with_optional_auth( + request, + display_name, + &endpoint, + Some(provider_config.api_key.clone()), + &[], + ) + .await + } + ApiType::KiloGateway => { + let endpoint = format!( + "{}/chat/completions", + provider_config.base_url.trim_end_matches('/') + ); + self.call_openai_compatible_with_optional_auth( + request, "Kilo Gateway", + &endpoint, + Some(provider_config.api_key.clone()), &[ ("HTTP-Referer", "https://github.com/spacedriveapp/spacebot"), ("X-Title", "spacebot"), ], ) - }; - let endpoint = format!( - "{}/chat/completions", - provider_config.base_url.trim_end_matches('/') - ); - return self - .call_openai_compatible_with_optional_auth( - request, - display_name, - &endpoint, - Some(provider_config.api_key.clone()), - extra_headers, - ) - .await; - } - - match provider_config.api_type { - ApiType::Anthropic => self.call_anthropic(request, &provider_config).await, - ApiType::OpenAiCompletions => self.call_openai(request, &provider_config).await, + .await + } ApiType::OpenAiResponses => self.call_openai_responses(request, &provider_config).await, ApiType::Gemini => { self.call_openai_compatible(request, "Google Gemini", &provider_config) @@ -741,7 +746,9 @@ impl SpacebotModel { let base_url = provider_config.base_url.trim_end_matches('/'); let endpoint_path = match provider_config.api_type { ApiType::OpenAiCompletions | ApiType::OpenAiResponses => "/v1/chat/completions", - ApiType::Gemini => "/chat/completions", + ApiType::OpenAiChatCompletions | ApiType::Gemini | ApiType::KiloGateway => { + "/chat/completions" + } ApiType::Anthropic => { return Err(CompletionError::ProviderError(format!( "{provider_display_name} is configured with anthropic API type, but this call expects an OpenAI-compatible API"