Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
16 changes: 16 additions & 0 deletions src/api/providers.rs
Original file line number Diff line number Diff line change
Expand Up @@ -101,96 +101,112 @@ fn build_test_llm_config(provider: &str, credential: &str) -> crate::config::Llm
api_type: ApiType::Anthropic,
base_url: "https://api.anthropic.com".to_string(),
api_key: credential.to_string(),
is_auth_token: false,
name: None,
}),
"openai" => Some(ProviderConfig {
api_type: ApiType::OpenAiCompletions,
base_url: "https://api.openai.com".to_string(),
api_key: credential.to_string(),
is_auth_token: false,
name: None,
}),
"openrouter" => Some(ProviderConfig {
api_type: ApiType::OpenAiCompletions,
base_url: "https://openrouter.ai/api".to_string(),
api_key: credential.to_string(),
is_auth_token: false,
name: None,
}),
"zhipu" => Some(ProviderConfig {
api_type: ApiType::OpenAiCompletions,
base_url: "https://api.z.ai/api/paas/v4".to_string(),
api_key: credential.to_string(),
is_auth_token: false,
name: None,
}),
"groq" => Some(ProviderConfig {
api_type: ApiType::OpenAiCompletions,
base_url: "https://api.groq.com/openai".to_string(),
api_key: credential.to_string(),
is_auth_token: false,
name: None,
}),
"together" => Some(ProviderConfig {
api_type: ApiType::OpenAiCompletions,
base_url: "https://api.together.xyz".to_string(),
api_key: credential.to_string(),
is_auth_token: false,
name: None,
}),
"fireworks" => Some(ProviderConfig {
api_type: ApiType::OpenAiCompletions,
base_url: "https://api.fireworks.ai/inference".to_string(),
api_key: credential.to_string(),
is_auth_token: false,
name: None,
}),
"deepseek" => Some(ProviderConfig {
api_type: ApiType::OpenAiCompletions,
base_url: "https://api.deepseek.com".to_string(),
api_key: credential.to_string(),
is_auth_token: false,
name: None,
}),
"xai" => Some(ProviderConfig {
api_type: ApiType::OpenAiCompletions,
base_url: "https://api.x.ai".to_string(),
api_key: credential.to_string(),
is_auth_token: false,
name: None,
}),
"mistral" => Some(ProviderConfig {
api_type: ApiType::OpenAiCompletions,
base_url: "https://api.mistral.ai".to_string(),
api_key: credential.to_string(),
is_auth_token: false,
name: None,
}),
"gemini" => Some(ProviderConfig {
api_type: ApiType::Gemini,
base_url: crate::config::GEMINI_PROVIDER_BASE_URL.to_string(),
api_key: credential.to_string(),
is_auth_token: false,
name: None,
}),
"opencode-zen" => Some(ProviderConfig {
api_type: ApiType::OpenAiCompletions,
base_url: "https://opencode.ai/zen".to_string(),
api_key: credential.to_string(),
is_auth_token: false,
name: None,
}),
"nvidia" => Some(ProviderConfig {
api_type: ApiType::OpenAiCompletions,
base_url: "https://integrate.api.nvidia.com".to_string(),
api_key: credential.to_string(),
is_auth_token: false,
name: None,
}),
"minimax" => Some(ProviderConfig {
api_type: ApiType::Anthropic,
base_url: "https://api.minimax.io/anthropic".to_string(),
api_key: credential.to_string(),
is_auth_token: false,
name: None,
}),
"moonshot" => Some(ProviderConfig {
api_type: ApiType::OpenAiCompletions,
base_url: "https://api.moonshot.ai".to_string(),
api_key: credential.to_string(),
is_auth_token: false,
name: None,
}),
"zai-coding-plan" => Some(ProviderConfig {
api_type: ApiType::OpenAiCompletions,
base_url: "https://api.z.ai/api/coding/paas/v4".to_string(),
api_key: credential.to_string(),
is_auth_token: false,
name: None,
}),
_ => None,
Expand Down
51 changes: 51 additions & 0 deletions src/config.rs
Original file line number Diff line number Diff line change
Expand Up @@ -133,6 +133,8 @@ pub struct ProviderConfig {
pub base_url: String,
pub api_key: String,
pub name: Option<String>,
/// Whether the token came from ANTHROPIC_AUTH_TOKEN (uses Bearer auth)
pub is_auth_token: bool,
Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Nice to have the provenance tracked explicitly. One thing to watch when this lands on main: adding a new required field means every ProviderConfig { ... } literal needs to set it (most should be is_auth_token: false). Might be worth a quick rg 'ProviderConfig \{' sweep on main to avoid a surprise compile break during merge resolution.

}

/// LLM provider credentials (instance-level).
Expand Down Expand Up @@ -1802,6 +1804,10 @@ impl Config {

/// Load from environment variables only (no config file).
pub fn load_from_env(instance_dir: &Path) -> Result<Self> {
// Track whether ANTHROPIC_AUTH_TOKEN is being used (for Bearer auth)
let anthropic_is_auth_token = std::env::var("ANTHROPIC_API_KEY").is_err()
&& std::env::var("ANTHROPIC_AUTH_TOKEN").is_ok();

let mut llm = LlmConfig {
anthropic_key: std::env::var("ANTHROPIC_API_KEY")
.ok()
Expand Down Expand Up @@ -1837,6 +1843,7 @@ impl Config {
base_url,
api_key: anthropic_key,
name: None,
is_auth_token: anthropic_is_auth_token,
});
}

Expand All @@ -1847,6 +1854,7 @@ impl Config {
api_type: ApiType::OpenAiCompletions,
base_url: OPENAI_PROVIDER_BASE_URL.to_string(),
api_key: openai_key,
is_auth_token: false,
name: None,
});
}
Expand All @@ -1858,6 +1866,7 @@ impl Config {
api_type: ApiType::OpenAiCompletions,
base_url: OPENROUTER_PROVIDER_BASE_URL.to_string(),
api_key: openrouter_key,
is_auth_token: false,
name: None,
});
}
Expand All @@ -1869,6 +1878,7 @@ impl Config {
api_type: ApiType::OpenAiCompletions,
base_url: ZHIPU_PROVIDER_BASE_URL.to_string(),
api_key: zhipu_key,
is_auth_token: false,
name: None,
});
}
Expand All @@ -1880,6 +1890,7 @@ impl Config {
api_type: ApiType::OpenAiCompletions,
base_url: ZAI_CODING_PLAN_BASE_URL.to_string(),
api_key: zai_coding_plan_key,
is_auth_token: false,
name: None,
});
}
Expand All @@ -1891,6 +1902,7 @@ impl Config {
api_type: ApiType::OpenAiCompletions,
base_url: OPENCODE_ZEN_PROVIDER_BASE_URL.to_string(),
api_key: opencode_zen_key,
is_auth_token: false,
name: None,
});
}
Expand All @@ -1902,6 +1914,7 @@ impl Config {
api_type: ApiType::Anthropic,
base_url: MINIMAX_PROVIDER_BASE_URL.to_string(),
api_key: minimax_key,
is_auth_token: false,
name: None,
});
}
Expand All @@ -1913,6 +1926,7 @@ impl Config {
api_type: ApiType::OpenAiCompletions,
base_url: MOONSHOT_PROVIDER_BASE_URL.to_string(),
api_key: moonshot_key,
is_auth_token: false,
name: None,
});
}
Expand All @@ -1924,6 +1938,7 @@ impl Config {
api_type: ApiType::OpenAiCompletions,
base_url: NVIDIA_PROVIDER_BASE_URL.to_string(),
api_key: nvidia_key,
is_auth_token: false,
name: None,
});
}
Expand All @@ -1935,6 +1950,7 @@ impl Config {
api_type: ApiType::Gemini,
base_url: GEMINI_PROVIDER_BASE_URL.to_string(),
api_key: gemini_key,
is_auth_token: false,
name: None,
});
}
Expand All @@ -1944,6 +1960,7 @@ impl Config {

// Env-only routing: check for env overrides on channel/worker models.
// SPACEBOT_MODEL overrides all process types at once; specific vars take precedence.
// ANTHROPIC_MODEL sets all anthropic/* models to the specified value.
let mut routing = RoutingConfig::default();
if let Ok(model) = std::env::var("SPACEBOT_MODEL") {
routing.channel = model.clone();
Expand All @@ -1952,6 +1969,19 @@ impl Config {
routing.compactor = model.clone();
routing.cortex = model;
}
if let Ok(anthropic_model) = std::env::var("ANTHROPIC_MODEL") {
    // ANTHROPIC_MODEL overrides every anthropic/* route at once.
    // Build the "anthropic/<model>" route string a single time and clone it
    // into each slot instead of formatting five identical strings.
    let model = format!("anthropic/{}", anthropic_model);
    routing.channel = model.clone();
    routing.branch = model.clone();
    routing.worker = model.clone();
    routing.compactor = model.clone();
    routing.cortex = model;
}
if let Ok(channel_model) = std::env::var("SPACEBOT_CHANNEL_MODEL") {
routing.channel = channel_model;
}
Expand Down Expand Up @@ -2045,6 +2075,16 @@ impl Config {
}
}

// Track whether ANTHROPIC_AUTH_TOKEN is being used (for Bearer auth).
// Two cases must flip this on:
//   1. The TOML key explicitly references env:ANTHROPIC_AUTH_TOKEN — without
//      this check a config pointing at the auth-token env var would resolve
//      successfully and silently fall back to x-api-key headers.
//   2. No TOML key is set and only ANTHROPIC_AUTH_TOKEN (not
//      ANTHROPIC_API_KEY) is present in the environment.
let anthropic_key_raw = toml.llm.anthropic_key.as_deref();
let anthropic_is_auth_token =
    matches!(anthropic_key_raw, Some("env:ANTHROPIC_AUTH_TOKEN"))
        || (anthropic_key_raw.is_none()
            && std::env::var("ANTHROPIC_API_KEY").is_err()
            && std::env::var("ANTHROPIC_AUTH_TOKEN").is_ok());
Comment on lines +2078 to +2086
Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🟠 Major

Config env:ANTHROPIC_AUTH_TOKEN won’t set is_auth_token.

anthropic_is_auth_token only flips when no TOML key is present, so a config that references env:ANTHROPIC_AUTH_TOKEN will still use x‑api‑key headers. Consider detecting that env reference explicitly (or adding a config flag).

🔧 Suggested change
-        let anthropic_is_auth_token = toml
-            .llm
-            .anthropic_key
-            .as_deref()
-            .and_then(resolve_env_value)
-            .is_none()
-            && std::env::var("ANTHROPIC_API_KEY").is_err()
-            && std::env::var("ANTHROPIC_AUTH_TOKEN").is_ok();
+        let anthropic_key_raw = toml.llm.anthropic_key.as_deref();
+        let anthropic_is_auth_token =
+            matches!(anthropic_key_raw, Some("env:ANTHROPIC_AUTH_TOKEN"))
+                || (anthropic_key_raw.is_none()
+                    && std::env::var("ANTHROPIC_API_KEY").is_err()
+                    && std::env::var("ANTHROPIC_AUTH_TOKEN").is_ok());

Also applies to: 2218-2229

🤖 Prompt for AI Agents
Verify each finding against the current code and only fix it if needed.

In `@src/config.rs` around lines 2078 - 2086, The current boolean
anthropic_is_auth_token only becomes true when no TOML key is present, so a TOML
anthropic_key that references env:ANTHROPIC_AUTH_TOKEN still selects x-api-key;
update the logic to also detect when toml.llm.anthropic_key explicitly
references the environment variable ANTHROPIC_AUTH_TOKEN (e.g., the raw string
contains or parses as "env:ANTHROPIC_AUTH_TOKEN" or resolve_env_value indicates
an env-ref) and treat that case as using auth token; modify the condition around
anthropic_is_auth_token (and the duplicate logic at the later block around the
2218-2229 occurrence) to check both the absence of a resolved TOML key and the
presence of an env reference to ANTHROPIC_AUTH_TOKEN before consulting
std::env::var.


let mut llm = LlmConfig {
anthropic_key: toml
.llm
Expand Down Expand Up @@ -2168,6 +2208,7 @@ impl Config {
api_key: resolve_env_value(&config.api_key)
.expect("Failed to resolve API key for provider"),
name: config.name,
is_auth_token: false,
},
)
})
Expand All @@ -2184,6 +2225,7 @@ impl Config {
base_url,
api_key: anthropic_key,
name: None,
is_auth_token: anthropic_is_auth_token,
});
}

Expand All @@ -2194,6 +2236,7 @@ impl Config {
api_type: ApiType::OpenAiCompletions,
base_url: OPENAI_PROVIDER_BASE_URL.to_string(),
api_key: openai_key,
is_auth_token: false,
name: None,
});
}
Expand All @@ -2205,6 +2248,7 @@ impl Config {
api_type: ApiType::OpenAiCompletions,
base_url: OPENROUTER_PROVIDER_BASE_URL.to_string(),
api_key: openrouter_key,
is_auth_token: false,
name: None,
});
}
Expand All @@ -2216,6 +2260,7 @@ impl Config {
api_type: ApiType::OpenAiCompletions,
base_url: ZHIPU_PROVIDER_BASE_URL.to_string(),
api_key: zhipu_key,
is_auth_token: false,
name: None,
});
}
Expand All @@ -2227,6 +2272,7 @@ impl Config {
api_type: ApiType::OpenAiCompletions,
base_url: ZAI_CODING_PLAN_BASE_URL.to_string(),
api_key: zai_coding_plan_key,
is_auth_token: false,
name: None,
});
}
Expand All @@ -2238,6 +2284,7 @@ impl Config {
api_type: ApiType::OpenAiCompletions,
base_url: OPENCODE_ZEN_PROVIDER_BASE_URL.to_string(),
api_key: opencode_zen_key,
is_auth_token: false,
name: None,
});
}
Expand All @@ -2249,6 +2296,7 @@ impl Config {
api_type: ApiType::Anthropic,
base_url: MINIMAX_PROVIDER_BASE_URL.to_string(),
api_key: minimax_key,
is_auth_token: false,
name: None,
});
}
Expand All @@ -2260,6 +2308,7 @@ impl Config {
api_type: ApiType::OpenAiCompletions,
base_url: MOONSHOT_PROVIDER_BASE_URL.to_string(),
api_key: moonshot_key,
is_auth_token: false,
name: None,
});
}
Expand All @@ -2271,6 +2320,7 @@ impl Config {
api_type: ApiType::OpenAiCompletions,
base_url: NVIDIA_PROVIDER_BASE_URL.to_string(),
api_key: nvidia_key,
is_auth_token: false,
name: None,
});
}
Expand All @@ -2282,6 +2332,7 @@ impl Config {
api_type: ApiType::Gemini,
base_url: GEMINI_PROVIDER_BASE_URL.to_string(),
api_key: gemini_key,
is_auth_token: false,
name: None,
});
}
Expand Down
Loading