From fff5ea587cbdfb1b781026b21504fcd4a4eae7c7 Mon Sep 17 00:00:00 2001 From: James Barney Date: Tue, 20 Jan 2026 12:04:02 -0500 Subject: [PATCH] Add support for custom OpenAI-compatible endpoints Enables configuration of custom base URLs for OpenAI-compatible APIs through environment variables. This supports: - Azure OpenAI endpoints - Custom proxy servers - Local LLM deployments (Ollama, LocalAI, vLLM, etc.) - Any OpenAI-compatible API Environment variables: - OPENAI_API_BASE: Custom endpoint URL (e.g., http://localhost:11434/v1 for Ollama) - OPENAI_API_TYPE: API type (set to 'azure' for Azure OpenAI) - OPENAI_API_VERSION: API version (required for Azure, e.g., '2024-02-01') - OPENAI_API_KEY: API key (optional for local endpoints like Ollama) When OPENAI_API_BASE is set, the runtime uses the custom endpoint instead of the default OpenAI API. Azure-specific headers are automatically applied when OPENAI_API_TYPE=azure. Local endpoints that don't require authentication (like Ollama) work without an API key. 
Examples: - Ollama: OPENAI_API_BASE=http://localhost:11434/v1 (no key needed) - Azure: OPENAI_API_BASE=https://your-resource.openai.azure.com/openai/deployments/your-deployment OPENAI_API_TYPE=azure OPENAI_API_VERSION=2024-02-01 OPENAI_API_KEY=your-key --- src/main/agent/runtime.ts | 37 +++++++++++++++++++++++++++++++++++++ 1 file changed, 37 insertions(+) diff --git a/src/main/agent/runtime.ts b/src/main/agent/runtime.ts index 9d997dd..62082ac 100644 --- a/src/main/agent/runtime.ts +++ b/src/main/agent/runtime.ts @@ -83,10 +83,47 @@ function getModelInstance( model.startsWith("o4") ) { const apiKey = getApiKey("openai") + + // Check for custom OpenAI-compatible endpoint (e.g., Azure, proxy, Ollama, or local deployment) + const customBaseURL = process.env.OPENAI_API_BASE + const apiVersion = process.env.OPENAI_API_VERSION + const apiType = process.env.OPENAI_API_TYPE + + if (customBaseURL) { + console.log("[Runtime] Using custom OpenAI endpoint:", customBaseURL) + if (apiType) { + console.log("[Runtime] API type:", apiType) + } + if (apiVersion) { + console.log("[Runtime] API version:", apiVersion) + } + + // Azure OpenAI uses 'api-key' header instead of 'Authorization: Bearer' + const isAzure = apiType === "azure" + + // Some endpoints (like Ollama) don't require API keys + console.log("[Runtime] OpenAI API key present:", !!apiKey) + + return new ChatOpenAI({ + model, + // For Azure, we pass the key via defaultHeaders instead of openAIApiKey + // to avoid sending both Authorization and api-key headers + // For local endpoints (Ollama, etc.), apiKey can be undefined + openAIApiKey: isAzure ? undefined : apiKey || "not-needed", + configuration: { + baseURL: customBaseURL, + defaultQuery: apiVersion ? { "api-version": apiVersion } : undefined, + defaultHeaders: isAzure && apiKey ? 
{ "api-key": apiKey } : undefined + } + }) + } + + // Standard OpenAI requires an API key console.log("[Runtime] OpenAI API key present:", !!apiKey) if (!apiKey) { throw new Error("OpenAI API key not configured") } + return new ChatOpenAI({ model, openAIApiKey: apiKey