From bb991ae72e1fbb0f74df42fbc955edbde43deb87 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Tarc=C3=ADsio=20Jr?=
Date: Wed, 18 Jun 2025 07:23:46 -0300
Subject: [PATCH] feat: Add support for custom provider endpoints and headers

This commit introduces several enhancements to configuration
flexibility and usability.

Users can now specify a custom `baseURL` and `headers` for API
providers (e.g., OpenAI, Gemini, Groq). This enables routing requests
through proxies or corporate gateways, and connecting to self-hosted,
API-compatible services.

Additionally, two new command-line flags have been added:

- `--config`: Specifies a path to a custom JSON configuration file,
  overriding the default search behavior.
- `--prompt-file`: Loads a prompt from a markdown file, making it
  easier to run complex, multi-line prompts in non-interactive mode.

The README has been updated to document these new features.
---
 README.md                         | 48 +++++++++++++++++++
 cmd/root.go                       | 77 ++++++++++++++++++++++++++++++-
 internal/config/config.go         | 30 ++++++++----
 internal/llm/agent/agent.go       | 66 ++++++++++++++++++++------
 internal/llm/provider/gemini.go   | 40 +++++++++++++++-
 internal/llm/provider/provider.go | 76 ++++++++++++++++++++++++++----
 6 files changed, 301 insertions(+), 36 deletions(-)

diff --git a/README.md b/README.md
index b98c1830..8166934d 100644
--- a/README.md
+++ b/README.md
@@ -135,6 +135,10 @@ This is useful if you want to use a different shell than your default system she
     "providers": {
       "openai": {
         "apiKey": "your-api-key",
+        "baseURL": "https://custom-openai-endpoint.com/v1",
+        "headers": {
+          "X-Custom-Header": "value"
+        },
         "disabled": false
       },
       "anthropic": {
@@ -143,6 +147,7 @@ This is useful if you want to use a different shell than your default system she
       },
       "groq": {
         "apiKey": "your-api-key",
+        "baseURL": "https://custom-groq-proxy.com/openai/v1",
         "disabled": false
       },
       "openrouter": {
@@ -188,6 +193,49 @@ This is useful if you want to use a different shell than your default system she
 }
 ```
 
+### Provider Configuration
+
+#### Custom Base URLs
+
+You can configure a custom base URL for any provider that supports it (OpenAI, Gemini, Groq, OpenRouter, XAI, Local). This is useful for:
+
+- Routing requests through proxy servers
+- Passing traffic through corporate API gateways
+- Connecting to self-hosted, API-compatible endpoints
+- Targeting alternative API endpoints
+
+Example configuration:
+
+```json
+{
+  "providers": {
+    "openai": {
+      "apiKey": "your-api-key",
+      "baseURL": "https://your-openai-proxy.com/v1"
+    }
+  }
+}
+```
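+
+For example, a self-hosted, OpenAI-compatible server can be reached through the `local` provider in the same way (the URL below is a placeholder for wherever your server listens):
+
+```json
+{
+  "providers": {
+    "local": {
+      "baseURL": "http://localhost:8080/v1"
+    }
+  }
+}
+```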
+
+#### Custom Headers
+
+You can also add custom headers to provider requests (currently supported for OpenAI-compatible providers and Gemini):
+
+```json
+{
+  "providers": {
+    "openai": {
+      "apiKey": "your-api-key",
+      "baseURL": "https://your-proxy.com/v1",
+      "headers": {
+        "X-Auth-Token": "additional-auth-token",
+        "X-Organization": "your-org"
+      }
+    }
+  }
+}
+```
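+
+Gemini accepts the same `headers` map; the header name and value below are illustrative only:
+
+```json
+{
+  "providers": {
+    "gemini": {
+      "apiKey": "your-api-key",
+      "headers": {
+        "X-Gateway-Key": "gateway-token"
+      }
+    }
+  }
+}
+```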
+
 ## Supported AI Models
 
 OpenCode supports a variety of AI models from different providers:
diff --git a/cmd/root.go b/cmd/root.go
index 3a58cec4..9456e900 100644
--- a/cmd/root.go
+++ b/cmd/root.go
@@ -3,7 +3,10 @@ package cmd
 import (
 	"context"
 	"fmt"
+	"io"
 	"os"
+	"path/filepath"
+	"strings"
 	"sync"
 	"time"
 
@@ -45,6 +48,12 @@ to assist developers in writing, debugging, and understanding code directly from
 
   # Run a single non-interactive prompt with JSON output format
   opencode -p "Explain the use of context in Go" -f json
+
+  # Run prompt from a markdown file
+  opencode --prompt-file /path/to/prompt.md
+
+  # Use custom configuration file
+  opencode --config /path/to/custom-config.json
 `,
 	RunE: func(cmd *cobra.Command, args []string) error {
 		// If the help flag is set, show the help message
@@ -61,14 +70,78 @@ to assist developers in writing, debugging, and understanding code directly from
 		debug, _ := cmd.Flags().GetBool("debug")
 		cwd, _ := cmd.Flags().GetString("cwd")
 		prompt, _ := cmd.Flags().GetString("prompt")
+		promptFile, _ := cmd.Flags().GetString("prompt-file")
+		configFile, _ := cmd.Flags().GetString("config")
 		outputFormat, _ := cmd.Flags().GetString("output-format")
 		quiet, _ := cmd.Flags().GetBool("quiet")
 
+		// Validate that only one prompt option is provided
+		if prompt != "" && promptFile != "" {
+			return fmt.Errorf("cannot use both --prompt and --prompt-file options at the same time")
+		}
+
+		// Validate config file if specified
+		if configFile != "" {
+			// Validate file extension
+			if !strings.HasSuffix(strings.ToLower(configFile), ".json") {
+				return fmt.Errorf("config file must have .json extension")
+			}
+
+			// Check if file exists
+			if _, err := os.Stat(configFile); os.IsNotExist(err) {
+				return fmt.Errorf("config file does not exist: %s", configFile)
+			}
+
+			// Convert relative path to absolute
+			absPath, err := filepath.Abs(configFile)
+			if err != nil {
+				return fmt.Errorf("failed to resolve absolute path for config file: %v", err)
+			}
+			configFile = absPath
+		}
+
 		// Validate format option
 		if !format.IsValid(outputFormat) {
 			return fmt.Errorf("invalid format option: %s\n%s", outputFormat, format.GetHelpText())
 		}
 
+		// Load prompt from file if specified
+		if promptFile != "" {
+			// Validate file extension
+			if !strings.HasSuffix(strings.ToLower(promptFile), ".md") {
+				return fmt.Errorf("prompt file must have .md extension")
+			}
+
+			// Check if file exists
+			if _, err := os.Stat(promptFile); os.IsNotExist(err) {
+				return fmt.Errorf("prompt file does not exist: %s", promptFile)
+			}
+
+			// Convert relative path to absolute
+			absPath, err := filepath.Abs(promptFile)
+			if err != nil {
+				return fmt.Errorf("failed to resolve absolute path for prompt file: %v", err)
+			}
+
+			// Read file content
+			file, err := os.Open(absPath)
+			if err != nil {
+				return fmt.Errorf("failed to open prompt file: %v", err)
+			}
+			defer file.Close()
+
+			content, err := io.ReadAll(file)
+			if err != nil {
+				return fmt.Errorf("failed to read prompt file: %v", err)
+			}
+
+			if len(content) == 0 {
+				return fmt.Errorf("prompt file is empty")
+			}
+
+			prompt = string(content)
+		}
+
 		if cwd != "" {
 			err := os.Chdir(cwd)
 			if err != nil {
@@ -82,7 +155,7 @@ to assist developers in writing, debugging, and understanding code directly from
 			}
 			cwd = c
 		}
-		_, err := config.Load(cwd, debug)
+		_, err := config.Load(cwd, debug, configFile)
 		if err != nil {
 			return err
 		}
@@ -294,6 +367,8 @@ func init() {
 	rootCmd.Flags().BoolP("debug", "d", false, "Debug")
 	rootCmd.Flags().StringP("cwd", "c", "", "Current working directory")
 	rootCmd.Flags().StringP("prompt", "p", "", "Prompt to run in non-interactive mode")
+	rootCmd.Flags().String("prompt-file", "", "Markdown file containing prompt to run in non-interactive mode")
+	rootCmd.Flags().String("config", "", "Path to custom configuration file (.json)")
 
 	// Add format flag with validation logic
 	rootCmd.Flags().StringP("output-format", "f", format.Text.String(),
diff --git a/internal/config/config.go b/internal/config/config.go
index 5a0905bb..0c07dfd8 100644
--- a/internal/config/config.go
+++ b/internal/config/config.go
@@ -51,8 +51,10 @@ type Agent struct {
 
 // Provider defines configuration for an LLM provider.
 type Provider struct {
-	APIKey   string `json:"apiKey"`
-	Disabled bool   `json:"disabled"`
+	APIKey   string            `json:"apiKey"`
+	BaseURL  string            `json:"baseURL,omitempty"`
+	Headers  map[string]string `json:"headers,omitempty"`
+	Disabled bool              `json:"disabled"`
 }
 
 // Data defines storage configuration.
@@ -123,8 +125,9 @@ var cfg *Config
 
 // Load initializes the configuration from environment variables and config files.
 // If debug is true, debug mode is enabled and log level is set to debug.
+// If configFile is provided, it will be used instead of the default config file locations.
 // It returns an error if configuration loading fails.
-func Load(workingDir string, debug bool) (*Config, error) {
+func Load(workingDir string, debug bool, configFile string) (*Config, error) {
 	if cfg != nil {
 		return cfg, nil
 	}
@@ -136,7 +139,7 @@ func Load(workingDir string, debug bool) (*Config, error) {
 		LSP: make(map[string]LSPConfig),
 	}
 
-	configureViper()
+	configureViper(configFile)
 	setDefaults(debug)
 
 	// Read global config
@@ -207,12 +210,19 @@ func Load(workingDir string, debug bool) (*Config, error) {
 }
 
 // configureViper sets up viper's configuration paths and environment variables.
-func configureViper() {
-	viper.SetConfigName(fmt.Sprintf(".%s", appName))
-	viper.SetConfigType("json")
-	viper.AddConfigPath("$HOME")
-	viper.AddConfigPath(fmt.Sprintf("$XDG_CONFIG_HOME/%s", appName))
-	viper.AddConfigPath(fmt.Sprintf("$HOME/.config/%s", appName))
+// If configFile is provided, it will be used instead of the default config file locations.
+func configureViper(configFile string) {
+	if configFile != "" {
+		// Use the specific config file provided
+		viper.SetConfigFile(configFile)
+	} else {
+		// Use default config file locations
+		viper.SetConfigName(fmt.Sprintf(".%s", appName))
+		viper.SetConfigType("json")
+		viper.AddConfigPath("$HOME")
+		viper.AddConfigPath(fmt.Sprintf("$XDG_CONFIG_HOME/%s", appName))
+		viper.AddConfigPath(fmt.Sprintf("$HOME/.config/%s", appName))
+	}
 	viper.SetEnvPrefix(strings.ToUpper(appName))
 	viper.AutomaticEnv()
 }
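Putting the config changes together: a standalone file passed via `--config` might look like the sketch below. The keys mirror the `Provider` struct above; the URL and header values are placeholders, and other top-level sections (agents, LSP, etc.) are omitted here.

```json
{
  "providers": {
    "openai": {
      "apiKey": "your-api-key",
      "baseURL": "https://gateway.example.com/v1",
      "headers": {
        "X-Custom-Header": "value"
      },
      "disabled": false
    }
  }
}
```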
diff --git a/internal/llm/agent/agent.go b/internal/llm/agent/agent.go
index 4f31fe75..799b1d9c 100644
--- a/internal/llm/agent/agent.go
+++ b/internal/llm/agent/agent.go
@@ -715,20 +715,58 @@ func createAgentProvider(agentName config.AgentName) (provider.Provider, error)
 		provider.WithSystemMessage(prompt.GetAgentPrompt(agentName, model.Provider)),
 		provider.WithMaxTokens(maxTokens),
 	}
-	if model.Provider == models.ProviderOpenAI || model.Provider == models.ProviderLocal && model.CanReason {
-		opts = append(
-			opts,
-			provider.WithOpenAIOptions(
-				provider.WithReasoningEffort(agentConfig.ReasoningEffort),
-			),
-		)
-	} else if model.Provider == models.ProviderAnthropic && model.CanReason && agentName == config.AgentCoder {
-		opts = append(
-			opts,
-			provider.WithAnthropicOptions(
-				provider.WithAnthropicShouldThinkFn(provider.DefaultShouldThinkFn),
-			),
-		)
+
+	// Apply provider-specific options based on configuration
+	switch model.Provider {
+	case models.ProviderOpenAI, models.ProviderLocal, models.ProviderGROQ, models.ProviderOpenRouter, models.ProviderXAI:
+		openAIOpts := []provider.OpenAIOption{}
+
+		// Add BaseURL if configured
+		if providerCfg.BaseURL != "" {
+			openAIOpts = append(openAIOpts, provider.WithOpenAIBaseURL(providerCfg.BaseURL))
+		}
+
+		// Add Headers if configured
+		if len(providerCfg.Headers) > 0 {
+			openAIOpts = append(openAIOpts, provider.WithOpenAIExtraHeaders(providerCfg.Headers))
+		}
+
+		// Add reasoning effort if applicable
+		if (model.Provider == models.ProviderOpenAI || model.Provider == models.ProviderLocal) && model.CanReason {
+			openAIOpts = append(openAIOpts, provider.WithReasoningEffort(agentConfig.ReasoningEffort))
+		}
+
+		if len(openAIOpts) > 0 {
+			opts = append(opts, provider.WithOpenAIOptions(openAIOpts...))
+		}
+
+	case models.ProviderAnthropic:
+		if model.CanReason && agentName == config.AgentCoder {
+			opts = append(
+				opts,
+				provider.WithAnthropicOptions(
+					provider.WithAnthropicShouldThinkFn(provider.DefaultShouldThinkFn),
+				),
+			)
+		}
+		// TODO: Add BaseURL support for Anthropic when available
+
+	case models.ProviderGemini:
+		geminiOpts := []provider.GeminiOption{}
+
+		// Add BaseURL if configured
+		if providerCfg.BaseURL != "" {
+			geminiOpts = append(geminiOpts, provider.WithGeminiBaseURL(providerCfg.BaseURL))
+		}
+
+		// Add Headers if configured
+		if len(providerCfg.Headers) > 0 {
+			geminiOpts = append(geminiOpts, provider.WithGeminiExtraHeaders(providerCfg.Headers))
+		}
+
+		if len(geminiOpts) > 0 {
+			opts = append(opts, provider.WithGeminiOptions(geminiOpts...))
+		}
 	}
 
 	agentProvider, err := provider.NewProvider(
 		model.Provider,
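The `WithOpenAIBaseURL`/`WithGeminiBaseURL`-style helpers above follow Go's functional options pattern: each returns a closure that mutates an options struct, and later options override earlier ones. A minimal, self-contained sketch of the pattern (all names below are illustrative, not from the codebase):

```go
package main

import "fmt"

// clientOptions holds settings that option functions can mutate.
type clientOptions struct {
	baseURL string
	headers map[string]string
}

// Option mutates a clientOptions value; options are applied in order,
// so a user-supplied base URL overrides the default.
type Option func(*clientOptions)

func WithBaseURL(u string) Option {
	return func(o *clientOptions) { o.baseURL = u }
}

func WithHeaders(h map[string]string) Option {
	return func(o *clientOptions) { o.headers = h }
}

// newClient starts from a default configuration and applies each option.
func newClient(opts ...Option) clientOptions {
	cfg := clientOptions{baseURL: "https://api.example.com/v1"} // default
	for _, opt := range opts {
		opt(&cfg)
	}
	return cfg
}

func main() {
	cfg := newClient(
		WithBaseURL("https://proxy.internal/v1"),
		WithHeaders(map[string]string{"X-Auth-Token": "token"}),
	)
	fmt.Println(cfg.baseURL, cfg.headers)
}
```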
diff --git a/internal/llm/provider/gemini.go b/internal/llm/provider/gemini.go
index ebc36119..85b74f3a 100644
--- a/internal/llm/provider/gemini.go
+++ b/internal/llm/provider/gemini.go
@@ -6,6 +6,7 @@ import (
 	"errors"
 	"fmt"
 	"io"
+	"net/http"
 	"strings"
 	"time"
 
@@ -19,6 +20,8 @@ import (
 
 type geminiOptions struct {
 	disableCache bool
+	baseURL      string
+	headers      map[string]string
 }
 
 type GeminiOption func(*geminiOptions)
@@ -37,7 +40,30 @@ func newGeminiClient(opts providerClientOptions) GeminiClient {
 		o(&geminiOpts)
 	}
 
-	client, err := genai.NewClient(context.Background(), &genai.ClientConfig{APIKey: opts.apiKey, Backend: genai.BackendGeminiAPI})
+	clientConfig := &genai.ClientConfig{
+		APIKey:  opts.apiKey,
+		Backend: genai.BackendGeminiAPI,
+	}
+
+	// Apply HTTP options if custom base URL or headers are provided
+	if geminiOpts.baseURL != "" || len(geminiOpts.headers) > 0 {
+		httpOptions := genai.HTTPOptions{}
+
+		if geminiOpts.baseURL != "" {
+			httpOptions.BaseURL = geminiOpts.baseURL
+		}
+
+		if len(geminiOpts.headers) > 0 {
+			httpOptions.Headers = make(http.Header)
+			for key, value := range geminiOpts.headers {
+				httpOptions.Headers.Set(key, value)
+			}
+		}
+
+		clientConfig.HTTPOptions = httpOptions
+	}
+
+	client, err := genai.NewClient(context.Background(), clientConfig)
 	if err != nil {
 		logging.Error("Failed to create Gemini client", "error", err)
 		return nil
@@ -463,6 +489,18 @@ func WithGeminiDisableCache() GeminiOption {
 	}
 }
 
+func WithGeminiBaseURL(baseURL string) GeminiOption {
+	return func(options *geminiOptions) {
+		options.baseURL = baseURL
+	}
+}
+
+func WithGeminiExtraHeaders(headers map[string]string) GeminiOption {
+	return func(options *geminiOptions) {
+		options.headers = headers
+	}
+}
+
 // Helper functions
 func parseJsonToMap(jsonStr string) (map[string]interface{}, error) {
 	var result map[string]interface{}
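The provider.go changes below repeat the same base-URL probe in four switch cases: each option is applied to a throwaway options struct so its effect can be inspected without touching the real options. As a hedged sketch (the helper name is hypothetical, not part of the patch), the loop could be factored out using only types already present in the file:

```go
// hasOpenAIBaseURL reports whether any accumulated option already sets
// a base URL. Each option is applied to a scratch openaiOptions value,
// mirroring the inline loops in the patch below.
// Hypothetical helper; the patch currently inlines this loop per case.
func hasOpenAIBaseURL(opts []OpenAIOption) bool {
	for _, opt := range opts {
		probe := &openaiOptions{}
		opt(probe)
		if probe.baseURL != "" {
			return true
		}
	}
	return false
}
```

Each case could then reduce to `if !hasOpenAIBaseURL(clientOptions.openaiOptions) { ... }`.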
WithOpenAIBaseURL("https://api.x.ai/v1"), - ) + // Check if baseURL was already set + hasBaseURL := false + for _, opt := range clientOptions.openaiOptions { + testOpts := &openaiOptions{} + opt(testOpts) + if testOpts.baseURL != "" { + hasBaseURL = true + break + } + } + if !hasBaseURL { + clientOptions.openaiOptions = append(clientOptions.openaiOptions, + WithOpenAIBaseURL("https://api.x.ai/v1"), + ) + } return &baseProvider[OpenAIClient]{ options: clientOptions, client: newOpenAIClient(clientOptions), }, nil case models.ProviderLocal: - clientOptions.openaiOptions = append(clientOptions.openaiOptions, - WithOpenAIBaseURL(os.Getenv("LOCAL_ENDPOINT")), - ) + // Check if baseURL was already set via options + hasBaseURL := false + for _, opt := range clientOptions.openaiOptions { + testOpts := &openaiOptions{} + opt(testOpts) + if testOpts.baseURL != "" { + hasBaseURL = true + break + } + } + // If no baseURL in options, use LOCAL_ENDPOINT env var + if !hasBaseURL { + localEndpoint := os.Getenv("LOCAL_ENDPOINT") + if localEndpoint != "" { + clientOptions.openaiOptions = append(clientOptions.openaiOptions, + WithOpenAIBaseURL(localEndpoint), + ) + } + } return &baseProvider[OpenAIClient]{ options: clientOptions, client: newOpenAIClient(clientOptions),