From dca15e871644569362064e4736d2fdd59b93b280 Mon Sep 17 00:00:00 2001 From: Jay Miracola Date: Wed, 17 Sep 2025 09:55:18 -0400 Subject: [PATCH 1/2] add config for compatible local models, switch to NewOpenAIFunctionsAgent for better parsing Signed-off-by: Jay Miracola --- README.md | 8 ++++++++ fn.go | 60 ++++++++++++++++++++++++++++++++++++++---------------- fn_test.go | 10 ++++----- 3 files changed, 56 insertions(+), 22 deletions(-) diff --git a/README.md b/README.md index 2daafba..4df356e 100644 --- a/README.md +++ b/README.md @@ -145,6 +145,8 @@ There are a few steps to get this going. development. ```bash export OPENAI_API_KEY_B64=$(echo ${OPENAI_API_KEY} | base64) +# Optional: export OPENAI_BASE_URL_B64=$(echo ${OPENAI_BASE_URL} | base64) +# Optional: export OPENAI_MODEL_B64=$(echo ${OPENAI_MODEL} | base64) cat <<EOF > example/secret.yaml apiVersion: v1 @@ -154,6 +156,12 @@ cat <<EOF > example/secret.yaml namespace: crossplane-system data: OPENAI_API_KEY: ${OPENAI_API_KEY_B64} + # OPENAI_BASE_URL: ${OPENAI_BASE_URL_B64} + # Optional: Use custom OpenAI-compatible endpoint + # Example: http://localhost:11434/v1 + # OPENAI_MODEL: ${OPENAI_MODEL_B64} + # Optional: Use custom model (defaults to gpt-4) + # Example: gpt-oss:20b EOF ``` diff --git a/fn.go b/fn.go index 6f74be3..71d441e 100644 --- a/fn.go +++ b/fn.go @@ -44,8 +44,11 @@ import ( ) const ( - credName = "gpt" - credKey = "OPENAI_API_KEY" + credName = "gpt" + credKey = "OPENAI_API_KEY" + credBaseURLKey = "OPENAI_BASE_URL" + credModelKey = "OPENAI_MODEL" + defaultModel = "gpt-4" ) // Variables used to form the prompt. @@ -68,7 +71,7 @@ type Function struct { // agentInvoker is a consumer interface for working with agents. Notably this // is helpful for writing tests that mock the agent invocations. 
type agentInvoker interface { - Invoke(ctx context.Context, key, system, prompt string) (string, error) + Invoke(ctx context.Context, key, system, prompt, baseURL, modelName string) (string, error) } // Option modifies the underlying Function. @@ -137,11 +140,26 @@ func (f *Function) RunFunction(ctx context.Context, req *fnv1.RunFunctionRequest // TODO(negz): Where the heck is the newline at the end of this key // coming from? Bug in crossplane render? key := strings.Trim(string(b), "\n") + + // Extract optional base URL from credentials + var baseURL string + if baseURLBytes, ok := c.Data[credBaseURLKey]; ok { + baseURL = strings.Trim(string(baseURLBytes), "\n") + } + + // Extract optional model from credentials, default to gpt-4 + model := defaultModel + if modelBytes, ok := c.Data[credModelKey]; ok { + model = strings.Trim(string(modelBytes), "\n") + } + d := pipelineDetails{ - req: req, - rsp: rsp, - in: in, - cred: key, + req: req, + rsp: rsp, + in: in, + cred: key, + baseURL: baseURL, + model: model, } // If we're in a composition pipeline we want to do things with the @@ -275,6 +293,10 @@ type pipelineDetails struct { in *v1alpha1.Prompt // LLM API credential cred string + // Optional base URL for OpenAI API + baseURL string + // Optional model name, defaults to gpt-4 + model string } // compositionPipeline processes the given pipelineDetails with the assumption @@ -308,7 +330,7 @@ func (f *Function) compositionPipeline(ctx context.Context, log logging.Logger, log.Debug("Using prompt", "prompt", pb.String()) - resp, err := f.ai.Invoke(ctx, d.cred, d.in.SystemPrompt, pb.String()) + resp, err := f.ai.Invoke(ctx, d.cred, d.in.SystemPrompt, pb.String(), d.baseURL, d.model) if err != nil { response.Fatal(d.rsp, errors.Wrap(err, "failed to run chain")) @@ -377,7 +399,7 @@ func (f *Function) operationPipeline(ctx context.Context, log logging.Logger, d log.Debug("Using prompt", "prompt", vars.String()) - resp, err := f.ai.Invoke(ctx, d.cred, d.in.SystemPrompt, 
vars.String()) + resp, err := f.ai.Invoke(ctx, d.cred, d.in.SystemPrompt, vars.String(), d.baseURL, d.model) if err != nil { response.Fatal(d.rsp, errors.Wrap(err, "failed to run chain")) @@ -418,19 +440,23 @@ type agent struct { // Invoke makes an external call to the configured LLM with the supplied // credential key, system and user prompts. -func (a *agent) Invoke(ctx context.Context, key, system, prompt string) (string, error) { - model, err := openaillm.New( +func (a *agent) Invoke(ctx context.Context, key, system, prompt, baseURL, modelName string) (string, error) { + opts := []openaillm.Option{ openaillm.WithToken(key), - // NOTE(tnthornton): gpt-4 is noticeably slow compared to gpt-4o, but - // gpt-4o is sending input back that the agent is having trouble - // parsing. More to dig into here before switching. - openaillm.WithModel("gpt-4"), - ) + openaillm.WithModel(modelName), + } + + // Add custom base URL if provided + if baseURL != "" { + opts = append(opts, openaillm.WithBaseURL(baseURL)) + } + + model, err := openaillm.New(opts...) 
if err != nil { return "", errors.Wrap(err, "failed to build model") } - agent := agents.NewOneShotAgent( + agent := agents.NewOpenAIFunctionsAgent( model, a.tools(ctx), agents.WithMaxIterations(20), diff --git a/fn_test.go b/fn_test.go index c0c9fa5..80e4d94 100644 --- a/fn_test.go +++ b/fn_test.go @@ -117,7 +117,7 @@ func TestRunFunction(t *testing.T) { reason: "We should go through the composition pipeline without error.", args: args{ ai: &mockAgentInvoker{ - InvokeFn: func(_ context.Context, _, _, _ string) (string, error) { + InvokeFn: func(_ context.Context, _, _, _, _, _ string) (string, error) { return `--- apiVersion: some.group/v1 metadata: @@ -200,7 +200,7 @@ metadata: reason: "We should go through the operation pipeline without error.", args: args{ ai: &mockAgentInvoker{ - InvokeFn: func(_ context.Context, _, _, _ string) (string, error) { + InvokeFn: func(_ context.Context, _, _, _, _, _ string) (string, error) { return `some-response`, nil }, }, @@ -281,9 +281,9 @@ func mockCredentials() map[string]*fnv1.Credentials { } type mockAgentInvoker struct { - InvokeFn func(ctx context.Context, key, system, prompt string) (string, error) + InvokeFn func(ctx context.Context, key, system, prompt, baseURL, modelName string) (string, error) } -func (m *mockAgentInvoker) Invoke(ctx context.Context, key, system, prompt string) (string, error) { - return m.InvokeFn(ctx, key, system, prompt) +func (m *mockAgentInvoker) Invoke(ctx context.Context, key, system, prompt, baseURL, modelName string) (string, error) { + return m.InvokeFn(ctx, key, system, prompt, baseURL, modelName) } From 086db8cab314e279faf05173eec90f38dbbd2bd2 Mon Sep 17 00:00:00 2001 From: Jay Miracola Date: Wed, 17 Sep 2025 13:14:53 -0400 Subject: [PATCH 2/2] gofmt Signed-off-by: Jay Miracola --- fn.go | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/fn.go b/fn.go index 71d441e..f3ad96d 100644 --- a/fn.go +++ b/fn.go @@ -44,11 +44,11 @@ import ( ) const ( - credName = "gpt" 
- credKey = "OPENAI_API_KEY" - credBaseURLKey = "OPENAI_BASE_URL" - credModelKey = "OPENAI_MODEL" - defaultModel = "gpt-4" + credName = "gpt" + credKey = "OPENAI_API_KEY" + credBaseURLKey = "OPENAI_BASE_URL" + credModelKey = "OPENAI_MODEL" + defaultModel = "gpt-4" ) // Variables used to form the prompt.