Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -48,16 +48,30 @@ func ConvertOpenAIRequestToAntigravity(modelName string, inputRawJSON []byte, _
out, _ = sjson.SetBytes(out, "request.generationConfig.thinkingConfig.thinkingBudget", -1)
out, _ = sjson.SetBytes(out, "request.generationConfig.thinkingConfig.include_thoughts", true)
case "low":
out, _ = sjson.SetBytes(out, "request.generationConfig.thinkingConfig.thinkingBudget", util.NormalizeThinkingBudget(modelName, 1024))
if util.IsGemini3Model(modelName) {
out, _ = sjson.SetBytes(out, "request.generationConfig.thinkingConfig.thinkingLevel", "low")
} else {
out, _ = sjson.SetBytes(out, "request.generationConfig.thinkingConfig.thinkingBudget", util.NormalizeThinkingBudget(modelName, 1024))
}
out, _ = sjson.SetBytes(out, "request.generationConfig.thinkingConfig.include_thoughts", true)
case "medium":
out, _ = sjson.SetBytes(out, "request.generationConfig.thinkingConfig.thinkingBudget", util.NormalizeThinkingBudget(modelName, 8192))
if !util.IsGemini3Model(modelName) {
out, _ = sjson.SetBytes(out, "request.generationConfig.thinkingConfig.thinkingBudget", util.NormalizeThinkingBudget(modelName, 8192))
}
// Gemini 3: no thinkingLevel for medium, uses dynamic thinking (auto)
out, _ = sjson.SetBytes(out, "request.generationConfig.thinkingConfig.include_thoughts", true)
case "high":
out, _ = sjson.SetBytes(out, "request.generationConfig.thinkingConfig.thinkingBudget", util.NormalizeThinkingBudget(modelName, 32768))
if util.IsGemini3Model(modelName) {
out, _ = sjson.SetBytes(out, "request.generationConfig.thinkingConfig.thinkingLevel", "high")
} else {
out, _ = sjson.SetBytes(out, "request.generationConfig.thinkingConfig.thinkingBudget", util.NormalizeThinkingBudget(modelName, 32768))
}
Comment on lines +51 to +68

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

medium

The string literals "low" (line 52) and "high" (line 65) for thinkingLevel are magic strings. It's good practice to define these as constants in a shared package (e.g., util) to ensure consistency and prevent typos. This issue is also present in the other translator files modified in this PR.

Example:

// In a relevant package (e.g., util)
const (
    ThinkingLevelLow = "low"
    ThinkingLevelHigh = "high"
)

You could then use util.ThinkingLevelLow and util.ThinkingLevelHigh.

out, _ = sjson.SetBytes(out, "request.generationConfig.thinkingConfig.include_thoughts", true)
default:
out, _ = sjson.SetBytes(out, "request.generationConfig.thinkingConfig.thinkingBudget", -1)
if !util.IsGemini3Model(modelName) {
out, _ = sjson.SetBytes(out, "request.generationConfig.thinkingConfig.thinkingBudget", -1)
}
// Gemini 3: no thinkingLevel for auto/default, uses dynamic thinking
out, _ = sjson.SetBytes(out, "request.generationConfig.thinkingConfig.include_thoughts", true)
}
Comment on lines 50 to 76

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

high

The logic within these case statements is identical to the logic in internal/translator/gemini-cli/openai/chat-completions/gemini-cli_openai_request.go. Additionally, the logic for setting the default thinkingConfig for Gemini 3 models on lines 108-110 is also duplicated. This creates a maintenance burden, as any future changes will need to be applied in both places.

Consider extracting this duplicated logic into a shared helper function in a common package like internal/util to improve maintainability.

}
Expand Down Expand Up @@ -88,12 +102,10 @@ func ConvertOpenAIRequestToAntigravity(modelName string, inputRawJSON []byte, _
}
}

// For gemini-3-pro-preview, always send default thinkingConfig when none specified.
// This matches the official Gemini CLI behavior which always sends:
// { thinkingBudget: -1, includeThoughts: true }
// See: ai-gemini-cli/packages/core/src/config/defaultModelConfigs.ts
if !gjson.GetBytes(out, "request.generationConfig.thinkingConfig").Exists() && modelName == "gemini-3-pro-preview" {
out, _ = sjson.SetBytes(out, "request.generationConfig.thinkingConfig.thinkingBudget", -1)
// For Gemini 3 models, enable thought summaries when no thinkingConfig is specified.
// Don't set thinkingLevel - let API use dynamic thinking by default.
// See: https://ai.google.dev/gemini-api/docs/thinking#thinking-levels
if !gjson.GetBytes(out, "request.generationConfig.thinkingConfig").Exists() && util.IsGemini3Model(modelName) {
out, _ = sjson.SetBytes(out, "request.generationConfig.thinkingConfig.include_thoughts", true)
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -48,16 +48,30 @@ func ConvertOpenAIRequestToGeminiCLI(modelName string, inputRawJSON []byte, _ bo
out, _ = sjson.SetBytes(out, "request.generationConfig.thinkingConfig.thinkingBudget", -1)
out, _ = sjson.SetBytes(out, "request.generationConfig.thinkingConfig.include_thoughts", true)
case "low":
out, _ = sjson.SetBytes(out, "request.generationConfig.thinkingConfig.thinkingBudget", util.NormalizeThinkingBudget(modelName, 1024))
if util.IsGemini3Model(modelName) {
out, _ = sjson.SetBytes(out, "request.generationConfig.thinkingConfig.thinkingLevel", "low")
} else {
out, _ = sjson.SetBytes(out, "request.generationConfig.thinkingConfig.thinkingBudget", util.NormalizeThinkingBudget(modelName, 1024))
}
out, _ = sjson.SetBytes(out, "request.generationConfig.thinkingConfig.include_thoughts", true)
case "medium":
out, _ = sjson.SetBytes(out, "request.generationConfig.thinkingConfig.thinkingBudget", util.NormalizeThinkingBudget(modelName, 8192))
if !util.IsGemini3Model(modelName) {
out, _ = sjson.SetBytes(out, "request.generationConfig.thinkingConfig.thinkingBudget", util.NormalizeThinkingBudget(modelName, 8192))
}
// Gemini 3: no thinkingLevel for medium, uses dynamic thinking (auto)
out, _ = sjson.SetBytes(out, "request.generationConfig.thinkingConfig.include_thoughts", true)
case "high":
out, _ = sjson.SetBytes(out, "request.generationConfig.thinkingConfig.thinkingBudget", util.NormalizeThinkingBudget(modelName, 32768))
if util.IsGemini3Model(modelName) {
out, _ = sjson.SetBytes(out, "request.generationConfig.thinkingConfig.thinkingLevel", "high")
} else {
out, _ = sjson.SetBytes(out, "request.generationConfig.thinkingConfig.thinkingBudget", util.NormalizeThinkingBudget(modelName, 32768))
}
out, _ = sjson.SetBytes(out, "request.generationConfig.thinkingConfig.include_thoughts", true)
default:
out, _ = sjson.SetBytes(out, "request.generationConfig.thinkingConfig.thinkingBudget", -1)
if !util.IsGemini3Model(modelName) {
out, _ = sjson.SetBytes(out, "request.generationConfig.thinkingConfig.thinkingBudget", -1)
}
// Gemini 3: no thinkingLevel for auto/default, uses dynamic thinking
out, _ = sjson.SetBytes(out, "request.generationConfig.thinkingConfig.include_thoughts", true)
}
}
Expand Down Expand Up @@ -88,12 +102,10 @@ func ConvertOpenAIRequestToGeminiCLI(modelName string, inputRawJSON []byte, _ bo
}
}

// For gemini-3-pro-preview, always send default thinkingConfig when none specified.
// This matches the official Gemini CLI behavior which always sends:
// { thinkingBudget: -1, includeThoughts: true }
// See: ai-gemini-cli/packages/core/src/config/defaultModelConfigs.ts
if !gjson.GetBytes(out, "request.generationConfig.thinkingConfig").Exists() && modelName == "gemini-3-pro-preview" {
out, _ = sjson.SetBytes(out, "request.generationConfig.thinkingConfig.thinkingBudget", -1)
// For Gemini 3 models, enable thought summaries when no thinkingConfig is specified.
// Don't set thinkingLevel - let API use dynamic thinking by default.
// See: https://ai.google.dev/gemini-api/docs/thinking#thinking-levels
if !gjson.GetBytes(out, "request.generationConfig.thinkingConfig").Exists() && util.IsGemini3Model(modelName) {
out, _ = sjson.SetBytes(out, "request.generationConfig.thinkingConfig.include_thoughts", true)
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -398,19 +398,37 @@ func ConvertOpenAIResponsesRequestToGemini(modelName string, inputRawJSON []byte
out, _ = sjson.Set(out, "generationConfig.thinkingConfig.thinkingBudget", -1)
out, _ = sjson.Set(out, "generationConfig.thinkingConfig.include_thoughts", true)
case "minimal":
out, _ = sjson.Set(out, "generationConfig.thinkingConfig.thinkingBudget", util.NormalizeThinkingBudget(modelName, 1024))
if util.IsGemini3Model(modelName) {
out, _ = sjson.Set(out, "generationConfig.thinkingConfig.thinkingLevel", "low")
} else {
out, _ = sjson.Set(out, "generationConfig.thinkingConfig.thinkingBudget", util.NormalizeThinkingBudget(modelName, 1024))
}
out, _ = sjson.Set(out, "generationConfig.thinkingConfig.include_thoughts", true)
case "low":
out, _ = sjson.Set(out, "generationConfig.thinkingConfig.thinkingBudget", util.NormalizeThinkingBudget(modelName, 4096))
if util.IsGemini3Model(modelName) {
out, _ = sjson.Set(out, "generationConfig.thinkingConfig.thinkingLevel", "low")
} else {
out, _ = sjson.Set(out, "generationConfig.thinkingConfig.thinkingBudget", util.NormalizeThinkingBudget(modelName, 4096))
}
out, _ = sjson.Set(out, "generationConfig.thinkingConfig.include_thoughts", true)
Comment on lines 400 to 413

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

medium

The case "minimal" and case "low" blocks are very similar. For Gemini 3 models, they both set thinkingLevel to "low", and they both set include_thoughts to true. You can combine these cases to reduce code duplication and make the logic more concise.

case "minimal", "low":
	if util.IsGemini3Model(modelName) {
		out, _ = sjson.Set(out, "generationConfig.thinkingConfig.thinkingLevel", "low")
	} else {
		budget := 1024
		if reasoningEffort.String() == "low" {
			budget = 4096
		}
		out, _ = sjson.Set(out, "generationConfig.thinkingConfig.thinkingBudget", util.NormalizeThinkingBudget(modelName, budget))
	}
	out, _ = sjson.Set(out, "generationConfig.thinkingConfig.include_thoughts", true)

case "medium":
out, _ = sjson.Set(out, "generationConfig.thinkingConfig.thinkingBudget", util.NormalizeThinkingBudget(modelName, 8192))
if !util.IsGemini3Model(modelName) {
out, _ = sjson.Set(out, "generationConfig.thinkingConfig.thinkingBudget", util.NormalizeThinkingBudget(modelName, 8192))
}
// Gemini 3: no thinkingLevel for medium, uses dynamic thinking (auto)
out, _ = sjson.Set(out, "generationConfig.thinkingConfig.include_thoughts", true)
case "high":
out, _ = sjson.Set(out, "generationConfig.thinkingConfig.thinkingBudget", util.NormalizeThinkingBudget(modelName, 32768))
if util.IsGemini3Model(modelName) {
out, _ = sjson.Set(out, "generationConfig.thinkingConfig.thinkingLevel", "high")
} else {
out, _ = sjson.Set(out, "generationConfig.thinkingConfig.thinkingBudget", util.NormalizeThinkingBudget(modelName, 32768))
}
out, _ = sjson.Set(out, "generationConfig.thinkingConfig.include_thoughts", true)
default:
out, _ = sjson.Set(out, "generationConfig.thinkingConfig.thinkingBudget", -1)
if !util.IsGemini3Model(modelName) {
out, _ = sjson.Set(out, "generationConfig.thinkingConfig.thinkingBudget", -1)
}
// Gemini 3: no thinkingLevel for auto/default, uses dynamic thinking
out, _ = sjson.Set(out, "generationConfig.thinkingConfig.include_thoughts", true)
}
}
Expand All @@ -435,14 +453,11 @@ func ConvertOpenAIResponsesRequestToGemini(modelName string, inputRawJSON []byte
}
}

// For gemini-3-pro-preview, always send default thinkingConfig when none specified.
// This matches the official Gemini CLI behavior which always sends:
// { thinkingBudget: -1, includeThoughts: true }
// See: ai-gemini-cli/packages/core/src/config/defaultModelConfigs.ts
if !gjson.Get(out, "generationConfig.thinkingConfig").Exists() && modelName == "gemini-3-pro-preview" {
out, _ = sjson.Set(out, "generationConfig.thinkingConfig.thinkingBudget", -1)
// For Gemini 3 models, enable thought summaries when no thinkingConfig is specified.
// Don't set thinkingLevel - let API use dynamic thinking by default.
// See: https://ai.google.dev/gemini-api/docs/thinking#thinking-levels
if !gjson.Get(out, "generationConfig.thinkingConfig").Exists() && util.IsGemini3Model(modelName) {
out, _ = sjson.Set(out, "generationConfig.thinkingConfig.include_thoughts", true)
// log.Debugf("Applied default thinkingConfig for gemini-3-pro-preview (matches Gemini CLI): thinkingBudget=-1, include_thoughts=true")
}

result := []byte(out)
Expand Down
6 changes: 6 additions & 0 deletions internal/util/gemini_thinking.go
Original file line number Diff line number Diff line change
Expand Up @@ -259,3 +259,9 @@ func ConvertThinkingLevelToBudget(body []byte) []byte {
}
return updated
}

// IsGemini3Model reports whether model is a Gemini 3 model. Gemini 3
// models configure reasoning via thinkingLevel rather than thinkingBudget,
// so callers use this check to pick the right thinkingConfig fields.
// Matching is case-insensitive on the model-name prefix.
func IsGemini3Model(model string) bool {
	// Named constant instead of a magic string, so the prefix is
	// defined in exactly one place and typos cannot creep in.
	const gemini3Prefix = "gemini-3-"
	return strings.HasPrefix(strings.ToLower(model), gemini3Prefix)
}
Loading