diff --git a/internal/translator/request.go b/internal/translator/request.go
index ab29b67..e8abec1 100644
--- a/internal/translator/request.go
+++ b/internal/translator/request.go
@@ -429,7 +429,65 @@ func transformMessages(req *models.AnthropicRequest, targetModel string) ([]mode
 		messages = append(messages, openAIMsg...)
 	}
 
-	return messages, nil
+	return filterSystemReminders(messages), nil
+}
+
+// filterSystemReminders buffers ALL user messages that appear between
+// assistant tool_calls and tool responses to comply with Azure OpenAI's strict
+// message sequencing requirement: assistant with tool_calls MUST be immediately
+// followed by tool responses, with NO user messages in between.
+func filterSystemReminders(messages []models.OpenAIMessage) []models.OpenAIMessage {
+	filtered := make([]models.OpenAIMessage, 0, len(messages))
+	pendingToolCallIDs := make(map[string]bool)
+	bufferedMessages := []models.OpenAIMessage{}
+
+	for _, msg := range messages {
+		// Assistant with tool_calls - track pending IDs
+		if msg.Role == "assistant" && len(msg.ToolCalls) > 0 {
+			// Flush user messages still buffered from an earlier round whose
+			// tool calls never all resolved, so they are not reordered past this turn.
+			if len(bufferedMessages) > 0 {
+				filtered = append(filtered, bufferedMessages...)
+				bufferedMessages = bufferedMessages[:0]
+			}
+			pendingToolCallIDs = make(map[string]bool)
+			for _, tc := range msg.ToolCalls {
+				pendingToolCallIDs[tc.ID] = true
+			}
+			filtered = append(filtered, msg)
+			continue
+		}
+
+		// Tool response - mark as complete
+		if msg.Role == "tool" && msg.ToolCallID != "" {
+			delete(pendingToolCallIDs, msg.ToolCallID)
+			filtered = append(filtered, msg)
+
+			// If all tool calls resolved, flush buffered messages
+			if len(pendingToolCallIDs) == 0 && len(bufferedMessages) > 0 {
+				filtered = append(filtered, bufferedMessages...)
+				bufferedMessages = bufferedMessages[:0]
+			}
+			continue
+		}
+
+		// Buffer ANY user message while tool calls pending (broader filtering)
+		// This ensures strict Azure OpenAI sequencing: assistant → tool responses → user messages
+		if msg.Role == "user" && len(pendingToolCallIDs) > 0 {
+			bufferedMessages = append(bufferedMessages, msg)
+			continue
+		}
+
+		// All other messages pass through
+		filtered = append(filtered, msg)
+	}
+
+	// Flush any remaining buffered messages
+	if len(bufferedMessages) > 0 {
+		filtered = append(filtered, bufferedMessages...)
+	}
+
+	return filtered
 }
 
 // extractSystemContent extracts the system message content.
diff --git a/package.json b/package.json
index c3ccfed..020022b 100644
--- a/package.json
+++ b/package.json
@@ -1,17 +1,17 @@
 {
-  "name": "clasp-ai",
-  "version": "0.50.23",
-  "description": "Claude Language Agent Super Proxy - Translate Claude/Anthropic API calls to OpenAI-compatible endpoints",
-  "author": "jedarden",
+  "name": "clasp-ai-manditrade",
+  "version": "0.53.1",
+  "description": "CLASP proxy with system-reminder filtering for Azure OpenAI compatibility",
+  "author": "AI-Manditrade",
   "license": "MIT",
   "repository": {
     "type": "git",
-    "url": "https://github.com/jedarden/CLASP.git"
+    "url": "https://github.com/AI-Manditrade/CLASP.git"
   },
   "bugs": {
-    "url": "https://github.com/jedarden/CLASP/issues"
+    "url": "https://github.com/AI-Manditrade/CLASP/issues"
   },
-  "homepage": "https://github.com/jedarden/CLASP#readme",
+  "homepage": "https://github.com/AI-Manditrade/CLASP#readme",
   "keywords": [
     "claude",
     "anthropic",
@@ -46,12 +46,9 @@
     "node": ">=16.0.0"
   },
   "os": [
-    "darwin",
-    "linux",
-    "win32"
+    "linux"
   ],
   "cpu": [
-    "x64",
-    "arm64"
+    "x64"
   ]
 }
diff --git a/scripts/install.js b/scripts/install.js
index 6adc30c..c253603 100644
--- a/scripts/install.js
+++ b/scripts/install.js
@@ -12,8 +12,8 @@
 const path = require('path');
 const { execSync } = require('child_process');
 const os = require('os');
 
-const VERSION = '0.50.23';
-const REPO = 'jedarden/CLASP';
+const VERSION = '0.53.1';
+const REPO = 'AI-Manditrade/CLASP';
 const BINARY_NAME = 'clasp';
 // Platform mappings