Commit 13bb4f9

feat: enhance callModel to accept chat-style messages (#94)
2 parents 856fd27 + eeb715b

File tree

10 files changed, +291 -19 lines changed
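
In practice, the change lets callModel take a chat-completions-style message array directly as input, alongside the existing responses-style input. Below is a minimal usage sketch mirroring the e2e tests added in this commit; the client construction (the apiKey option and environment variable) is an assumption for illustration, not part of the diff.

import { OpenRouter } from "@openrouter/sdk";

// Assumed client setup; check the SDK README for the exact constructor options.
const client = new OpenRouter({ apiKey: process.env.OPENROUTER_API_KEY });

// Chat-style messages (role/content, no "type" field) are now accepted as input
// and converted internally to responses-style items.
const response = client.callModel({
  model: "meta-llama/llama-3.2-1b-instruct",
  input: [
    { role: "system", content: "You are a helpful assistant." },
    { role: "user", content: "Say 'chat test' and nothing else." },
  ],
});

console.log(await response.getText());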

.speakeasy/gen.lock

Lines changed: 2 additions & 2 deletions
@@ -5,8 +5,8 @@ management:
   docVersion: 1.0.0
   speakeasyVersion: 1.660.0
   generationVersion: 2.760.2
-  releaseVersion: 0.1.24
-  configChecksum: 475b25558977e68908a4d0653d872817
+  releaseVersion: 0.1.25
+  configChecksum: ec2472ec2f065c6df73009a241b9f6cb
   repoURL: https://github.com/OpenRouterTeam/typescript-sdk.git
   installationURL: https://github.com/OpenRouterTeam/typescript-sdk
   published: true

.speakeasy/gen.yaml

Lines changed: 1 addition & 1 deletion
@@ -30,7 +30,7 @@ generation:
   generateNewTests: true
   skipResponseBodyAssertions: false
 typescript:
-  version: 0.1.24
+  version: 0.1.25
   acceptHeaderEnum: false
   additionalDependencies:
     dependencies: {}

.speakeasy/workflow.lock

Lines changed: 1 addition & 1 deletion
@@ -14,7 +14,7 @@ targets:
     sourceRevisionDigest: sha256:ffe0e925561a55a1b403667fe33bb3158e05892ef1e66f56211544c9a890b301
     sourceBlobDigest: sha256:18aa7b22686c2f559af1062fea408a9f80146231027ed1fd62b68df38c71f65d
     codeSamplesNamespace: open-router-chat-completions-api-typescript-code-samples
-    codeSamplesRevisionDigest: sha256:f856e6a616f0d8edab5b1a77e49bfd32584caeb323d4ee7b740c6a7791c222fb
+    codeSamplesRevisionDigest: sha256:e88cd9ad795f165e1caced900c25a1fc13e944c9936ed229e43a1140d6c0b52c
 workflow:
   workflowVersion: 1.0.0
   speakeasyVersion: latest

jsr.json

Lines changed: 1 addition & 1 deletion
@@ -2,7 +2,7 @@
 {
   "name": "@openrouter/sdk",
-  "version": "0.1.24",
+  "version": "0.1.25",
   "exports": {
     ".": "./src/index.ts",
     "./models/errors": "./src/models/errors/index.ts",

package-lock.json

Lines changed: 2 additions & 2 deletions
Some generated files are not rendered by default.

package.json

Lines changed: 1 addition & 1 deletion
@@ -1,6 +1,6 @@
 {
   "name": "@openrouter/sdk",
-  "version": "0.1.24",
+  "version": "0.1.25",
   "author": "OpenRouter",
   "description": "The OpenRouter TypeScript SDK is a type-safe toolkit for building AI applications with access to 300+ language models through a unified API.",
   "keywords": [

src/funcs/callModel.ts

Lines changed: 133 additions & 7 deletions
@@ -5,6 +5,110 @@ import * as models from "../models/index.js";
 import { EnhancedTool, MaxToolRounds } from "../lib/tool-types.js";
 import { convertEnhancedToolsToAPIFormat } from "../lib/tool-executor.js";

+/**
+ * Input type that accepts both chat-style messages and responses-style input
+ */
+export type CallModelInput =
+  | models.OpenResponsesInput
+  | models.Message[];
+
+/**
+ * Tool type that accepts chat-style, responses-style, or enhanced tools
+ */
+export type CallModelTools =
+  | EnhancedTool[]
+  | models.ToolDefinitionJson[]
+  | models.OpenResponsesRequest["tools"];
+
+/**
+ * Check if input is chat-style messages (Message[])
+ */
+function isChatStyleMessages(input: CallModelInput): input is models.Message[] {
+  if (!Array.isArray(input)) return false;
+  if (input.length === 0) return false;
+
+  const first = input[0] as any;
+  // Chat-style messages have role but no 'type' field at top level
+  // Responses-style items have 'type' field (like 'message', 'function_call', etc.)
+  return first && 'role' in first && !('type' in first);
+}
+
+/**
+ * Check if tools are chat-style (ToolDefinitionJson[])
+ */
+function isChatStyleTools(tools: CallModelTools): tools is models.ToolDefinitionJson[] {
+  if (!Array.isArray(tools)) return false;
+  if (tools.length === 0) return false;
+
+  const first = tools[0] as any;
+  // Chat-style tools have nested 'function' property with 'name' inside
+  // Enhanced tools have 'function' with 'inputSchema'
+  // Responses-style tools have 'name' at top level
+  return first && 'function' in first && first.function && 'name' in first.function && !('inputSchema' in first.function);
+}
+
+/**
+ * Convert chat-style tools to responses-style
+ */
+function convertChatToResponsesTools(tools: models.ToolDefinitionJson[]): models.OpenResponsesRequest["tools"] {
+  return tools.map((tool): models.OpenResponsesRequestToolFunction => ({
+    type: "function",
+    name: tool.function.name,
+    description: tool.function.description ?? null,
+    strict: tool.function.strict ?? null,
+    parameters: tool.function.parameters ?? null,
+  }));
+}
+
+/**
+ * Convert chat-style messages to responses-style input
+ */
+function convertChatToResponsesInput(messages: models.Message[]): models.OpenResponsesInput {
+  return messages.map((msg): models.OpenResponsesEasyInputMessage | models.OpenResponsesFunctionCallOutput => {
+    // Extract extra fields like cache_control
+    const { role, content, ...extraFields } = msg as any;
+
+    if (role === "tool") {
+      const toolMsg = msg as models.ToolResponseMessage;
+      return {
+        type: "function_call_output",
+        callId: toolMsg.toolCallId,
+        output: typeof toolMsg.content === "string" ? toolMsg.content : JSON.stringify(toolMsg.content),
+        ...extraFields,
+      } as models.OpenResponsesFunctionCallOutput;
+    }
+
+    // Handle assistant messages with tool calls
+    if (role === "assistant") {
+      const assistantMsg = msg as models.AssistantMessage;
+      // If it has tool calls, we need to convert them
+      // For now, just convert the content part
+      return {
+        role: "assistant",
+        content: typeof assistantMsg.content === "string"
+          ? assistantMsg.content
+          : assistantMsg.content === null
+            ? ""
+            : JSON.stringify(assistantMsg.content),
+        ...extraFields,
+      } as models.OpenResponsesEasyInputMessage;
+    }
+
+    // System, user, developer messages
+    const convertedContent = typeof content === "string"
+      ? content
+      : content === null || content === undefined
+        ? ""
+        : JSON.stringify(content);
+
+    return {
+      role: role as "user" | "system" | "developer",
+      content: convertedContent,
+      ...extraFields,
+    } as models.OpenResponsesEasyInputMessage;
+  }) as models.OpenResponsesInput;
+}
+
 /**
  * Get a response with multiple consumption patterns
  *
@@ -75,24 +179,46 @@ import { convertEnhancedToolsToAPIFormat } from "../lib/tool-executor.js";
  */
 export function callModel(
   client: OpenRouterCore,
-  request: Omit<models.OpenResponsesRequest, "stream" | "tools"> & {
-    tools?: EnhancedTool[] | models.OpenResponsesRequest["tools"];
+  request: Omit<models.OpenResponsesRequest, "stream" | "tools" | "input"> & {
+    input?: CallModelInput;
+    tools?: CallModelTools;
     maxToolRounds?: MaxToolRounds;
   },
   options?: RequestOptions,
 ): ResponseWrapper {
-  const { tools, maxToolRounds, ...apiRequest } = request;
+  const { tools, maxToolRounds, input, ...restRequest } = request;
+
+  // Convert chat-style messages to responses-style input if needed
+  const convertedInput = input && isChatStyleMessages(input)
+    ? convertChatToResponsesInput(input)
+    : input;
+
+  const apiRequest = {
+    ...restRequest,
+    input: convertedInput,
+  };

-  // Separate enhanced tools from API tools
+  // Determine tool type and convert as needed
   let isEnhancedTools = false;
-  if (tools && tools.length > 0) {
+  let isChatTools = false;
+
+  if (tools && Array.isArray(tools) && tools.length > 0) {
     const firstTool = tools[0] as any;
     isEnhancedTools = "function" in firstTool && firstTool.function && "inputSchema" in firstTool.function;
+    isChatTools = !isEnhancedTools && isChatStyleTools(tools);
   }
+
   const enhancedTools = isEnhancedTools ? (tools as EnhancedTool[]) : undefined;

-  // Convert enhanced tools to API format if provided, otherwise use tools as-is
-  const apiTools = enhancedTools ? convertEnhancedToolsToAPIFormat(enhancedTools) : (tools as models.OpenResponsesRequest["tools"]);
+  // Convert tools to API format based on their type
+  let apiTools: models.OpenResponsesRequest["tools"];
+  if (enhancedTools) {
+    apiTools = convertEnhancedToolsToAPIFormat(enhancedTools);
+  } else if (isChatTools) {
+    apiTools = convertChatToResponsesTools(tools as models.ToolDefinitionJson[]);
+  } else {
+    apiTools = tools as models.OpenResponsesRequest["tools"];
+  }

   // Build the request with converted tools
   const finalRequest: models.OpenResponsesRequest = {
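
To make the new conversion concrete, here is a hypothetical illustration of what the helpers above produce; the shapes follow the diff, while the concrete values (tool name, call id, weather payload) are invented for illustration.

// Chat-style tool definition (ToolDefinitionJson), as accepted by the widened tools type.
const chatTool = {
  type: "function" as const,
  function: {
    name: "get_weather",
    description: "Get weather for a location",
    parameters: { type: "object", properties: { location: { type: "string" } } },
  },
};
// convertChatToResponsesTools flattens it into a responses-style function tool:
// { type: "function", name: "get_weather", description: "Get weather for a location",
//   strict: null, parameters: { ... } }

// Chat-style history, including a tool result message. The call id is a made-up placeholder.
const chatHistory = [
  { role: "user", content: "What's the weather in Paris?" },
  { role: "tool", toolCallId: "call_abc123", content: '{"tempC": 18}' },
];
// convertChatToResponsesInput keeps plain role/content messages as easy input messages
// (JSON-stringifying non-string content) and rewrites the tool message as:
// { type: "function_call_output", callId: "call_abc123", output: '{"tempC": 18}' }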

src/lib/config.ts

Lines changed: 2 additions & 2 deletions
@@ -69,7 +69,7 @@ export function serverURLFromOptions(options: SDKOptions): URL | null {
 export const SDK_METADATA = {
   language: "typescript",
   openapiDocVersion: "1.0.0",
-  sdkVersion: "0.1.24",
+  sdkVersion: "0.1.25",
   genVersion: "2.760.2",
-  userAgent: "speakeasy-sdk/typescript 0.1.24 2.760.2 1.0.0 @openrouter/sdk",
+  userAgent: "speakeasy-sdk/typescript 0.1.25 2.760.2 1.0.0 @openrouter/sdk",
 } as const;

src/sdk/sdk.ts

Lines changed: 3 additions & 2 deletions
@@ -94,8 +94,9 @@ export class OpenRouter extends ClientSDK {

   // #region sdk-class-body
   callModel(
-    request: Omit<models.OpenResponsesRequest, "stream" | "tools"> & {
-      tools?: EnhancedTool[] | models.OpenResponsesRequest["tools"];
+    request: Omit<models.OpenResponsesRequest, "stream" | "tools" | "input"> & {
+      input?: import("../funcs/callModel.js").CallModelInput;
+      tools?: import("../funcs/callModel.js").CallModelTools;
       maxToolRounds?: MaxToolRounds;
     },
     options?: RequestOptions,
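
The class method above shares its request shape with the standalone callModel function in src/funcs/callModel.ts. A sketch of the standalone form follows, assuming the usual Speakeasy core/funcs import layout and client options; those paths and options are not shown in this diff and may differ.

// Assumed import paths; verify against the package's exports map.
import { OpenRouterCore } from "@openrouter/sdk/core.js";
import { callModel } from "@openrouter/sdk/funcs/callModel.js";

// Assumed core construction, mirroring the client options used with the OpenRouter class.
const core = new OpenRouterCore({ apiKey: process.env.OPENROUTER_API_KEY });

// The standalone function takes the core client as its first argument (see the signature
// in src/funcs/callModel.ts) and accepts the same widened input/tools request shape.
const wrapper = callModel(core, {
  model: "meta-llama/llama-3.2-1b-instruct",
  input: [{ role: "user", content: "Say hello." }],
});

console.log(await wrapper.getText());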

tests/e2e/callModel.test.ts

Lines changed: 145 additions & 0 deletions
@@ -21,6 +21,151 @@ describe("callModel E2E Tests", () => {
     });
   });

+  describe("Chat-style messages support", () => {
+    it("should accept chat-style Message array as input", async () => {
+      const response = client.callModel({
+        model: "meta-llama/llama-3.2-1b-instruct",
+        input: [
+          {
+            role: "system",
+            content: "You are a helpful assistant.",
+          },
+          {
+            role: "user",
+            content: "Say 'chat test' and nothing else.",
+          },
+        ],
+      });
+
+      const text = await response.getText();
+
+      expect(text).toBeDefined();
+      expect(typeof text).toBe("string");
+      expect(text.length).toBeGreaterThan(0);
+    });
+
+    it("should handle multi-turn chat-style conversation", async () => {
+      const response = client.callModel({
+        model: "meta-llama/llama-3.2-1b-instruct",
+        input: [
+          {
+            role: "user",
+            content: "My favorite color is blue.",
+          },
+          {
+            role: "assistant",
+            content: "That's nice! Blue is a calming color.",
+          },
+          {
+            role: "user",
+            content: "What is my favorite color?",
+          },
+        ],
+      });
+
+      const text = await response.getText();
+
+      expect(text).toBeDefined();
+      expect(text.toLowerCase()).toContain("blue");
+    });
+
+    it("should handle system message in chat-style input", async () => {
+      const response = client.callModel({
+        model: "meta-llama/llama-3.2-1b-instruct",
+        input: [
+          {
+            role: "system",
+            content: "Always respond with exactly one word.",
+          },
+          {
+            role: "user",
+            content: "Say hello.",
+          },
+        ],
+      });
+
+      const text = await response.getText();
+
+      expect(text).toBeDefined();
+      expect(typeof text).toBe("string");
+    });
+
+    it("should accept chat-style tools (ToolDefinitionJson)", async () => {
+      const response = client.callModel({
+        model: "qwen/qwen3-vl-8b-instruct",
+        input: [
+          {
+            role: "user",
+            content: "What's the weather in Paris? Use the get_weather tool.",
+          },
+        ],
+        tools: [
+          {
+            type: "function" as const,
+            function: {
+              name: "get_weather",
+              description: "Get weather for a location",
+              parameters: {
+                type: "object",
+                properties: {
+                  location: {
+                    type: "string",
+                    description: "City name",
+                  },
+                },
+                required: ["location"],
+              },
+            },
+          },
+        ],
+      });
+
+      const toolCalls = await response.getToolCalls();
+
+      // Model should call the tool
+      expect(toolCalls.length).toBeGreaterThan(0);
+      expect(toolCalls[0].name).toBe("get_weather");
+      expect(toolCalls[0].arguments).toBeDefined();
+    }, 30000);
+
+    it("should work with chat-style messages and chat-style tools together", async () => {
+      const response = client.callModel({
+        model: "meta-llama/llama-3.1-8b-instruct",
+        input: [
+          {
+            role: "system",
+            content: "You are a helpful assistant. Use tools when needed.",
+          },
+          {
+            role: "user",
+            content: "Get the weather in Tokyo using the weather tool.",
+          },
+        ],
+        tools: [
+          {
+            type: "function" as const,
+            function: {
+              name: "get_weather",
+              description: "Get current weather",
+              parameters: {
+                type: "object",
+                properties: {
+                  city: { type: "string" },
+                },
+                required: ["city"],
+              },
+            },
+          },
+        ],
+      });
+
+      const toolCalls = await response.getToolCalls();
+
+      expect(toolCalls.length).toBeGreaterThan(0);
+      expect(toolCalls[0].name).toBe("get_weather");
+    }, 30000);
+  });
+
   describe("response.text - Text extraction", () => {
     it("should successfully get text from a response", async () => {
       const response = client.callModel({