Skip to content

Commit c732405

Browse files
committed
wip
1 parent 082810e commit c732405

File tree

5 files changed

+64
-3
lines changed

5 files changed

+64
-3
lines changed

AGENTS.md

Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -53,6 +53,14 @@ MODEL=openrouter/google/gemini-pro
5353
MODEL=openrouter/meta-llama/llama-3.1-405b-instruct
5454
```
5555

56+
**LM Studio (Local models via OpenAI-compatible API):**
57+
58+
```bash
59+
MODEL=lmstudio/model-name
60+
```
61+
62+
LM Studio runs a local OpenAI-compatible API server on `http://localhost:1234/v1`. Make sure LM Studio is running with a model loaded before using this provider.
63+
5664
### MCP Server Configuration
5765

5866
The `MCP_SERVER_URL` environment variable controls MCP (Model Context Protocol) integration. The tool automatically detects whether to use HTTP or StdIO transport based on the value format.
@@ -101,6 +109,7 @@ MCP_SERVER_URL=
101109
- `ANTHROPIC_API_KEY`: Required when using `anthropic/*` models
102110
- `OPENAI_API_KEY`: Required when using `openai/*` models (get at https://platform.openai.com/api-keys)
103111
- `OPENROUTER_API_KEY`: Required when using `openrouter/*` models (get at https://openrouter.ai/keys)
112+
- No API key required for `lmstudio/*` models (inference runs locally in LM Studio)
104113

105114
### Provider Routing
106115

@@ -109,6 +118,7 @@ The benchmark tool automatically routes to the correct provider based on the `MO
109118
- `anthropic/*` → Direct Anthropic API
110119
- `openai/*` → Direct OpenAI API
111120
- `openrouter/*` → OpenRouter unified API
121+
- `lmstudio/*` → LM Studio local server (OpenAI-compatible)
112122

113123
This allows switching models and providers without any code changes.
114124

@@ -154,6 +164,7 @@ tests/
154164
- **Vercel AI SDK v5**: Agent framework with tool calling
155165
- **@ai-sdk/anthropic**: Anthropic provider for direct API access
156166
- **@ai-sdk/openai**: OpenAI provider for direct API access
167+
- **@ai-sdk/openai-compatible**: OpenAI-compatible provider for LM Studio and other local servers
157168
- **@openrouter/ai-sdk-provider**: OpenRouter provider for unified access to 300+ models
158169
- **@ai-sdk/mcp**: MCP client integration (with custom patch)
159170
- **Bun Runtime**: JavaScript runtime (not Node.js)
@@ -255,3 +266,4 @@ This naming convention allows you to:
255266
- MCP integration can be toggled via `MCP_SERVER_URL` environment variable without code changes
256267
- MCP status is clearly indicated in both the JSON metadata and HTML report with a visual badge
257268
- Exit code is 0 if all tests pass, 1 if any tests fail
269+
- LM Studio provider requires LM Studio to be running locally with a model loaded

README.md

Lines changed: 32 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -33,13 +33,44 @@ MCP_SERVER_URL=https://mcp.svelte.dev/mcp
3333

3434
**Required:**
3535

36-
- `MODEL`: The AI model to use (e.g., `anthropic/claude-sonnet-4`, `openai/gpt-5`, `openrouter/anthropic/claude-sonnet-4`)
36+
- `MODEL`: The AI model to use (e.g., `anthropic/claude-sonnet-4`, `openai/gpt-5`, `openrouter/anthropic/claude-sonnet-4`, `lmstudio/model-name`)
3737
- Corresponding API key (`ANTHROPIC_API_KEY`, `OPENAI_API_KEY`, or `OPENROUTER_API_KEY`)
38+
- Note: No API key required for `lmstudio/*` models (runs locally)
3839

3940
**Optional:**
4041

4142
- `MCP_SERVER_URL`: MCP server URL (leave empty to disable MCP integration)
4243

44+
### Supported Providers
45+
46+
**Cloud Providers:**
47+
- `anthropic/*` - Direct Anthropic API (requires `ANTHROPIC_API_KEY`)
48+
- `openai/*` - Direct OpenAI API (requires `OPENAI_API_KEY`)
49+
- `openrouter/*` - OpenRouter unified API (requires `OPENROUTER_API_KEY`)
50+
51+
**Local Providers:**
52+
- `lmstudio/*` - LM Studio local server (requires LM Studio running at `http://localhost:1234/v1` with a model loaded)
53+
54+
Example configurations:
55+
56+
```bash
57+
# Anthropic
58+
MODEL=anthropic/claude-sonnet-4
59+
ANTHROPIC_API_KEY=sk-ant-...
60+
61+
# OpenAI
62+
MODEL=openai/gpt-5
63+
OPENAI_API_KEY=sk-...
64+
65+
# OpenRouter
66+
MODEL=openrouter/anthropic/claude-sonnet-4
67+
OPENROUTER_API_KEY=sk-or-...
68+
69+
# LM Studio (local)
70+
MODEL=lmstudio/llama-3-8b
71+
# No API key needed - make sure LM Studio is running with a model loaded!
72+
```
73+
4374
## Usage
4475

4576
To run the benchmark (automatically discovers and runs all tests):

bun.lock

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,7 @@
88
"@ai-sdk/anthropic": "^2.0.53",
99
"@ai-sdk/mcp": "0.0.11",
1010
"@ai-sdk/openai": "^2.0.77",
11+
"@ai-sdk/openai-compatible": "^1.0.28",
1112
"@openrouter/ai-sdk-provider": "^1.4.1",
1213
"@testing-library/svelte": "^5.2.9",
1314
"@testing-library/user-event": "^14.6.1",
@@ -39,6 +40,8 @@
3940

4041
"@ai-sdk/openai": ["@ai-sdk/openai@2.0.77", "", { "dependencies": { "@ai-sdk/provider": "2.0.0", "@ai-sdk/provider-utils": "3.0.18" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-lEJ9vyWSU5VLo+6Msr6r32RnABf4SRxPSV3Hz1Yb5yt43bWYxbBzwaDNYGhJaDL6rCgfUVvcIf5TKiiEuVd4EQ=="],
4142

43+
"@ai-sdk/openai-compatible": ["@ai-sdk/openai-compatible@1.0.28", "", { "dependencies": { "@ai-sdk/provider": "2.0.0", "@ai-sdk/provider-utils": "3.0.18" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-yKubDxLYtXyGUzkr9lNStf/lE/I+Okc8tmotvyABhsQHHieLKk6oV5fJeRJxhr67Ejhg+FRnwUOxAmjRoFM4dA=="],
44+
4245
"@ai-sdk/provider": ["@ai-sdk/provider@2.0.0", "", { "dependencies": { "json-schema": "^0.4.0" } }, "sha512-6o7Y2SeO9vFKB8lArHXehNuusnpddKPk7xqL7T2/b+OvXMRIXUO1rR4wcv1hAFUAT9avGZshty3Wlua/XA7TvA=="],
4346

4447
"@ai-sdk/provider-utils": ["@ai-sdk/provider-utils@3.0.18", "", { "dependencies": { "@ai-sdk/provider": "2.0.0", "@standard-schema/spec": "^1.0.0", "eventsource-parser": "^3.0.6" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-ypv1xXMsgGcNKUP+hglKqtdDuMg68nWHucPPAhIENrbFAI+xCHiqPVN8Zllxyv1TNZwGWUghPxJXU+Mqps0YRQ=="],

lib/providers.ts

Lines changed: 16 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
import { anthropic } from "@ai-sdk/anthropic";
22
import { openai } from "@ai-sdk/openai";
33
import { createOpenRouter } from "@openrouter/ai-sdk-provider";
4+
import { createOpenAICompatible } from "@ai-sdk/openai-compatible";
45
import type { LanguageModel } from "ai";
56

67
interface ProviderConfig {
@@ -67,9 +68,22 @@ export function getModelProvider(config: ProviderConfig): LanguageModel {
6768
return openrouter.chat(modelPath);
6869
}
6970

71+
// Route to LM Studio provider (OpenAI-compatible)
72+
if (modelString.startsWith("lmstudio/")) {
73+
// Create LM Studio provider instance
74+
const lmstudio = createOpenAICompatible({
75+
name: "lmstudio",
76+
baseURL: "http://localhost:1234/v1",
77+
});
78+
79+
// Extract model name (e.g., "lmstudio/model-name" -> "model-name")
80+
const modelName = modelString.replace("lmstudio/", "");
81+
return lmstudio(modelName);
82+
}
83+
7084
// Invalid format
7185
throw new Error(
72-
`Invalid MODEL format: "${modelString}". Must start with "anthropic/", "openai/", or "openrouter/"`,
86+
`Invalid MODEL format: "${modelString}". Must start with "anthropic/", "openai/", "openrouter/", or "lmstudio/"`,
7387
);
7488
}
7589

@@ -86,7 +100,7 @@ export function loadEnvConfig(): ProviderConfig {
86100
// Model is required
87101
if (!modelString) {
88102
throw new Error(
89-
"MODEL environment variable is required. Format: 'anthropic/model-name', 'openai/model-name', or 'openrouter/provider/model-name'",
103+
"MODEL environment variable is required. Format: 'anthropic/model-name', 'openai/model-name', 'openrouter/provider/model-name', or 'lmstudio/model-name'",
90104
);
91105
}
92106

package.json

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -25,6 +25,7 @@
2525
"@ai-sdk/anthropic": "^2.0.53",
2626
"@ai-sdk/mcp": "0.0.11",
2727
"@ai-sdk/openai": "^2.0.77",
28+
"@ai-sdk/openai-compatible": "^1.0.28",
2829
"@openrouter/ai-sdk-provider": "^1.4.1",
2930
"@testing-library/svelte": "^5.2.9",
3031
"@testing-library/user-event": "^14.6.1",

0 commit comments

Comments
 (0)