17 changes: 15 additions & 2 deletions apps/docs/connectors/github.mdx
@@ -410,14 +410,27 @@ When you update the repository configuration:
<Tab title="TypeScript">
```typescript
// Delete by connection ID
const result = await client.connections.delete(connectionId);
const result = await client.connections.deleteByID(connectionId);

// Or delete by provider (requires container tags)
const result = await client.connections.deleteByProvider('github', {
containerTags: ['user-123']
});

console.log('Deleted connection:', result.id);
```
</Tab>
<Tab title="Python">
```python
# Delete by connection ID
result = client.connections.delete(connection_id)
result = client.connections.delete_by_id(connection_id)

# Or delete by provider (requires container tags)
result = client.connections.delete_by_provider(
provider='github',
container_tags=['user-123']
)

print(f'Deleted connection: {result.id}')
```
</Tab>
23 changes: 17 additions & 6 deletions apps/docs/connectors/overview.mdx
@@ -288,11 +288,16 @@ const client = new Supermemory({
apiKey: process.env.SUPERMEMORY_API_KEY!
});

// Delete by connection ID using SDK
const result = await client.connections.delete(connectionId);
// Delete by connection ID
const result = await client.connections.deleteByID(connectionId);

// Or delete by provider (requires container tags)
const result = await client.connections.deleteByProvider('notion', {
containerTags: ['user-123']
});

console.log('Deleted:', result.id, result.provider);
// Output: Deleted: conn_abc123 notion

```

```python Python
@@ -301,11 +306,17 @@ import os

client = Supermemory(api_key=os.environ.get("SUPERMEMORY_API_KEY"))

# Delete by connection ID using SDK
result = client.connections.delete(connection_id)
# Delete by connection ID
result = client.connections.delete_by_id(connection_id)

# Or delete by provider (requires container tags)
result = client.connections.delete_by_provider(
provider='notion',
container_tags=['user-123']
)

print(f"Deleted: {result.id} {result.provider}")
# Output: Deleted: conn_abc123 notion

```

```bash cURL
19 changes: 17 additions & 2 deletions bun.lock
@@ -248,17 +248,17 @@
},
"packages/tools": {
"name": "@supermemory/tools",
"version": "1.3.50",
"version": "1.3.62",
"dependencies": {
"@ai-sdk/anthropic": "^2.0.25",
"@ai-sdk/openai": "^2.0.23",
"@ai-sdk/provider": "^2.0.0",
"ai": "^5.0.29",
"openai": "^4.104.0",
"supermemory": "^3.0.0-alpha.26",
"zod": "^4.1.5",
},
"devDependencies": {
"@ai-sdk/provider": "^3.0.0",
"@anthropic-ai/sdk": "^0.65.0",
"@total-typescript/tsconfig": "^1.0.4",
"@types/bun": "^1.2.21",
@@ -267,6 +267,9 @@
"typescript": "^5.9.2",
"vitest": "^3.2.4",
},
"peerDependencies": {
"@ai-sdk/provider": "^2.0.0 || ^3.0.0",
},
},
"packages/ui": {
"name": "@repo/ui",
@@ -4777,8 +4780,12 @@

"@supermemory/tools/@ai-sdk/anthropic": ["@ai-sdk/anthropic@2.0.53", "", { "dependencies": { "@ai-sdk/provider": "2.0.0", "@ai-sdk/provider-utils": "3.0.18" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-ih7NV+OFSNWZCF+tYYD7ovvvM+gv7TRKQblpVohg2ipIwC9Y0TirzocJVREzZa/v9luxUwFbsPji++DUDWWxsg=="],

"@supermemory/tools/@ai-sdk/provider": ["@ai-sdk/provider@3.0.0", "", { "dependencies": { "json-schema": "^0.4.0" } }, "sha512-m9ka3ptkPQbaHHZHqDXDF9C9B5/Mav0KTdky1k2HZ3/nrW2t1AgObxIVPyGDWQNS9FXT/FS6PIoSjpcP/No8rQ=="],

"@supermemory/tools/@anthropic-ai/sdk": ["@anthropic-ai/sdk@0.65.0", "", { "dependencies": { "json-schema-to-ts": "^3.1.1" }, "peerDependencies": { "zod": "^3.25.0 || ^4.0.0" }, "optionalPeers": ["zod"], "bin": { "anthropic-ai-sdk": "bin/cli" } }, "sha512-zIdPOcrCVEI8t3Di40nH4z9EoeyGZfXbYSvWdDLsB/KkaSYMnEgC7gmcgWu83g2NTn1ZTpbMvpdttWDGGIk6zw=="],

"@supermemory/tools/ai": ["ai@5.0.113", "", { "dependencies": { "@ai-sdk/gateway": "2.0.21", "@ai-sdk/provider": "2.0.0", "@ai-sdk/provider-utils": "3.0.19", "@opentelemetry/api": "1.9.0" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-26vivpSO/mzZj0k1Si2IpsFspp26ttQICHRySQiMrtWcRd5mnJMX2a8sG28vmZ38C+JUn1cWmfZrsLMxkSMw9g=="],

"@supermemory/tools/typescript": ["typescript@5.9.3", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw=="],

"@supermemory/tools/zod": ["zod@4.1.13", "", {}, "sha512-AvvthqfqrAhNH9dnfmrfKzX5upOdjUVJYFqNSlkmGf64gRaTzlPwz99IHYnVs28qYAybvAlBV+H7pn0saFY4Ig=="],
@@ -5665,8 +5672,16 @@

"@supermemory/ai-sdk/ai/@ai-sdk/provider-utils": ["@ai-sdk/provider-utils@3.0.19", "", { "dependencies": { "@ai-sdk/provider": "2.0.0", "@standard-schema/spec": "^1.0.0", "eventsource-parser": "^3.0.6" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-W41Wc9/jbUVXVwCN/7bWa4IKe8MtxO3EyA0Hfhx6grnmiYlCvpI8neSYWFE0zScXJkgA/YK3BRybzgyiXuu6JA=="],

"@supermemory/tools/@ai-sdk/anthropic/@ai-sdk/provider": ["@ai-sdk/provider@2.0.0", "", { "dependencies": { "json-schema": "^0.4.0" } }, "sha512-6o7Y2SeO9vFKB8lArHXehNuusnpddKPk7xqL7T2/b+OvXMRIXUO1rR4wcv1hAFUAT9avGZshty3Wlua/XA7TvA=="],

"@supermemory/tools/@ai-sdk/anthropic/@ai-sdk/provider-utils": ["@ai-sdk/provider-utils@3.0.18", "", { "dependencies": { "@ai-sdk/provider": "2.0.0", "@standard-schema/spec": "^1.0.0", "eventsource-parser": "^3.0.6" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-ypv1xXMsgGcNKUP+hglKqtdDuMg68nWHucPPAhIENrbFAI+xCHiqPVN8Zllxyv1TNZwGWUghPxJXU+Mqps0YRQ=="],

"@supermemory/tools/ai/@ai-sdk/gateway": ["@ai-sdk/gateway@2.0.21", "", { "dependencies": { "@ai-sdk/provider": "2.0.0", "@ai-sdk/provider-utils": "3.0.19", "@vercel/oidc": "3.0.5" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-BwV7DU/lAm3Xn6iyyvZdWgVxgLu3SNXzl5y57gMvkW4nGhAOV5269IrJzQwGt03bb107sa6H6uJwWxc77zXoGA=="],

"@supermemory/tools/ai/@ai-sdk/provider": ["@ai-sdk/provider@2.0.0", "", { "dependencies": { "json-schema": "^0.4.0" } }, "sha512-6o7Y2SeO9vFKB8lArHXehNuusnpddKPk7xqL7T2/b+OvXMRIXUO1rR4wcv1hAFUAT9avGZshty3Wlua/XA7TvA=="],

"@supermemory/tools/ai/@ai-sdk/provider-utils": ["@ai-sdk/provider-utils@3.0.19", "", { "dependencies": { "@ai-sdk/provider": "2.0.0", "@standard-schema/spec": "^1.0.0", "eventsource-parser": "^3.0.6" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-W41Wc9/jbUVXVwCN/7bWa4IKe8MtxO3EyA0Hfhx6grnmiYlCvpI8neSYWFE0zScXJkgA/YK3BRybzgyiXuu6JA=="],

"@vanilla-extract/integration/esbuild/@esbuild/aix-ppc64": ["@esbuild/aix-ppc64@0.27.0", "", { "os": "aix", "cpu": "ppc64" }, "sha512-KuZrd2hRjz01y5JK9mEBSD3Vj3mbCvemhT466rSuJYeE/hjuBrHfjjcjMdTm/sz7au+++sdbJZJmuBwQLuw68A=="],

"@vanilla-extract/integration/esbuild/@esbuild/android-arm": ["@esbuild/android-arm@0.27.0", "", { "os": "android", "cpu": "arm" }, "sha512-j67aezrPNYWJEOHUNLPj9maeJte7uSMM6gMoxfPC9hOg8N02JuQi/T7ewumf4tNvJadFkvLZMlAq73b9uwdMyQ=="],
7 changes: 5 additions & 2 deletions packages/tools/package.json
@@ -1,7 +1,7 @@
{
"name": "@supermemory/tools",
"type": "module",
"version": "1.3.60",
"version": "1.3.64",
"description": "Memory tools for AI SDK and OpenAI function calling with supermemory",
"scripts": {
"build": "tsdown",
@@ -13,13 +13,13 @@
"dependencies": {
"@ai-sdk/anthropic": "^2.0.25",
"@ai-sdk/openai": "^2.0.23",
"@ai-sdk/provider": "^2.0.0",
"ai": "^5.0.29",
"openai": "^4.104.0",
"supermemory": "^3.0.0-alpha.26",
"zod": "^4.1.5"
},
"devDependencies": {
"@ai-sdk/provider": "^3.0.0",
"@total-typescript/tsconfig": "^1.0.4",
"@types/bun": "^1.2.21",
"dotenv": "^16.6.1",
@@ -28,6 +28,9 @@
"vitest": "^3.2.4",
"@anthropic-ai/sdk": "^0.65.0"
},
"peerDependencies": {
"@ai-sdk/provider": "^2.0.0 || ^3.0.0"
},
"main": "./dist/index.js",
"module": "./dist/index.js",
"types": "./dist/index.d.ts",
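For context on the `peerDependencies` change above: declaring `@ai-sdk/provider` as `"^2.0.0 || ^3.0.0"` lets the consuming app supply whichever provider major it already uses (AI SDK 5 pairs with provider v2, SDK 6 with v3), while this package keeps only the v3 types locally for development. A minimal, hypothetical sketch of how calling code can tell the two model generations apart at runtime; the docstring in `packages/tools/src/vercel/index.ts` later in this diff mentions `model.specificationVersion` as the discriminator, but the concrete string values here are assumed from the LanguageModelV2/V3 naming, not taken from this PR:

```typescript
// Hypothetical helper, for illustration only; the real detection lives inside the wrapper.
// The "v2"/"v3" values are assumptions inferred from the LanguageModelV2/V3 type names.
function modelGeneration(model: { specificationVersion: string }): "ai-sdk-5" | "ai-sdk-6" {
  return model.specificationVersion === "v3" ? "ai-sdk-6" : "ai-sdk-5"
}
```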
12 changes: 7 additions & 5 deletions packages/tools/src/openai/middleware.ts
@@ -184,7 +184,7 @@ const addSystemPrompt = async (
const deduplicated = deduplicateMemories({
static: memoriesResponse.profile.static,
dynamic: memoriesResponse.profile.dynamic,
searchResults: memoriesResponse.searchResults.results,
searchResults: memoriesResponse.searchResults?.results,
})

logger.debug("Memory deduplication completed for chat API", {
@@ -197,7 +197,7 @@
deduplicated: deduplicated.dynamic.length,
},
searchResults: {
original: memoriesResponse.searchResults.results.length,
original: memoriesResponse.searchResults?.results?.length,
deduplicated: deduplicated.searchResults.length,
},
})
@@ -340,7 +340,9 @@ const addMemoryTool = async (
: "",
...((msg as any).name && { name: (msg as any).name }),
...((msg as any).tool_calls && { tool_calls: (msg as any).tool_calls }),
...((msg as any).tool_call_id && { tool_call_id: (msg as any).tool_call_id }),
...((msg as any).tool_call_id && {
tool_call_id: (msg as any).tool_call_id,
}),
}))

const response = await addConversation({
@@ -467,7 +469,7 @@ export function createOpenAIMiddleware(
const deduplicated = deduplicateMemories({
static: memoriesResponse.profile.static,
dynamic: memoriesResponse.profile.dynamic,
searchResults: memoriesResponse.searchResults.results,
searchResults: memoriesResponse.searchResults?.results,
})

logger.debug(`Memory deduplication completed for ${context} API`, {
@@ -480,7 +482,7 @@
deduplicated: deduplicated.dynamic.length,
},
searchResults: {
original: memoriesResponse.searchResults.results.length,
original: memoriesResponse.searchResults?.results?.length,
deduplicated: deduplicated.searchResults.length,
},
})
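For context on the optional-chaining changes above: `searchResults` is not guaranteed to be present on the memories response (for example, when no query-based search runs), so dereferencing `.results` unconditionally can throw at runtime. A minimal sketch of the failure mode and the guarded access; the response shape here is a simplified assumption for illustration, not the actual SDK type:

```typescript
// Simplified, hypothetical shape of the memories response, for illustration only.
interface MemoriesResponse {
  profile: { static: string[]; dynamic: string[] }
  searchResults?: { results: string[] }
}

const withoutSearch: MemoriesResponse = {
  profile: { static: ["prefers dark mode"], dynamic: [] },
  // searchResults intentionally absent, e.g. when no query search ran
}

// Unguarded access would throw at runtime:
// TypeError: Cannot read properties of undefined (reading 'results')
// const broken = (withoutSearch as any).searchResults.results

// Guarded access, as in the diff above: evaluates to undefined instead of throwing.
const results = withoutSearch.searchResults?.results
console.log(results?.length ?? 0) // prints 0
```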
157 changes: 132 additions & 25 deletions packages/tools/src/vercel/index.ts
@@ -1,25 +1,37 @@
import type { LanguageModelV2 } from "@ai-sdk/provider"
import { wrapLanguageModel } from "ai"
import { createSupermemoryMiddleware } from "./middleware"
import {
type LanguageModel,
type LanguageModelCallOptions,
type LanguageModelStreamPart,
getLastUserMessage,
} from "./util"
import {
createSupermemoryContext,
transformParamsWithMemory,
extractAssistantResponseText,
saveMemoryAfterResponse,
} from "./middleware"

interface WrapVercelLanguageModelOptions {
conversationId?: string;
verbose?: boolean;
mode?: "profile" | "query" | "full";
addMemory?: "always" | "never";
apiKey?: string;
baseUrl?: string;
conversationId?: string
verbose?: boolean
mode?: "profile" | "query" | "full"
addMemory?: "always" | "never"
apiKey?: string
baseUrl?: string
}

/**
* Wraps a language model with supermemory middleware to automatically inject relevant memories
* into the system prompt based on the user's message content.
*
* This middleware searches the supermemory API for relevant memories using the container tag
* This wrapper searches the supermemory API for relevant memories using the container tag
* and user message, then either appends memories to an existing system prompt or creates
* a new system prompt with the memories.
*
* @param model - The language model to wrap with supermemory capabilities
* Supports both Vercel AI SDK 5 (LanguageModelV2) and SDK 6 (LanguageModelV3) via runtime
* detection of `model.specificationVersion`.
*
* @param model - The language model to wrap with supermemory capabilities (V2 or V3)
* @param containerTag - The container tag/identifier for memory search (e.g., user ID, project ID)
* @param options - Optional configuration options for the middleware
* @param options.conversationId - Optional conversation ID to group messages into a single document for contextual memory generation
@@ -51,29 +63,124 @@ interface WrapVercelLanguageModelOptions {
* @throws {Error} When neither `options.apiKey` nor `process.env.SUPERMEMORY_API_KEY` are set
* @throws {Error} When supermemory API request fails
*/
const wrapVercelLanguageModel = (
model: LanguageModelV2,
const wrapVercelLanguageModel = <T extends LanguageModel>(
model: T,
containerTag: string,
options?: WrapVercelLanguageModelOptions,
): LanguageModelV2 => {
): T => {
const providedApiKey = options?.apiKey ?? process.env.SUPERMEMORY_API_KEY

if (!providedApiKey) {
throw new Error("SUPERMEMORY_API_KEY is not set — provide it via `options.apiKey` or set `process.env.SUPERMEMORY_API_KEY`")
throw new Error(
"SUPERMEMORY_API_KEY is not set — provide it via `options.apiKey` or set `process.env.SUPERMEMORY_API_KEY`",
)
}

const conversationId = options?.conversationId
const verbose = options?.verbose ?? false
const mode = options?.mode ?? "profile"
const addMemory = options?.addMemory ?? "never"
const baseUrl = options?.baseUrl

const wrappedModel = wrapLanguageModel({
model,
middleware: createSupermemoryMiddleware(containerTag, providedApiKey, conversationId, verbose, mode, addMemory, baseUrl),
const ctx = createSupermemoryContext({
containerTag,
apiKey: providedApiKey,
conversationId: options?.conversationId,
verbose: options?.verbose ?? false,
mode: options?.mode ?? "profile",
addMemory: options?.addMemory ?? "never",
baseUrl: options?.baseUrl,
})

const wrappedModel = {
...model,

doGenerate: async (params: LanguageModelCallOptions) => {
try {
const transformedParams = await transformParamsWithMemory(params, ctx)

// biome-ignore lint/suspicious/noExplicitAny: Union type compatibility between V2 and V3
const result = await model.doGenerate(transformedParams as any)

const userMessage = getLastUserMessage(params)
if (ctx.addMemory === "always" && userMessage && userMessage.trim()) {
const assistantResponseText = extractAssistantResponseText(
result.content as unknown[],
)
saveMemoryAfterResponse(
ctx.client,
ctx.containerTag,
ctx.conversationId,
assistantResponseText,
params,
ctx.logger,
ctx.apiKey,
ctx.normalizedBaseUrl,
)
}

return result
} catch (error) {
ctx.logger.error("Error generating response", {
error: error instanceof Error ? error.message : "Unknown error",
})
throw error
}
},

doStream: async (params: LanguageModelCallOptions) => {
let generatedText = ""

try {
const transformedParams = await transformParamsWithMemory(params, ctx)

const { stream, ...rest } = await model.doStream(
// biome-ignore lint/suspicious/noExplicitAny: Union type compatibility between V2 and V3
transformedParams as any,
)

const transformStream = new TransformStream<
LanguageModelStreamPart,
LanguageModelStreamPart
>({
transform(chunk, controller) {
if (chunk.type === "text-delta") {
generatedText += chunk.delta
}
controller.enqueue(chunk)
},
flush: async () => {
const userMessage = getLastUserMessage(params)
if (
ctx.addMemory === "always" &&
userMessage &&
userMessage.trim()
) {
saveMemoryAfterResponse(
ctx.client,
ctx.containerTag,
ctx.conversationId,
generatedText,
params,
ctx.logger,
ctx.apiKey,
ctx.normalizedBaseUrl,
)
}
},
})

return {
stream: stream.pipeThrough(transformStream),
...rest,
}
} catch (error) {
ctx.logger.error("Error streaming response", {
error: error instanceof Error ? error.message : "Unknown error",
})
throw error
}
},
} as T

return wrappedModel
}

export { wrapVercelLanguageModel as withSupermemory, type WrapVercelLanguageModelOptions as WithSupermemoryOptions }
export {
wrapVercelLanguageModel as withSupermemory,
type WrapVercelLanguageModelOptions as WithSupermemoryOptions,
}
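To make the new surface concrete, here is a hedged usage sketch of the wrapper exported above. The `withSupermemory` name and the option fields come from this diff; the provider, model id, import subpath, container tag, and prompt are illustrative assumptions:

```typescript
import { openai } from "@ai-sdk/openai" // illustrative provider; any supported V2 or V3 model should work
import { generateText } from "ai"
// Import path is an assumption; check the package's export map for the actual subpath.
import { withSupermemory } from "@supermemory/tools"

// Assumes SUPERMEMORY_API_KEY is set in the environment (or pass options.apiKey).
// Memories scoped to the "user-123" container tag are injected into the system prompt.
const model = withSupermemory(openai("gpt-4o"), "user-123", {
  mode: "query", // "profile" | "query" | "full"; defaults to "profile"
  addMemory: "always", // persist the exchange after the response; defaults to "never"
  conversationId: "thread-42", // optional grouping for contextual memory generation
})

const { text } = await generateText({
  model,
  prompt: "Which database did we settle on last week?",
})
console.log(text)
```

Because the wrapper returns the same generic type it receives, the wrapped model drops into `generateText` or `streamText` exactly where the unwrapped model would.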