Skip to content

Commit 818f4f4

Browse files
feat: add Maple TEE-encrypted embedding provider for memory_search
Adds nomic-embed-text embedding support via Maple's TEE-secured API:
- New embeddings-maple.ts provider using customFetch for encrypted requests
- Auto-selects maple when MAPLE_API_KEY is set (before openai/gemini)
- Adds 'maple' to provider/fallback options in config types
- Uses same session/encryption flow as inference requests
1 parent eea07b0 commit 818f4f4

File tree

5 files changed

+107
-15
lines changed

5 files changed

+107
-15
lines changed

src/agents/memory-search.ts

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,7 @@ export type ResolvedMemorySearchConfig = {
1010
enabled: boolean;
1111
sources: Array<"memory" | "sessions">;
1212
extraPaths: string[];
13-
provider: "openai" | "local" | "gemini" | "auto";
13+
provider: "openai" | "local" | "gemini" | "maple" | "auto";
1414
remote?: {
1515
baseUrl?: string;
1616
apiKey?: string;
@@ -26,7 +26,7 @@ export type ResolvedMemorySearchConfig = {
2626
experimental: {
2727
sessionMemory: boolean;
2828
};
29-
fallback: "openai" | "gemini" | "local" | "none";
29+
fallback: "openai" | "gemini" | "local" | "maple" | "none";
3030
model: string;
3131
local: {
3232
modelPath?: string;

src/config/types.tools.ts

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -233,8 +233,8 @@ export type MemorySearchConfig = {
233233
/** Enable session transcript indexing (experimental, default: false). */
234234
sessionMemory?: boolean;
235235
};
236-
/** Embedding provider mode. */
237-
provider?: "openai" | "gemini" | "local";
236+
/** Embedding provider mode (OpenSecret fork adds "maple" for TEE-encrypted embeddings). */
237+
provider?: "openai" | "gemini" | "local" | "maple";
238238
remote?: {
239239
baseUrl?: string;
240240
apiKey?: string;
@@ -252,8 +252,8 @@ export type MemorySearchConfig = {
252252
timeoutMinutes?: number;
253253
};
254254
};
255-
/** Fallback behavior when embeddings fail. */
256-
fallback?: "openai" | "gemini" | "local" | "none";
255+
/** Fallback behavior when embeddings fail (OpenSecret fork adds "maple"). */
256+
fallback?: "openai" | "gemini" | "local" | "maple" | "none";
257257
/** Embedding model id (remote) or alias (local). */
258258
model?: string;
259259
/** Local embedding settings (node-llama-cpp). */

src/memory/embeddings-maple.ts

Lines changed: 84 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,84 @@
1+
/**
2+
* Maple TEE-encrypted embedding provider using nomic-embed-text
3+
*/
4+
import { requireApiKey, resolveApiKeyForProvider } from "../agents/model-auth.js";
5+
import { createMapleCustomFetch } from "../agents/pi-embedded-runner/maple-fetch.js";
6+
import { MAPLE_DEFAULT_BASE_URL } from "../agents/models-config.providers.js";
7+
import type { EmbeddingProvider, EmbeddingProviderOptions } from "./embeddings.js";
8+
9+
export type MapleEmbeddingClient = {
10+
baseUrl: string;
11+
model: string;
12+
customFetch: typeof fetch;
13+
};
14+
15+
export const DEFAULT_MAPLE_EMBEDDING_MODEL = "nomic-embed-text";
16+
17+
export async function createMapleEmbeddingProvider(
18+
options: EmbeddingProviderOptions,
19+
): Promise<{ provider: EmbeddingProvider; client: MapleEmbeddingClient }> {
20+
const client = await resolveMapleEmbeddingClient(options);
21+
const url = `${client.baseUrl.replace(/\/$/, "")}/embeddings`;
22+
23+
const embed = async (input: string[]): Promise<number[][]> => {
24+
if (input.length === 0) return [];
25+
const res = await client.customFetch(url, {
26+
method: "POST",
27+
headers: { "Content-Type": "application/json" },
28+
body: JSON.stringify({ model: client.model, input }),
29+
});
30+
if (!res.ok) {
31+
const text = await res.text();
32+
throw new Error(`maple embeddings failed: ${res.status} ${text}`);
33+
}
34+
const payload = (await res.json()) as {
35+
data?: Array<{ embedding?: number[] }>;
36+
};
37+
const data = payload.data ?? [];
38+
return data.map((entry) => entry.embedding ?? []);
39+
};
40+
41+
return {
42+
provider: {
43+
id: "maple",
44+
model: client.model,
45+
embedQuery: async (text) => {
46+
const [vec] = await embed([text]);
47+
return vec ?? [];
48+
},
49+
embedBatch: embed,
50+
},
51+
client,
52+
};
53+
}
54+
55+
export async function resolveMapleEmbeddingClient(
56+
options: EmbeddingProviderOptions,
57+
): Promise<MapleEmbeddingClient> {
58+
const remote = options.remote;
59+
const remoteApiKey = remote?.apiKey?.trim();
60+
const remoteBaseUrl = remote?.baseUrl?.trim();
61+
62+
const apiKey = remoteApiKey
63+
? remoteApiKey
64+
: requireApiKey(
65+
await resolveApiKeyForProvider({
66+
provider: "maple",
67+
cfg: options.config,
68+
agentDir: options.agentDir,
69+
}),
70+
"maple",
71+
);
72+
73+
const providerConfig = options.config.models?.providers?.maple;
74+
const baseUrl =
75+
remoteBaseUrl ||
76+
providerConfig?.baseUrl?.trim() ||
77+
process.env.MAPLE_API_URL?.trim() ||
78+
MAPLE_DEFAULT_BASE_URL;
79+
80+
const model = options.model?.trim() || DEFAULT_MAPLE_EMBEDDING_MODEL;
81+
const customFetch = createMapleCustomFetch(apiKey);
82+
83+
return { baseUrl, model, customFetch };
84+
}

src/memory/embeddings.ts

Lines changed: 15 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -4,10 +4,12 @@ import type { Llama, LlamaEmbeddingContext, LlamaModel } from "node-llama-cpp";
44
import type { MoltbotConfig } from "../config/config.js";
55
import { resolveUserPath } from "../utils.js";
66
import { createGeminiEmbeddingProvider, type GeminiEmbeddingClient } from "./embeddings-gemini.js";
7+
import { createMapleEmbeddingProvider, type MapleEmbeddingClient } from "./embeddings-maple.js";
78
import { createOpenAiEmbeddingProvider, type OpenAiEmbeddingClient } from "./embeddings-openai.js";
89
import { importNodeLlamaCpp } from "./node-llama.js";
910

1011
export type { GeminiEmbeddingClient } from "./embeddings-gemini.js";
12+
export type { MapleEmbeddingClient } from "./embeddings-maple.js";
1113
export type { OpenAiEmbeddingClient } from "./embeddings-openai.js";
1214

1315
export type EmbeddingProvider = {
@@ -19,24 +21,25 @@ export type EmbeddingProvider = {
1921

2022
export type EmbeddingProviderResult = {
2123
provider: EmbeddingProvider;
22-
requestedProvider: "openai" | "local" | "gemini" | "auto";
23-
fallbackFrom?: "openai" | "local" | "gemini";
24+
requestedProvider: "openai" | "local" | "gemini" | "maple" | "auto";
25+
fallbackFrom?: "openai" | "local" | "gemini" | "maple";
2426
fallbackReason?: string;
2527
openAi?: OpenAiEmbeddingClient;
2628
gemini?: GeminiEmbeddingClient;
29+
maple?: MapleEmbeddingClient;
2730
};
2831

2932
export type EmbeddingProviderOptions = {
3033
config: MoltbotConfig;
3134
agentDir?: string;
32-
provider: "openai" | "local" | "gemini" | "auto";
35+
provider: "openai" | "local" | "gemini" | "maple" | "auto";
3336
remote?: {
3437
baseUrl?: string;
3538
apiKey?: string;
3639
headers?: Record<string, string>;
3740
};
3841
model: string;
39-
fallback: "openai" | "gemini" | "local" | "none";
42+
fallback: "openai" | "gemini" | "local" | "maple" | "none";
4043
local?: {
4144
modelPath?: string;
4245
modelCacheDir?: string;
@@ -116,7 +119,7 @@ export async function createEmbeddingProvider(
116119
const requestedProvider = options.provider;
117120
const fallback = options.fallback;
118121

119-
const createProvider = async (id: "openai" | "local" | "gemini") => {
122+
const createProvider = async (id: "openai" | "local" | "gemini" | "maple") => {
120123
if (id === "local") {
121124
const provider = await createLocalEmbeddingProvider(options);
122125
return { provider };
@@ -125,11 +128,15 @@ export async function createEmbeddingProvider(
125128
const { provider, client } = await createGeminiEmbeddingProvider(options);
126129
return { provider, gemini: client };
127130
}
131+
if (id === "maple") {
132+
const { provider, client } = await createMapleEmbeddingProvider(options);
133+
return { provider, maple: client };
134+
}
128135
const { provider, client } = await createOpenAiEmbeddingProvider(options);
129136
return { provider, openAi: client };
130137
};
131138

132-
const formatPrimaryError = (err: unknown, provider: "openai" | "local" | "gemini") =>
139+
const formatPrimaryError = (err: unknown, provider: "openai" | "local" | "gemini" | "maple") =>
133140
provider === "local" ? formatLocalSetupError(err) : formatError(err);
134141

135142
if (requestedProvider === "auto") {
@@ -145,7 +152,8 @@ export async function createEmbeddingProvider(
145152
}
146153
}
147154

148-
for (const provider of ["openai", "gemini"] as const) {
155+
// OpenSecret fork: prefer maple when MAPLE_API_KEY is available
156+
for (const provider of ["maple", "openai", "gemini"] as const) {
149157
try {
150158
const result = await createProvider(provider);
151159
return { ...result, requestedProvider };

src/memory/manager.ts

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -123,8 +123,8 @@ export class MemoryIndexManager {
123123
private readonly workspaceDir: string;
124124
private readonly settings: ResolvedMemorySearchConfig;
125125
private provider: EmbeddingProvider;
126-
private readonly requestedProvider: "openai" | "local" | "gemini" | "auto";
127-
private fallbackFrom?: "openai" | "local" | "gemini";
126+
private readonly requestedProvider: "openai" | "local" | "gemini" | "maple" | "auto";
127+
private fallbackFrom?: "openai" | "local" | "gemini" | "maple";
128128
private fallbackReason?: string;
129129
private openAi?: OpenAiEmbeddingClient;
130130
private gemini?: GeminiEmbeddingClient;

0 commit comments

Comments
 (0)