diff --git a/.gitignore b/.gitignore index 7093805a..dc93fe6e 100644 --- a/.gitignore +++ b/.gitignore @@ -65,3 +65,6 @@ bun.lockb # One-off / local scripts (keep on disk, not versioned) /scripts/ + + +.cursorrules diff --git a/drizzle/0006_add_api_keys.sql b/drizzle/0006_add_api_keys.sql new file mode 100644 index 00000000..263fedd4 --- /dev/null +++ b/drizzle/0006_add_api_keys.sql @@ -0,0 +1,16 @@ +-- API keys for MCP server authentication +CREATE TABLE "api_keys" ( + "id" text PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL, + "user_id" text NOT NULL, + "key_hash" text NOT NULL, + "key_prefix" text NOT NULL, + "label" text, + "created_at" timestamp with time zone DEFAULT now() NOT NULL, + "last_used_at" timestamp with time zone, + "revoked_at" timestamp with time zone, + CONSTRAINT "api_keys_key_hash_key" UNIQUE("key_hash") +); +--> statement-breakpoint +ALTER TABLE "api_keys" ADD CONSTRAINT "api_keys_user_id_user_id_fk" FOREIGN KEY ("user_id") REFERENCES "public"."user"("id") ON DELETE cascade ON UPDATE no action; +--> statement-breakpoint +CREATE INDEX "api_keys_user_id_idx" ON "api_keys" USING btree ("user_id" text_ops); diff --git a/drizzle/meta/_journal.json b/drizzle/meta/_journal.json index 8b5c51d8..036717c7 100644 --- a/drizzle/meta/_journal.json +++ b/drizzle/meta/_journal.json @@ -43,6 +43,13 @@ "when": 1771800001000, "tag": "0005_add_chat_messages_thread_message_unique", "breakpoints": true + }, + { + "idx": 6, + "version": "7", + "when": 1744156800000, + "tag": "0006_add_api_keys", + "breakpoints": true } ] } \ No newline at end of file diff --git a/package.json b/package.json index 40e4339b..bed35a17 100644 --- a/package.json +++ b/package.json @@ -31,6 +31,7 @@ "dependencies": { "@ai-sdk/devtools": "^0.0.15", "@ai-sdk/google": "^3.0.58", + "@modelcontextprotocol/sdk": "^1.0.4", "@assistant-ui/react": "^0.12.23", "@assistant-ui/react-ai-sdk": "^1.3.17", "@assistant-ui/react-devtools": "^1.0.4", diff --git a/src/app/api/mcp-keys/[id]/route.ts 
b/src/app/api/mcp-keys/[id]/route.ts new file mode 100644 index 00000000..cf31cfc9 --- /dev/null +++ b/src/app/api/mcp-keys/[id]/route.ts @@ -0,0 +1,36 @@ +import { NextResponse } from "next/server"; +import { db } from "@/lib/db/client"; +import { apiKeys } from "@/lib/db/schema"; +import { eq, and } from "drizzle-orm"; +import { requireAuth, withErrorHandling } from "@/lib/api/workspace-helpers"; + +async function handleDELETE( + _req: Request, + { params }: { params: Promise<{ id: string }> } +) { + const userId = await requireAuth(); + const { id } = await params; + + const [key] = await db + .select({ userId: apiKeys.userId }) + .from(apiKeys) + .where(eq(apiKeys.id, id)) + .limit(1); + + if (!key) { + return NextResponse.json({ error: "API key not found" }, { status: 404 }); + } + + if (key.userId !== userId) { + return NextResponse.json({ error: "Access denied" }, { status: 403 }); + } + + await db + .update(apiKeys) + .set({ revokedAt: new Date().toISOString() }) + .where(eq(apiKeys.id, id)); + + return NextResponse.json({ success: true }); +} + +export const DELETE = withErrorHandling(handleDELETE, "DELETE /api/mcp-keys/[id]"); diff --git a/src/app/api/mcp-keys/route.ts b/src/app/api/mcp-keys/route.ts new file mode 100644 index 00000000..7d39c168 --- /dev/null +++ b/src/app/api/mcp-keys/route.ts @@ -0,0 +1,96 @@ +import { NextResponse } from "next/server"; +import { createHash, randomBytes } from "crypto"; +import { db } from "@/lib/db/client"; +import { apiKeys } from "@/lib/db/schema"; +import { eq, and, isNull, count, sql } from "drizzle-orm"; +import { requireAuth, withErrorHandling } from "@/lib/api/workspace-helpers"; + +const MAX_KEYS_PER_USER = 10; + +// Deterministic i64 from a userId string — used as the advisory lock key. +// XOR-folds the SHA-256 bytes into 8 bytes so the result fits in a pg bigint. 
+function userIdToLockKey(userId: string): bigint { + const hash = createHash("sha256").update(userId).digest(); + let lo = 0n; + for (let i = 0; i < 32; i++) { + lo ^= BigInt(hash[i]) << BigInt((i % 8) * 8); + } + // Clamp to signed int64 range so Postgres accepts it + const MAX_I64 = 9223372036854775807n; + return lo > MAX_I64 ? lo - (MAX_I64 + 1n) * 2n : lo; +} + +async function handlePOST(req: Request) { + const userId = await requireAuth(); + const body = await req.json().catch(() => ({})); + const label = typeof body?.label === "string" ? body.label.slice(0, 100) : null; + + const rawKey = `tx_${randomBytes(32).toString("base64url")}`; + const keyHash = createHash("sha256").update(rawKey).digest("hex"); + const keyPrefix = rawKey.substring(0, 8); + const lockKey = userIdToLockKey(userId); + + const result = await db.transaction(async (tx) => { + // Acquire a per-user advisory lock that is automatically released when + // the transaction ends, serialising concurrent key-creation requests. + await tx.execute(sql`SELECT pg_advisory_xact_lock(${lockKey}::bigint)`); + + const [{ total }] = await tx + .select({ total: count() }) + .from(apiKeys) + .where(and(eq(apiKeys.userId, userId), isNull(apiKeys.revokedAt))); + + if (total >= MAX_KEYS_PER_USER) { + return null; + } + + const [inserted] = await tx + .insert(apiKeys) + .values({ + userId, + keyHash, + keyPrefix, + label, + createdAt: new Date().toISOString(), + revokedAt: null, + lastUsedAt: null, + }) + .returning({ id: apiKeys.id, prefix: apiKeys.keyPrefix }); + + return inserted; + }); + + if (!result) { + return NextResponse.json( + { error: `You can have at most ${MAX_KEYS_PER_USER} active API keys. 
Revoke an existing key first.` }, + { status: 422 } + ); + } + + return NextResponse.json({ + id: result.id, + rawKey, + prefix: result.prefix, + }); +} + +async function handleGET() { + const userId = await requireAuth(); + + const keys = await db + .select({ + id: apiKeys.id, + prefix: apiKeys.keyPrefix, + label: apiKeys.label, + createdAt: apiKeys.createdAt, + lastUsedAt: apiKeys.lastUsedAt, + }) + .from(apiKeys) + .where(and(eq(apiKeys.userId, userId), isNull(apiKeys.revokedAt))) + .orderBy(apiKeys.createdAt); + + return NextResponse.json({ keys }); +} + +export const POST = withErrorHandling(handlePOST, "POST /api/mcp-keys"); +export const GET = withErrorHandling(handleGET, "GET /api/mcp-keys"); diff --git a/src/app/api/mcp/route.ts b/src/app/api/mcp/route.ts new file mode 100644 index 00000000..30b9a3ae --- /dev/null +++ b/src/app/api/mcp/route.ts @@ -0,0 +1,687 @@ +import { NextRequest, NextResponse } from "next/server"; +import { Server } from "@modelcontextprotocol/sdk/server/index.js"; +import { + ListToolsRequestSchema, + CallToolRequestSchema, + InitializeRequestSchema, + LATEST_PROTOCOL_VERSION, +} from "@modelcontextprotocol/sdk/types.js"; +import { createHash } from "crypto"; +import { db, workspaces } from "@/lib/db/client"; +import { apiKeys } from "@/lib/db/schema"; +import { eq, and, isNull } from "drizzle-orm"; +import { getCachedState } from "@/lib/mcp/workspace-cache"; +import type { Item } from "@/lib/workspace-state/types"; + +const MAX_BODY_BYTES = 64 * 1024; +const MAX_REGEX_LENGTH = 300; +const MAX_ID_LENGTH = 256; +const MAX_NAME_LENGTH = 500; +const MAX_PDF_PAGES = 50; // ~25k words per call — fits comfortably in any modern model context window +const MAX_LINE_LIMIT = 2000; // ~20k words at typical prose density +const MIN_LINE_LIMIT = 1; +const DEFAULT_LINE_LIMIT = 500; // enough for a full section without requiring a follow-up call +const LIST_MAX_ITEMS = 200; // items returned per list_workspace call; use search_workspace for larger 
workspaces + +const VALID_ITEM_TYPES = new Set(["document", "pdf", "flashcard", "quiz", "audio", "image"]); + +// Escapes all regex metacharacters so user-supplied query strings are always +// treated as literal substrings rather than patterns, preventing ReDoS. +function escapeRegex(s: string): string { + return s.replace(/[.*+?^${}()|[\]\\]/g, "\\$&"); +} + +// Throws if the workspace does not belong to userId — prevents cross-user data access. +async function assertOwnsWorkspace(workspaceId: string, userId: string): Promise<void> { + const [ws] = await db + .select({ id: workspaces.id }) + .from(workspaces) + .where(and(eq(workspaces.id, workspaceId), eq(workspaces.userId, userId))) + .limit(1); + if (!ws) throw new McpAuthError("Workspace not found or access denied"); +} + +class McpAuthError extends Error { + constructor(message: string) { + super(message); + this.name = "McpAuthError"; + } +} + +// Sanitise errors before sending to the caller — never leak raw DB messages. +function safeErrorMessage(error: unknown): string { + if (error instanceof McpAuthError) return error.message; + return "Internal server error"; +} + +function extractText(item: Item): string | null { + switch (item.type) { + case "document": + return (item.data as any).markdown ?? null; + case "pdf": { + const pages = (item.data as any).ocrPages; + return pages?.map((p: any) => p.markdown).join("\n\n") ?? null; + } + case "flashcard": { + const cards = (item.data as any).cards ?? []; + return cards.map((c: any) => `Q: ${c.front}\nA: ${c.back}`).join("\n\n"); + } + case "quiz": { + const questions = (item.data as any).questions ?? []; + return questions + .map((q: any) => + [q.questionText, ...(q.options ?? []), q.explanation] + .filter(Boolean) + .join("\n") + ) + .join("\n\n") || null; + } + case "audio": { + const data = item.data as any; + const transcript: string | null = + data.transcript ??
+ ((data.segments as any[] | undefined) + ?.map((s: any) => s.content) + .filter(Boolean) + .join("\n") || null); + const summary: string | null = data.summary ?? null; + return [transcript, summary].filter(Boolean).join("\n\n") || null; + } + case "image": { + const pages = (item.data as any).ocrPages; + return pages?.map((p: any) => p.markdown).join("\n\n") ?? null; + } + case "website": + case "youtube": + case "folder": + default: + return null; + } +} + +function levenshteinDistance(a: string, b: string): number { + const matrix: number[][] = []; + for (let i = 0; i <= b.length; i++) { + matrix[i] = [i]; + } + for (let j = 0; j <= a.length; j++) { + matrix[0][j] = j; + } + for (let i = 1; i <= b.length; i++) { + for (let j = 1; j <= a.length; j++) { + if (b.charAt(i - 1) === a.charAt(j - 1)) { + matrix[i][j] = matrix[i - 1][j - 1]; + } else { + matrix[i][j] = Math.min( + matrix[i - 1][j - 1] + 1, + matrix[i][j - 1] + 1, + matrix[i - 1][j] + 1 + ); + } + } + } + return matrix[b.length][a.length]; +} + +async function authenticateRequest(req: NextRequest): Promise<string | null> { + const authHeader = req.headers.get("Authorization"); + + if (!authHeader || !authHeader.startsWith("Bearer ")) { + return null; + } + + const rawKey = authHeader.substring(7); + if (!rawKey.startsWith("tx_")) { + return null; + } + + const keyHash = createHash("sha256").update(rawKey).digest("hex"); + + const [key] = await db + .select({ userId: apiKeys.userId, id: apiKeys.id }) + .from(apiKeys) + .where(and(eq(apiKeys.keyHash, keyHash), isNull(apiKeys.revokedAt))) + .limit(1); + + if (!key) { + return null; + } + + db.update(apiKeys) + .set({ lastUsedAt: new Date().toISOString() }) + .where(eq(apiKeys.id, key.id)) + .execute() + .catch(() => {}); + + return key.userId; +} + +function createServer(userId: string): Server { + const server = new Server( + { name: "thinkex", version: "1.0.0" }, + { capabilities: { tools: {} } } + ); + registerTools(server, userId); + return server; +} + +function
registerTools(server: Server, userId: string) { + + server.setRequestHandler(ListToolsRequestSchema, async () => { + return { + tools: [ + { + name: "list_workspaces", + description: "List all workspaces for the authenticated user. Call this first if you don't know the workspaceId.", + inputSchema: { type: "object", properties: {} }, + }, + { + name: "list_workspace", + description: "List items in a workspace (metadata only — no content). Returns up to 200 most-recently-modified items. If totalItems exceeds returned, use search_workspace to find specific items instead of calling list_workspace repeatedly.", + inputSchema: { + type: "object", + properties: { + workspaceId: { type: "string", description: "Workspace ID" }, + folderId: { type: "string", description: "Restrict to a specific folder (optional)" }, + }, + required: ["workspaceId"], + }, + }, + { + name: "get_recent", + description: "Get the N most recently modified items in a workspace. Use this to orient yourself quickly before deciding what to read.", + inputSchema: { + type: "object", + properties: { + workspaceId: { type: "string", description: "Workspace ID" }, + limit: { type: "number", description: "Items to return (default 5, max 20)" }, + }, + required: ["workspaceId"], + }, + }, + { + name: "search_workspace", + description: "Search item content by literal keyword. Returns matching snippets with item names and line numbers. 
Use this before read_item to locate the right section — it is far more token-efficient than loading full documents to scan manually.", + inputSchema: { + type: "object", + properties: { + workspaceId: { type: "string", description: "Workspace ID" }, + query: { type: "string", description: "Literal substring to search for (case-insensitive; special characters are matched literally, not as regex)" }, + folderId: { type: "string", description: "Restrict to a folder (optional)" }, + type: { type: "string", description: "Restrict to an item type: document, pdf, flashcard, quiz, audio, image (optional)" }, + limit: { type: "number", description: "Snippets to return (default 5, max 10)" }, + }, + required: ["workspaceId", "query"], + }, + }, + { + name: "read_item", + description: "Read the content of a named item. Fuzzy-matches the name. For text items use lineStart+limit; for PDFs use pageStart+pageEnd. The response includes hasMore and a note with the exact next call when the document continues beyond the current window.", + inputSchema: { + type: "object", + properties: { + workspaceId: { type: "string", description: "Workspace ID" }, + name: { type: "string", description: "Item name (fuzzy matched)" }, + lineStart: { type: "number", description: "Start line, 1-indexed (text items only, default 1)" }, + limit: { type: "number", description: "Lines to return (default 500, max 2000)" }, + pageStart: { type: "number", description: "Start page, 1-indexed (PDFs only, default 1)" }, + pageEnd: { type: "number", description: "End page inclusive (PDFs only, max 50 pages per call)" }, + }, + required: ["workspaceId", "name"], + }, + }, + ], + }; + }); + + server.setRequestHandler(CallToolRequestSchema, async (request) => { + const { name, arguments: args } = request.params; + + try { + switch (name) { + case "list_workspaces": { + const workspacesList = await db + .select({ id: workspaces.id, slug: workspaces.slug, name: workspaces.name }) + .from(workspaces) + 
.where(eq(workspaces.userId, userId)); + + return { + content: [{ type: "text", text: JSON.stringify(workspacesList) }], + }; + } + + case "list_workspace": { + const { workspaceId, folderId } = args as { workspaceId: string; folderId?: string }; + await assertOwnsWorkspace(workspaceId, userId); + const state = await getCachedState(workspaceId); + let items = state.items; + + if (folderId !== undefined) { + items = items.filter((i) => i.folderId === folderId); + } + + const totalItems = items.length; + + // Sort most-recently-modified first so the cap preserves the most relevant items + const sorted = [...items] + .sort((a, b) => (b.lastModified ?? 0) - (a.lastModified ?? 0)) + .slice(0, LIST_MAX_ITEMS) + .map((i) => ({ name: i.name, type: i.type, folderId: i.folderId ?? null, lastModified: i.lastModified })); + + const result: Record<string, unknown> = { + items: sorted, + returned: sorted.length, + totalItems, + }; + if (totalItems > LIST_MAX_ITEMS) { + result.hint = `Showing ${LIST_MAX_ITEMS} most-recent of ${totalItems} items. Use search_workspace() to find specific items by name or content.`; + } + + return { + content: [{ type: "text", text: JSON.stringify(result) }], + }; + } + + case "get_recent": { + const { workspaceId, limit } = args as { workspaceId: string; limit?: number }; + await assertOwnsWorkspace(workspaceId, userId); + const state = await getCachedState(workspaceId); + const recentLimit = Math.max(1, Math.min(Math.floor(limit ?? 5), 20)); + + const recent = [...state.items] + .filter((i) => i.lastModified) + .sort((a, b) => (b.lastModified ?? 0) - (a.lastModified ?? 0)) + .slice(0, recentLimit) + .map((i) => ({ name: i.name, type: i.type, folderId: i.folderId ??
null, lastModified: i.lastModified })); + + return { + content: [{ type: "text", text: JSON.stringify(recent) }], + }; + } + + case "search_workspace": { + const { workspaceId, query, folderId, type, limit } = args as { + workspaceId: string; + query: string; + folderId?: string; + type?: string; + limit?: number; + }; + await assertOwnsWorkspace(workspaceId, userId); + + if (typeof query !== "string" || query.length === 0 || query.length > MAX_REGEX_LENGTH) { + return { + content: [{ type: "text", text: JSON.stringify({ error: `Query must be a non-empty string of at most ${MAX_REGEX_LENGTH} characters` }) }], + isError: true, + }; + } + + if (type !== undefined && !VALID_ITEM_TYPES.has(type)) { + return { + content: [{ type: "text", text: JSON.stringify({ error: `Unknown item type "${type}". Valid types: ${[...VALID_ITEM_TYPES].join(", ")}` }) }], + isError: true, + }; + } + + const state = await getCachedState(workspaceId); + let items = state.items; + + if (folderId !== undefined) { + items = items.filter((i) => i.folderId === folderId); + } + if (type !== undefined) { + items = items.filter((i) => i.type === type); + } + + // Treat the query as a literal substring (metacharacters are escaped) + // to eliminate ReDoS exposure from untrusted input. + // Use case-insensitive flag only (no `g`) to avoid lastIndex state + // bleeding between test() calls on separate lines. + const regex = new RegExp(escapeRegex(query), "i"); + + const matches: Array<{ + itemName: string; + itemType: string; + folderId: string | null; + lineStart: number; + pageNumber?: number; + content: string; + }> = []; + const INTERNAL_CAP = 100; + + for (const item of items) { + if (item.type === "pdf") { + const pages = ((item.data as any).ocrPages ?? []) as Array<{ markdown: string }>; + for (let pageIdx = 0; pageIdx < pages.length && matches.length < INTERNAL_CAP; pageIdx++) { + const pageLines = (pages[pageIdx].markdown ?? 
"").split("\n"); + for (let lineIdx = 0; lineIdx < pageLines.length; lineIdx++) { + if (regex.test(pageLines[lineIdx])) { + matches.push({ + itemName: item.name, + itemType: item.type, + folderId: item.folderId ?? null, + pageNumber: pageIdx + 1, + lineStart: lineIdx + 1, + content: pageLines[lineIdx].trim(), + }); + if (matches.length >= INTERNAL_CAP) break; + } + } + } + } else { + const text = extractText(item); + if (!text) continue; + + const lines = text.split("\n"); + for (let i = 0; i < lines.length; i++) { + if (regex.test(lines[i])) { + matches.push({ + itemName: item.name, + itemType: item.type, + folderId: item.folderId ?? null, + lineStart: i + 1, + content: lines[i].trim(), + }); + if (matches.length >= INTERNAL_CAP) break; + } + } + } + if (matches.length >= INTERNAL_CAP) break; + } + + if (matches.length === 0) { + return { + content: [{ + type: "text", + text: JSON.stringify({ results: [], suggestion: "No matches found. Try list_workspace() to browse available items or broaden your query." }), + }], + }; + } + + const resultLimit = Math.min(Math.max(limit ?? 
5, 1), 10); + return { + content: [{ type: "text", text: JSON.stringify(matches.slice(0, resultLimit)) }], + }; + } + + case "read_item": { + const { workspaceId, name, lineStart, limit, pageStart, pageEnd } = args as { + workspaceId: string; + name: string; + lineStart?: number; + limit?: number; + pageStart?: number; + pageEnd?: number; + }; + await assertOwnsWorkspace(workspaceId, userId); + + if (typeof name !== "string" || name.length === 0 || name.length > MAX_NAME_LENGTH) { + return { + content: [{ type: "text", text: JSON.stringify({ error: `name must be a non-empty string of at most ${MAX_NAME_LENGTH} characters` }) }], + isError: true, + }; + } + + const state = await getCachedState(workspaceId); + + const nameLower = name.toLowerCase(); + let matchedItem: Item | null = null; + + const exactMatch = state.items.find((i) => i.name.toLowerCase() === nameLower); + if (exactMatch) { + matchedItem = exactMatch; + } else { + const substringMatches = state.items.filter((i) => + i.name.toLowerCase().includes(nameLower) + ); + if (substringMatches.length === 1) { + matchedItem = substringMatches[0]; + } else if (substringMatches.length > 1) { + let closestItem = substringMatches[0]; + let closestDistance = levenshteinDistance(nameLower, closestItem.name.toLowerCase()); + for (const item of substringMatches.slice(1)) { + const distance = levenshteinDistance(nameLower, item.name.toLowerCase()); + if (distance < closestDistance) { + closestDistance = distance; + closestItem = item; + } + } + matchedItem = closestItem; + } + } + + if (!matchedItem) { + return { + content: [{ type: "text", text: JSON.stringify({ error: "Item not found. Try list_workspace() to see available items." }) }], + isError: true, + }; + } + + if (matchedItem.type === "pdf") { + const pages = ((matchedItem.data as any).ocrPages ?? 
[]) as Array<{ markdown: string }>; + const totalPages = pages.length; + + // Clamp pageStart to a valid 1-based page number + const clampedStart = Math.max(1, Math.floor(pageStart ?? 1)); + if (clampedStart > totalPages) { + return { + content: [{ type: "text", text: JSON.stringify({ error: `pageStart (${clampedStart}) exceeds total pages (${totalPages})` }) }], + isError: true, + }; + } + + // Cap the window: if pageEnd is supplied, honour it but never exceed MAX_PDF_PAGES per request + const requestedEnd = pageEnd !== undefined ? Math.floor(pageEnd) : clampedStart + MAX_PDF_PAGES - 1; + if (requestedEnd < clampedStart) { + return { + content: [{ type: "text", text: JSON.stringify({ error: "pageEnd must be >= pageStart" }) }], + isError: true, + }; + } + const clampedEnd = Math.min(requestedEnd, clampedStart + MAX_PDF_PAGES - 1, totalPages); + + const content = pages + .slice(clampedStart - 1, clampedEnd) + .map((p) => p.markdown) + .join("\n\n---\n\n"); + + const pdfResult: Record<string, unknown> = { + itemName: matchedItem.name, + itemType: matchedItem.type, + content, + estimatedTokens: Math.ceil(content.length / 4), + pageStart: clampedStart, + pageEnd: clampedEnd, + totalPages, + hasMore: clampedEnd < totalPages, + }; + if (clampedEnd < totalPages) { + pdfResult.note = `Showing pages ${clampedStart}–${clampedEnd} of ${totalPages}. Call read_item again with pageStart=${clampedEnd + 1} for the next section.`; + } + + return { content: [{ type: "text", text: JSON.stringify(pdfResult) }] }; + } + + const text = extractText(matchedItem); + if (!text) { + const url = (matchedItem.data as any).url; + return { + content: [{ type: "text", text: JSON.stringify({ content: null, note: `This item has no stored body text. URL: ${url ?? "N/A"}` }) }], + }; + } + + const lines = text.split("\n"); + const totalLines = lines.length; + + const clampedLineStart = Math.max(1, Math.floor(lineStart ??
1)); + if (clampedLineStart > totalLines) { + return { + content: [{ type: "text", text: JSON.stringify({ error: `lineStart (${clampedLineStart}) exceeds total lines (${totalLines})` }) }], + isError: true, + }; + } + + const lineLimit = Math.min(Math.max(Math.floor(limit ?? DEFAULT_LINE_LIMIT), MIN_LINE_LIMIT), MAX_LINE_LIMIT); + const slice = lines.slice(clampedLineStart - 1, clampedLineStart - 1 + lineLimit); + const content = slice.join("\n"); + const returnedEnd = clampedLineStart - 1 + slice.length; + + const textResult: Record<string, unknown> = { + itemName: matchedItem.name, + itemType: matchedItem.type, + content, + estimatedTokens: Math.ceil(content.length / 4), + lineStart: clampedLineStart, + lineEnd: returnedEnd, + totalLines, + hasMore: returnedEnd < totalLines, + }; + if (returnedEnd < totalLines) { + textResult.note = `Showing lines ${clampedLineStart}–${returnedEnd} of ${totalLines}. Call read_item again with lineStart=${returnedEnd + 1} for the next section.`; + } + + return { content: [{ type: "text", text: JSON.stringify(textResult) }] }; + } + + default: + return { + content: [ + { + type: "text", + text: JSON.stringify({ error: `Unknown tool: ${name}` }), + }, + ], + isError: true, + }; + } + } catch (error: unknown) { + return { + content: [ + { + type: "text", + text: JSON.stringify({ error: safeErrorMessage(error) }), + }, + ], + isError: true, + }; + } + }); +} + +async function handleMCP(req: NextRequest) { + // Enforce body size limit before doing anything else + const contentLength = Number(req.headers.get("content-length") ??
0); + if (contentLength > MAX_BODY_BYTES) { + return NextResponse.json({ error: "Request body too large" }, { status: 413 }); + } + + const userId = await authenticateRequest(req); + if (!userId) { + return NextResponse.json({ error: "Unauthorized" }, { status: 401 }); + } + + const server = createServer(userId); + + let body: unknown; + try { + const reader = req.body?.getReader(); + if (!reader) { + return NextResponse.json({ + jsonrpc: "2.0", + id: null, + error: { code: -32700, message: "Parse error" }, + }, { status: 400 }); + } + let totalBytes = 0; + const chunks: Uint8Array[] = []; + try { + while (true) { + const { done, value } = await reader.read(); + if (done) break; + totalBytes += value.byteLength; + if (totalBytes > MAX_BODY_BYTES) { + await reader.cancel(); + return NextResponse.json({ error: "Request body too large" }, { status: 413 }); + } + chunks.push(value); + } + } finally { + reader.releaseLock(); + } + const combined = new Uint8Array(totalBytes); + let offset = 0; + for (const chunk of chunks) { + combined.set(chunk, offset); + offset += chunk.byteLength; + } + body = JSON.parse(new TextDecoder().decode(combined)); + } catch { + return NextResponse.json({ + jsonrpc: "2.0", + id: null, + error: { code: -32700, message: "Parse error" }, + }, { status: 400 }); + } + + if (typeof body !== "object" || body === null || Array.isArray(body)) { + return NextResponse.json({ + jsonrpc: "2.0", + id: null, + error: { code: -32600, message: "Invalid request" }, + }, { status: 400 }); + } + + const { method, params, id: rawId } = body as Record<string, unknown>; + + // JSON-RPC id must be a string, number, or null — never reflect arbitrary values + const id: string | number | null = + typeof rawId === "string" && rawId.length <= MAX_ID_LENGTH ? rawId + : typeof rawId === "number" && Number.isFinite(rawId) ? rawId + : null; + + try { + if (method === "initialize") { + // Respond to the MCP initialization handshake. + // The client sends its capabilities; we reply with ours.
+ await server.request( + { method: "initialize", params: params as any }, + InitializeRequestSchema, + ); + return NextResponse.json({ + jsonrpc: "2.0", + id, + result: { + protocolVersion: LATEST_PROTOCOL_VERSION, + capabilities: { tools: {} }, + serverInfo: { name: "thinkex", version: "1.0.0" }, + }, + }); + } else if (method === "notifications/initialized") { + // Notification — no response body required by the MCP spec. + return new NextResponse(null, { status: 204 }); + } else if (method === "tools/list") { + const response = await server.request({ method: "tools/list", params: params as any }, ListToolsRequestSchema); + return NextResponse.json({ jsonrpc: "2.0", id, result: response }); + } else if (method === "tools/call") { + const response = await server.request({ method: "tools/call", params: params as any }, CallToolRequestSchema); + return NextResponse.json({ jsonrpc: "2.0", id, result: response }); + } else { + return NextResponse.json({ + jsonrpc: "2.0", + id, + error: { code: -32601, message: "Method not found" }, + }); + } + } catch { + return NextResponse.json({ + jsonrpc: "2.0", + id, + error: { code: -32603, message: "Internal error" }, + }, { status: 500 }); + } +} + +export const POST = handleMCP; + +// Allow up to 30s for workspace state loading on large workspaces +export const maxDuration = 30; diff --git a/src/app/api/workspaces/[id]/events/route.ts b/src/app/api/workspaces/[id]/events/route.ts index dab5ea0b..dae50e61 100644 --- a/src/app/api/workspaces/[id]/events/route.ts +++ b/src/app/api/workspaces/[id]/events/route.ts @@ -67,6 +67,7 @@ import { withErrorHandling, } from "@/lib/api/workspace-helpers"; import { broadcastWorkspaceEventFromServer } from "@/lib/realtime/server-broadcast"; +import { invalidateWorkspaceCache } from "@/lib/mcp/workspace-cache"; /** * GET /api/workspaces/[id]/events @@ -458,6 +459,9 @@ async function handlePOST( } // Success - no conflict + // Invalidate MCP cache for this workspace + 
invalidateWorkspaceCache(id); + // Check if we need to create a snapshot (async, non-blocking) checkAndCreateSnapshot(id).catch((err) => { console.error( diff --git a/src/app/share-copy/[id]/layout.tsx b/src/app/share-copy/[id]/layout.tsx index a83bdfb7..763806c5 100644 --- a/src/app/share-copy/[id]/layout.tsx +++ b/src/app/share-copy/[id]/layout.tsx @@ -38,9 +38,15 @@ export async function generateMetadata({ params }: Props): Promise { } // Fetch full state to get potentially updated title/description - const state = await loadWorkspaceState(id); + let stateTitle: string | undefined; + try { + const state = await loadWorkspaceState(id); + stateTitle = state.globalTitle; + } catch (err) { + console.error("[generateMetadata] loadWorkspaceState failed for workspace", id, err); + } - const title = state.globalTitle || workspace[0].name || "Untitled Workspace"; + const title = stateTitle || workspace[0].name || "Untitled Workspace"; const sharedTitle = `Shared Workspace: ${title}`; const description = workspace[0].description || "View and import this shared ThinkEx workspace."; const fullTitle = getPageTitle(sharedTitle); diff --git a/src/components/auth/AccountModal.tsx b/src/components/auth/AccountModal.tsx index 7cbb4f63..666355ac 100644 --- a/src/components/auth/AccountModal.tsx +++ b/src/components/auth/AccountModal.tsx @@ -1,18 +1,20 @@ "use client"; -import { useState } from "react"; +import { Fragment, useState, useEffect, useRef } from "react"; import { Dialog, DialogContent, DialogHeader, DialogTitle, } from "@/components/ui/dialog"; +import { Tabs, TabsContent, TabsList, TabsTrigger } from "@/components/ui/tabs"; +import { getBaseURL } from "@/lib/base-url"; import { Button } from "@/components/ui/button"; import { Input } from "@/components/ui/input"; import { Label } from "@/components/ui/label"; import { authClient, useSession } from "@/lib/auth-client"; import { toast } from "sonner"; -import { Loader2 } from "lucide-react"; +import { Loader2, Copy, 
Plus, Trash2, ChevronDown } from "lucide-react"; import { Avatar, AvatarFallback, AvatarImage } from "@/components/ui/avatar"; import { AlertDialog, @@ -24,6 +26,19 @@ import { AlertDialogHeader, AlertDialogTitle, } from "@/components/ui/alert-dialog"; +import { + Table, + TableBody, + TableCell, + TableHead, + TableHeader, + TableRow, +} from "@/components/ui/table"; +import { + Collapsible, + CollapsibleTrigger, + CollapsibleContent, +} from "@/components/ui/collapsible"; interface AccountModalProps { open: boolean; @@ -37,13 +52,18 @@ export function AccountModal({ open, onOpenChange }: AccountModalProps) { return ( - - - Account Settings - -
- - + +
+ + Account Settings + +
+
+
+ + + +
@@ -102,6 +122,412 @@ function ProfileForm({ user }: { user: any }) { ); } +interface APIKey { + id: string; + prefix: string; + label: string | null; + createdAt: string; + lastUsedAt: string | null; +} + +function MCPAccessSection() { + const [open, setOpen] = useState(false); + const [keys, setKeys] = useState([]); + const [isLoading, setIsLoading] = useState(false); + const [loadFailed, setLoadFailed] = useState(false); + const [showCreateDialog, setShowCreateDialog] = useState(false); + const [showKeyModal, setShowKeyModal] = useState(false); + const [newKeyData, setNewKeyData] = useState<{ rawKey: string; prefix: string } | null>(null); + const [label, setLabel] = useState(""); + const [isCreating, setIsCreating] = useState(false); + const [keyToRevoke, setKeyToRevoke] = useState(null); + const latestFetchIdRef = useRef(0); + + const fetchKeys = async () => { + const fetchId = ++latestFetchIdRef.current; + try { + const res = await fetch("/api/mcp-keys"); + if (fetchId !== latestFetchIdRef.current) return; + if (!res.ok) { + console.error("Failed to load API keys:", res.status, res.statusText); + toast.error("Failed to load API keys"); + setLoadFailed(true); + return; + } + const data = await res.json(); + if (fetchId !== latestFetchIdRef.current) return; + setKeys(data.keys || []); + setLoadFailed(false); + } catch (error) { + if (fetchId !== latestFetchIdRef.current) return; + toast.error("Failed to load API keys"); + setLoadFailed(true); + } finally { + if (fetchId === latestFetchIdRef.current) { + setIsLoading(false); + } + } + }; + + useEffect(() => { + if (open) { + setLoadFailed(false); + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [open]); + + useEffect(() => { + if (open && keys.length === 0 && !loadFailed) { + setIsLoading(true); + fetchKeys(); + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [open, keys.length, loadFailed]); + + const handleCreateKey = async () => { + setIsCreating(true); + try { + const res = 
await fetch("/api/mcp-keys", { + method: "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify({ label: label || null }), + }); + + if (!res.ok) { + let errorMessage = "Failed to create API key"; + try { + const errData = await res.json(); + errorMessage = errData.message || errData.error || errorMessage; + } catch {} + toast.error(errorMessage); + return; + } + + const data = await res.json(); + setNewKeyData({ rawKey: data.rawKey, prefix: data.prefix }); + setShowCreateDialog(false); + setShowKeyModal(true); + setLabel(""); + await fetchKeys(); + } catch (error: any) { + toast.error(error?.message || "Failed to create API key"); + } finally { + setIsCreating(false); + } + }; + + const handleRevokeKey = async (id: string) => { + try { + const res = await fetch(`/api/mcp-keys/${id}`, { method: "DELETE" }); + if (!res.ok) throw new Error("Failed to revoke API key"); + + toast.success("API key revoked successfully"); + await fetchKeys(); + } catch (error) { + toast.error("Failed to revoke API key"); + } finally { + setKeyToRevoke(null); + } + }; + + const copyToClipboard = async (text: string) => { + try { + await navigator.clipboard.writeText(text); + toast.success("Copied to clipboard"); + } catch { + toast.error("Failed to copy to clipboard"); + } + }; + + const formatDate = (dateStr: string | null) => { + if (!dateStr) return "—"; + return new Date(dateStr).toLocaleDateString(); + }; + + return ( + <> + + + + + + + {isLoading ? ( +
+ +
+ ) : loadFailed ? ( +
+ Failed to load API keys. Please try again. +
+ ) : keys.length === 0 ? ( +
+ No API keys yet. Create one to get started. +
+ ) : ( +
+ + + + Label + Key Prefix + Created + Last Used + Actions + + + + {keys.map((key) => ( + + {key.label || Unlabeled} + + {key.prefix}... + + {formatDate(key.createdAt)} + {formatDate(key.lastUsedAt)} + + + + + ))} + +
+
+ )} + + + + +
+
+ + + + + Create API Key + + Give this API key a label to help you identify it later (optional). + + +
+ + setLabel(e.target.value)} + placeholder="e.g., My MacBook" + className="mt-2" + /> +
+ + Cancel + { + e.preventDefault(); + handleCreateKey(); + }} + disabled={isCreating} + > + {isCreating ? : null} + Create Key + + +
+
+ + { setShowKeyModal(open); if (!open) setNewKeyData(null); }}> + + + API Key Created + + Copy this key now. You will not be able to see it again. + + +
+
+ + +
+

+ This key will not be shown again. Store it in a secure location. +

+
+ + { setShowKeyModal(false); setNewKeyData(null); }}> + Done + + +
+
+ + setKeyToRevoke(null)}> + + + Revoke API Key? + + This will immediately revoke the API key. Any applications using this key will lose access. + + + + Cancel + { + e.preventDefault(); + if (keyToRevoke) handleRevokeKey(keyToRevoke); + }} + className="bg-red-500 hover:bg-red-600" + > + Revoke Key + + + + + + ); +} + +function IDEConfigSection({ copyToClipboard }: { copyToClipboard: (text: string) => void }) { + const mcpUrl = `${getBaseURL()}/api/mcp`; + + const snippet = () => `{ + "mcpServers": { + "thinkex": { + "url": "${mcpUrl}", + "headers": { + "Authorization": "Bearer " + } + } + } +}`; + + const claudeCodeSnippet = `{ + "mcpServers": { + "thinkex": { + "type": "http", + "url": "${mcpUrl}", + "headers": { + "Authorization": "Bearer " + } + } + } +}`; + + const cursorGlobal = snippet(); + const cursorProject = snippet(); + const vscode = snippet(); + + const ideTabs = [ + { + value: "cursor-global", + tabLabel: "Cursor — global", + fileLabel: "~/.cursor/mcp.json", + code: cursorGlobal, + hint: "Applies to all your Cursor projects. Create the file if it doesn't exist.", + }, + { + value: "cursor-project", + tabLabel: "Cursor — project", + fileLabel: ".cursor/mcp.json", + code: cursorProject, + hint: "Place this inside the root of a specific project. Useful for per-project keys.", + }, + { + value: "vscode", + tabLabel: "VS Code", + fileLabel: ".vscode/mcp.json", + code: vscode, + hint: "Requires the MCP extension for VS Code. Place in the project root.", + }, + { + value: "claude-code", + tabLabel: "Claude Code", + fileLabel: ".mcp.json", + code: claudeCodeSnippet, + hint: 'Place .mcp.json in your project root. Claude Code requires the "type": "http" field for remote servers.', + }, + ] as const; + + return ( +
+

IDE Configuration

+

+ Create (or edit) the config file shown below and paste the snippet inside. + After saving, your IDE will discover the ThinkEx MCP server automatically. +

+ + + + {ideTabs.map((tab, index) => ( + + {index > 0 ? ( + + ) : null} + + {tab.tabLabel} + + + ))} + + + {ideTabs.map(({ value, fileLabel, code, hint }) => ( + +
+
+ {fileLabel} + +
+
{code}
+
+

{hint}

+
+ ))} +
+
+ ); +} + function DangerZone() { const [showDeleteAlert, setShowDeleteAlert] = useState(false); const [isDeleting, setIsDeleting] = useState(false); diff --git a/src/components/ui/table.tsx b/src/components/ui/table.tsx new file mode 100644 index 00000000..c0df655c --- /dev/null +++ b/src/components/ui/table.tsx @@ -0,0 +1,120 @@ +import * as React from "react" + +import { cn } from "@/lib/utils" + +const Table = React.forwardRef< + HTMLTableElement, + React.HTMLAttributes +>(({ className, ...props }, ref) => ( +
+ + +)) +Table.displayName = "Table" + +const TableHeader = React.forwardRef< + HTMLTableSectionElement, + React.HTMLAttributes +>(({ className, ...props }, ref) => ( + +)) +TableHeader.displayName = "TableHeader" + +const TableBody = React.forwardRef< + HTMLTableSectionElement, + React.HTMLAttributes +>(({ className, ...props }, ref) => ( + +)) +TableBody.displayName = "TableBody" + +const TableFooter = React.forwardRef< + HTMLTableSectionElement, + React.HTMLAttributes +>(({ className, ...props }, ref) => ( + tr]:last:border-b-0", + className + )} + {...props} + /> +)) +TableFooter.displayName = "TableFooter" + +const TableRow = React.forwardRef< + HTMLTableRowElement, + React.HTMLAttributes +>(({ className, ...props }, ref) => ( + +)) +TableRow.displayName = "TableRow" + +const TableHead = React.forwardRef< + HTMLTableCellElement, + React.ThHTMLAttributes +>(({ className, ...props }, ref) => ( +
[role=checkbox]]:translate-y-[2px]", + className + )} + {...props} + /> +)) +TableHead.displayName = "TableHead" + +const TableCell = React.forwardRef< + HTMLTableCellElement, + React.TdHTMLAttributes +>(({ className, ...props }, ref) => ( + [role=checkbox]]:translate-y-[2px]", + className + )} + {...props} + /> +)) +TableCell.displayName = "TableCell" + +const TableCaption = React.forwardRef< + HTMLTableCaptionElement, + React.HTMLAttributes +>(({ className, ...props }, ref) => ( +
+)) +TableCaption.displayName = "TableCaption" + +export { + Table, + TableHeader, + TableBody, + TableFooter, + TableHead, + TableRow, + TableCell, + TableCaption, +} diff --git a/src/components/ui/tabs.tsx b/src/components/ui/tabs.tsx index 87eeec56..5a0dab25 100644 --- a/src/components/ui/tabs.tsx +++ b/src/components/ui/tabs.tsx @@ -29,7 +29,7 @@ const TabsTrigger = React.forwardRef< { + invalidateWorkspaceCache(workspaceId); + await broadcastPersistedWorkspaceEvent(workspaceId, event, version); +} + /** * WORKER 3: Workspace Management Agent * Manages workspace items (create, update, delete) @@ -485,11 +495,7 @@ export async function workspaceWorker( logger.info(`📝 [WORKSPACE-WORKER] Created ${item.type}:`, item.name); - await broadcastPersistedWorkspaceEvent( - params.workspaceId, - event, - appendResult.version, - ); + await postAppendSuccess(params.workspaceId, event, appendResult.version); // Include card count for flashcard decks (use created item.data.cards, not params) const flashcardCards = @@ -555,15 +561,10 @@ export async function workspaceWorker( "Workspace was modified by another user, please try again", ); } - logger.info( `📝 [WORKSPACE-WORKER] Bulk created ${items.length} items`, ); - await broadcastPersistedWorkspaceEvent( - params.workspaceId, - event, - appendResult.version, - ); + await postAppendSuccess(params.workspaceId, event, appendResult.version); return { success: true, message: `Bulk created ${items.length} items successfully`, @@ -682,7 +683,6 @@ export async function workspaceWorker( "Workspace was modified by another user, please try again", ); } - logger.info("🎴 [WORKSPACE-WORKER] Updated flashcard deck:", { itemId: params.itemId, cardsAdded: newCards.length, @@ -690,11 +690,7 @@ export async function workspaceWorker( newTitle: params.title, }); - await broadcastPersistedWorkspaceEvent( - params.workspaceId, - event, - appendResult.version, - ); + await postAppendSuccess(params.workspaceId, event, appendResult.version); return { 
success: true, @@ -841,18 +837,13 @@ export async function workspaceWorker( "Workspace was modified by another user, please try again", ); } - logger.info("🎯 [WORKSPACE-WORKER] Updated quiz:", { itemId: params.itemId, questionsAdded: questionsToAdd?.length ?? 0, totalQuestions: updatedData.questions.length, }); - await broadcastPersistedWorkspaceEvent( - params.workspaceId, - event, - appendResult.version, - ); + await postAppendSuccess(params.workspaceId, event, appendResult.version); return { success: true, @@ -967,18 +958,13 @@ export async function workspaceWorker( "Workspace was modified by another user, please try again", ); } - const contentLen = getOcrPagesTextContent(params.pdfOcrPages).length; logger.info("📄 [WORKSPACE-WORKER] Updated PDF OCR content:", { itemId: params.itemId, contentLength: contentLen, }); - await broadcastPersistedWorkspaceEvent( - params.workspaceId, - event, - appendResult.version, - ); + await postAppendSuccess(params.workspaceId, event, appendResult.version); return { success: true, @@ -1072,12 +1058,7 @@ export async function workspaceWorker( "Workspace was modified by another user, please try again", ); } - - await broadcastPersistedWorkspaceEvent( - params.workspaceId, - event, - appendResult.version, - ); + await postAppendSuccess(params.workspaceId, event, appendResult.version); return { success: true, @@ -1215,12 +1196,7 @@ export async function workspaceWorker( throw new Error( "Workspace was modified by another user, please try again", ); - - await broadcastPersistedWorkspaceEvent( - params.workspaceId, - event, - appendResult.version, - ); + await postAppendSuccess(params.workspaceId, event, appendResult.version); return { success: true, @@ -1349,12 +1325,7 @@ export async function workspaceWorker( throw new Error( "Workspace was modified by another user, please try again", ); - - await broadcastPersistedWorkspaceEvent( - params.workspaceId, - event, - appendResult.version, - ); + await 
postAppendSuccess(params.workspaceId, event, appendResult.version); return { success: true, @@ -1457,12 +1428,7 @@ export async function workspaceWorker( throw new Error( "Workspace was modified by another user, please try again", ); - - await broadcastPersistedWorkspaceEvent( - params.workspaceId, - event, - appendResult.version, - ); + await postAppendSuccess(params.workspaceId, event, appendResult.version); const diffOutput = trimDiff( createPatch( @@ -1538,11 +1504,7 @@ export async function workspaceWorker( throw new Error( "Workspace was modified by another user, please try again", ); - await broadcastPersistedWorkspaceEvent( - params.workspaceId, - event, - appendResult.version, - ); + await postAppendSuccess(params.workspaceId, event, appendResult.version); return { success: true, itemId: params.itemId, @@ -1600,14 +1562,9 @@ export async function workspaceWorker( "Workspace was modified by another user, please try again", ); } - logger.info("📝 [WORKSPACE-WORKER] Deleted item:", params.itemId); - await broadcastPersistedWorkspaceEvent( - params.workspaceId, - event, - appendResult.version, - ); + await postAppendSuccess(params.workspaceId, event, appendResult.version); return { success: true, diff --git a/src/lib/base-url.ts b/src/lib/base-url.ts new file mode 100644 index 00000000..55019751 --- /dev/null +++ b/src/lib/base-url.ts @@ -0,0 +1,40 @@ +const PRODUCTION_URL = "https://thinkex.app"; + +/** + * Returns the canonical app base URL. + * Safe to call from both server and client code. + * + * Priority: + * 1. NEXT_PUBLIC_APP_URL — validated and trimmed; localhost values are + * intentionally skipped so config snippets always show the live URL. + * 2. Hard-coded PRODUCTION_URL as fallback. + * + * In non-production server environments (local dev, self-hosted) where + * NEXT_PUBLIC_APP_URL is absent or points to localhost, an Error is thrown + * so callers don't silently emit config snippets pointing at the live site. 
+ * Client-side code (browser) always receives PRODUCTION_URL as fallback to + * avoid crashing the component tree. + */ +export function getBaseURL(): string { + const envUrl = process.env.NEXT_PUBLIC_APP_URL?.trim(); + + if (envUrl && !envUrl.includes("localhost") && !envUrl.includes("127.0.0.1")) { + return envUrl.replace(/\/$/, ""); + } + + // On the server in a non-production environment (local dev, preview without + // NEXT_PUBLIC_APP_URL set) fail loudly rather than silently returning the + // production URL. typeof window check distinguishes server from browser. + if ( + typeof window === "undefined" && + process.env.NODE_ENV !== "production" && + process.env.VERCEL_ENV !== "production" + ) { + throw new Error( + "NEXT_PUBLIC_APP_URL is missing or points to localhost. " + + "Set it to the canonical deployment URL (e.g. https://your-app.vercel.app)." + ); + } + + return PRODUCTION_URL; +} diff --git a/src/lib/db/schema.ts b/src/lib/db/schema.ts index 37417bc6..bab0021f 100644 --- a/src/lib/db/schema.ts +++ b/src/lib/db/schema.ts @@ -376,3 +376,16 @@ export const workspaceItemReads = pgTable( }), ] ); + +export const apiKeys = pgTable("api_keys", { + id: text("id").primaryKey().default(sql`gen_random_uuid()`), + userId: text("user_id").notNull().references(() => user.id, { onDelete: "cascade" }), + keyHash: text("key_hash").notNull().unique(), + keyPrefix: text("key_prefix").notNull(), + label: text("label"), + createdAt: timestamp("created_at", { withTimezone: true, mode: "string" }).defaultNow().notNull(), + lastUsedAt: timestamp("last_used_at", { withTimezone: true, mode: "string" }), + revokedAt: timestamp("revoked_at", { withTimezone: true, mode: "string" }), +}, (table) => [ + index("api_keys_user_id_idx").on(table.userId), +]); diff --git a/src/lib/mcp/workspace-cache.ts b/src/lib/mcp/workspace-cache.ts new file mode 100644 index 00000000..f4f446c3 --- /dev/null +++ b/src/lib/mcp/workspace-cache.ts @@ -0,0 +1,54 @@ +import { loadWorkspaceState } from 
"@/lib/workspace/state-loader"; +import type { AgentState } from "@/lib/workspace-state/types"; + +const stateCache = new Map(); +const CACHE_TTL_MS = 30_000; +const CACHE_MAX_SIZE = 200; +const PRUNE_INTERVAL_MS = 60_000; + +/** + * Removes expired entries from stateCache and, when the map still exceeds + * CACHE_MAX_SIZE after TTL pruning, evicts the oldest-inserted entries first + * (Map iteration order is insertion order in V8). + */ +function pruneStateCache(): void { + const now = Date.now(); + for (const [key, entry] of stateCache) { + if (entry.expiresAt < now) { + stateCache.delete(key); + } + } + // Evict oldest entries when the cache is still over its size cap + if (stateCache.size > CACHE_MAX_SIZE) { + const overflow = stateCache.size - CACHE_MAX_SIZE; + let evicted = 0; + for (const key of stateCache.keys()) { + stateCache.delete(key); + if (++evicted >= overflow) break; + } + } +} + +// Run periodic cleanup; the interval is unref'd so it won't keep the process +// alive in test/serverless environments that track active handles. +if (typeof setInterval !== "undefined") { + const timer = setInterval(pruneStateCache, PRUNE_INTERVAL_MS); + if (typeof timer === "object" && timer !== null && "unref" in timer) { + (timer as NodeJS.Timeout).unref(); + } +} + +export async function getCachedState(workspaceId: string): Promise { + const cached = stateCache.get(workspaceId); + if (cached && cached.expiresAt > Date.now()) return cached.state; + + // loadWorkspaceState throws on DB error, so any state that reaches here is + // a genuine result (including legitimately empty new workspaces). 
+ const state = await loadWorkspaceState(workspaceId); + stateCache.set(workspaceId, { state, expiresAt: Date.now() + CACHE_TTL_MS }); + return state; +} + +export function invalidateWorkspaceCache(workspaceId: string) { + stateCache.delete(workspaceId); +} diff --git a/src/lib/workspace/state-loader.ts b/src/lib/workspace/state-loader.ts index 1e74a24f..30d259ed 100644 --- a/src/lib/workspace/state-loader.ts +++ b/src/lib/workspace/state-loader.ts @@ -49,17 +49,9 @@ export async function loadWorkspaceState(workspaceId: string): Promise