From ad854119509e0e457468d06804c5c4effeeae741 Mon Sep 17 00:00:00 2001
From: Chen <99816898+donteatfriedrice@users.noreply.github.com>
Date: Tue, 28 Apr 2026 17:24:20 +0800
Subject: [PATCH 1/2] chore(m5): drop core connector engine + DB v5 migration
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Removes the connector subsystem from @spool-lab/core: the engine, scheduler,
registry, plugin loader, capabilities, prerequisite checker, trust store, and
all capture queries and tests. Adds a v5 schema migration that drops the
connector tables on existing user DBs and narrows the stars CHECK constraint
to session-only. The renderer, main process, and CLI already stopped using
these in earlier PRs.

src/connectors/: deleted entirely (28 files, ~5400 LOC) — engine, scheduler,
registry, loader, npm-install, prerequisites, trust-store, capabilities,
types, and all tests.

src/db/queries.ts:
- Drop searchCaptures, searchAll, searchCaptureRows, mapCaptureRows,
  mergeCaptureGroups, insertCapture, getCaptureCount,
  compareSearchResultRelevance, getMatchTypePriority
- Narrow the getStarredUuidsByType return type to { session: string[] }
- Narrow listStarredItems to session-only
- Drop the CaptureResult / Capture / CapturedItem / SearchResult / Source
  type imports

src/db/db.ts:
- Schema block: remove the captures / captures_fts / captures_fts_trigram /
  capture_connectors / connector_sync_state tables, their indexes, and the
  'connector' source row insert; narrow the stars CHECK to
  item_type = 'session'
- Trigger block: remove captures_fts_insert / captures_fts_delete (the DROP
  TRIGGER lines are kept so the existing-user upgrade cleans them out via v5)
- Wrap the historical v1-v3 migrations in try/catch — they reference tables
  that fresh installs no longer create
- Add the v5 migration: drops connector tables/triggers/FTS shadow tables,
  deletes capture stars, rebuilds stars with the narrow CHECK, and drops the
  'connector' source row
- Drop maintenance: rebuildFtsTableIfEmpty for captures, and the
  capture-orphan stars prune (no captures table to filter against)

src/types.ts:
- Delete the CaptureResult, Capture, and CapturedItem exports
- Narrow SearchResult to fragment-only
- Narrow StarKind to 'session'
- Narrow StarredItem to session-only

src/index.ts: drop all connector exports + connector-sdk re-exports.

package.json: drop the @spool-lab/connector-sdk, effect, semver, and tar
deps and their @types; drop the connector-sdk filter from the build script.
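
For reviewers, the v5 step has roughly the shape sketched below. This is a
minimal better-sqlite3 sketch assuming only the table/trigger names listed
in this message; the stars column list, the sources schema, and the
user_version bookkeeping are assumptions. The real body lives in
src/db/db.ts:

  import Database from 'better-sqlite3'

  // Hypothetical helper mirroring the v5 steps described above.
  function migrateToV5(db: Database.Database): void {
    db.exec(`
      DROP TRIGGER IF EXISTS captures_fts_insert;
      DROP TRIGGER IF EXISTS captures_fts_delete;
      DROP TABLE IF EXISTS captures_fts;          -- dropping an FTS table also
      DROP TABLE IF EXISTS captures_fts_trigram;  -- removes its shadow tables
      DROP TABLE IF EXISTS capture_connectors;
      DROP TABLE IF EXISTS connector_sync_state;
      DROP TABLE IF EXISTS captures;
      DELETE FROM stars WHERE item_type <> 'session';
      -- SQLite cannot alter a CHECK in place, so rebuild the table:
      ALTER TABLE stars RENAME TO stars_old;
      CREATE TABLE stars (
        item_uuid  TEXT NOT NULL,                 -- assumed columns
        item_type  TEXT NOT NULL CHECK (item_type = 'session'),
        starred_at INTEGER NOT NULL
      );
      INSERT INTO stars SELECT item_uuid, item_type, starred_at FROM stars_old;
      DROP TABLE stars_old;
      DELETE FROM sources WHERE id = 'connector'; -- assumed sources schema
      PRAGMA user_version = 5;                    -- assumed version mechanism
    `)
  }
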
Tests:
- Add src/db/migration-v5.test.ts: seeds a v4 DB with capture data, runs
  getDB(), verifies tables dropped + stars narrowed + session stars
  preserved + capture inserts now rejected; plus a fresh-install no-op case
- Rewrite src/db/stars.test.ts to session-only (drops 5 capture-specific
  cases, removes seedCapture helper)

Bundle/footprint:
- Net -6007 / +124 in core
- App main bundle 56.65 kB → 55.72 kB
- App loader chunk 308.57 kB → gone (replaced by syncer chunk at 68 kB)

Co-Authored-By: Claude Opus 4.7 (1M context)
---
 packages/core/package.json                    |  10 +-
 .../capabilities/cookies-chrome.test.ts       |  85 --
 .../connectors/capabilities/cookies-chrome.ts | 248 ------
 .../connectors/capabilities/exec-impl.test.ts |  50 --
 .../src/connectors/capabilities/exec-impl.ts  | 118 ---
 .../src/connectors/capabilities/fetch-impl.ts |   7 -
 .../core/src/connectors/capabilities/index.ts |   5 -
 .../src/connectors/capabilities/log-impl.ts   |  43 -
 .../connectors/capabilities/sqlite-impl.ts    |  20 -
 packages/core/src/connectors/loader.test.ts   | 442 ----------
 packages/core/src/connectors/loader.ts        | 436 ----------
 .../core/src/connectors/npm-install.test.ts   | 158 ----
 packages/core/src/connectors/npm-install.ts   | 117 ---
 .../core/src/connectors/prerequisites.test.ts | 227 -----
 packages/core/src/connectors/prerequisites.ts | 115 ---
 .../src/connectors/registry-fetch.test.ts     |  89 --
 .../core/src/connectors/registry-fetch.ts     |  77 --
 packages/core/src/connectors/registry.test.ts |  49 --
 packages/core/src/connectors/registry.ts      |  68 --
 .../src/connectors/sync-engine.effect.test.ts | 180 ----
 .../sync-engine.observability.test.ts         | 225 -----
 .../core/src/connectors/sync-engine.test.ts   | 694 ---------------
 packages/core/src/connectors/sync-engine.ts   | 804 ------------------
 .../connectors/sync-scheduler.effect.test.ts  | 433 ----------
 .../core/src/connectors/sync-scheduler.ts     | 314 -------
 packages/core/src/connectors/test-helpers.ts  | 114 ---
 .../core/src/connectors/trust-store.test.ts   |  66 --
 packages/core/src/connectors/trust-store.ts   |  50 --
 packages/core/src/connectors/types.ts         | 179 ----
 packages/core/src/db/db.ts                    | 230 ++---
 packages/core/src/db/migration-v5.test.ts     | 193 +++++
 packages/core/src/db/queries.ts               | 288 +------
 packages/core/src/db/stars.test.ts            |  90 +-
 packages/core/src/index.ts                    |  59 --
 packages/core/src/types.ts                    |  41 +-
 35 files changed, 317 insertions(+), 6007 deletions(-)
 delete mode 100644 packages/core/src/connectors/capabilities/cookies-chrome.test.ts
 delete mode 100644 packages/core/src/connectors/capabilities/cookies-chrome.ts
 delete mode 100644 packages/core/src/connectors/capabilities/exec-impl.test.ts
 delete mode 100644 packages/core/src/connectors/capabilities/exec-impl.ts
 delete mode 100644 packages/core/src/connectors/capabilities/fetch-impl.ts
 delete mode 100644 packages/core/src/connectors/capabilities/index.ts
 delete mode 100644 packages/core/src/connectors/capabilities/log-impl.ts
 delete mode 100644 packages/core/src/connectors/capabilities/sqlite-impl.ts
 delete mode 100644 packages/core/src/connectors/loader.test.ts
 delete mode 100644 packages/core/src/connectors/loader.ts
 delete mode 100644 packages/core/src/connectors/npm-install.test.ts
 delete mode 100644 packages/core/src/connectors/npm-install.ts
 delete mode 100644 packages/core/src/connectors/prerequisites.test.ts
 delete mode 100644 packages/core/src/connectors/prerequisites.ts
 delete mode 100644 packages/core/src/connectors/registry-fetch.test.ts
 delete mode 100644 packages/core/src/connectors/registry-fetch.ts
 delete mode 100644 packages/core/src/connectors/registry.test.ts
 delete mode 100644 packages/core/src/connectors/registry.ts
 delete mode 100644 packages/core/src/connectors/sync-engine.effect.test.ts
 delete mode 100644 packages/core/src/connectors/sync-engine.observability.test.ts
 delete mode 100644 packages/core/src/connectors/sync-engine.test.ts
 delete mode 100644 packages/core/src/connectors/sync-engine.ts
 delete mode 100644 packages/core/src/connectors/sync-scheduler.effect.test.ts
 delete mode 100644 packages/core/src/connectors/sync-scheduler.ts
 delete mode 100644 packages/core/src/connectors/test-helpers.ts
 delete mode 100644 packages/core/src/connectors/trust-store.test.ts
 delete mode 100644 packages/core/src/connectors/trust-store.ts
 delete mode 100644 packages/core/src/connectors/types.ts
 create mode 100644 packages/core/src/db/migration-v5.test.ts

diff --git a/packages/core/package.json b/packages/core/package.json
index 4a79cce..fcb20fd 100644
--- a/packages/core/package.json
+++ b/packages/core/package.json
@@ -20,24 +20,18 @@
   "scripts": {
     "rebuild:native": "pnpm run rebuild:native:node",
     "rebuild:native:node": "node ../../scripts/rebuild-better-sqlite3-node.mjs",
-    "build": "pnpm --filter @spool-lab/connector-sdk build && tsc",
+    "build": "tsc",
     "dev": "tsc --watch",
     "test": "pnpm run rebuild:native && vitest run",
     "clean": "rm -rf dist",
     "prepack": "pnpm run build"
   },
   "dependencies": {
-    "@spool-lab/connector-sdk": "workspace:^",
-    "better-sqlite3": "^11.10.0",
-    "effect": "^3.21.0",
-    "semver": "^7.7.4",
-    "tar": "^7.5.13"
+    "better-sqlite3": "^11.10.0"
   },
   "devDependencies": {
-    "@effect/vitest": "^0.29.0",
     "@types/better-sqlite3": "^7.6.13",
     "@types/node": "^22.19.17",
-    "@types/semver": "^7.7.1",
     "vitest": "^3.2.4"
   }
 }
diff --git a/packages/core/src/connectors/capabilities/cookies-chrome.test.ts b/packages/core/src/connectors/capabilities/cookies-chrome.test.ts
deleted file mode 100644
index b251151..0000000
--- a/packages/core/src/connectors/capabilities/cookies-chrome.test.ts
+++ /dev/null
@@ -1,85 +0,0 @@
-import { describe, it, expect } from 'vitest'
-import { makeChromeCookiesCapability, getMatchingHostKeys } from './cookies-chrome.js'
-import { SyncError, SyncErrorCode } from '@spool-lab/connector-sdk'
-
-describe('getMatchingHostKeys', () => {
-  it('matches host-only and same-host domain cookies', () => {
-    expect(getMatchingHostKeys('reddit.com')).toEqual([
-      'reddit.com',
-      '.reddit.com',
-    ])
-  })
-
-  it('matches parent domain cookies for subdomain requests', () => {
-    expect(getMatchingHostKeys('www.reddit.com')).toEqual([
-      'www.reddit.com',
-      '.www.reddit.com',
-      '.reddit.com',
-    ])
-  })
-
-  it('walks all parent labels for deep subdomains', () => {
-    expect(getMatchingHostKeys('a.b.example.co.uk')).toEqual([
-      'a.b.example.co.uk',
-      '.a.b.example.co.uk',
-      '.b.example.co.uk',
-      '.example.co.uk',
-      '.co.uk',
-    ])
-  })
-
-  it('does not walk into a bare TLD', () => {
-    const keys = getMatchingHostKeys('reddit.com')
-    expect(keys).not.toContain('.com')
-    expect(keys).not.toContain('com')
-  })
-
-  it('lower-cases the input host', () => {
-    expect(getMatchingHostKeys('WWW.Reddit.COM')).toEqual([
-      'www.reddit.com',
-      '.www.reddit.com',
-      '.reddit.com',
-    ])
-  })
-
-  it('strips a leading dot from the input', () => {
-    expect(getMatchingHostKeys('.reddit.com')).toEqual([
-      'reddit.com',
-      '.reddit.com',
-    ])
-  })
-
-  it('returns empty for single-label or empty hosts', () => {
-    expect(getMatchingHostKeys('localhost')).toEqual([])
-    expect(getMatchingHostKeys('')).toEqual([])
-  })
-}) - -describe('makeChromeCookiesCapability', () => { - it('returns a capability with a get method', () => { - const cap = makeChromeCookiesCapability() - expect(typeof cap.get).toBe('function') - }) - - it('rejects non-chrome browser', async () => { - const cap = makeChromeCookiesCapability() - // @ts-expect-error — testing runtime guard against invalid union value - await expect(cap.get({ browser: 'safari', url: 'https://x.com' })) - .rejects.toThrow(SyncError) - }) - - // Integration test: only runs if Chrome is available - it.skipIf(!process.env.CI_HAS_CHROME)( - 'returns cookies from Chrome for x.com', - async () => { - const cap = makeChromeCookiesCapability() - const cookies = await cap.get({ browser: 'chrome', url: 'https://x.com' }) - expect(Array.isArray(cookies)).toBe(true) - for (const c of cookies) { - expect(typeof c.name).toBe('string') - expect(typeof c.value).toBe('string') - expect(typeof c.secure).toBe('boolean') - } - }, - ) -}) diff --git a/packages/core/src/connectors/capabilities/cookies-chrome.ts b/packages/core/src/connectors/capabilities/cookies-chrome.ts deleted file mode 100644 index 25f5ac9..0000000 --- a/packages/core/src/connectors/capabilities/cookies-chrome.ts +++ /dev/null @@ -1,248 +0,0 @@ -/** - * Chrome cookie extraction for X/Twitter authentication. - * - * Adapted from fieldtheory-cli (https://github.com/afar1/fieldtheory-cli). - * Reads Chrome's encrypted cookie database on macOS, decrypts auth_token and - * ct0 (CSRF) cookies for x.com using the macOS Keychain. - */ - -import { execFileSync } from 'node:child_process' -import { existsSync, unlinkSync, copyFileSync } from 'node:fs' -import { join } from 'node:path' -import { tmpdir, platform, homedir } from 'node:os' -import { pbkdf2Sync, createDecipheriv, randomUUID } from 'node:crypto' -import type { CookiesCapability, Cookie, CookieQuery } from '@spool-lab/connector-sdk' -import { SyncError, SyncErrorCode } from '@spool-lab/connector-sdk' - -function getMacOSChromeKey(): Buffer { - const candidates = [ - { service: 'Chrome Safe Storage', account: 'Chrome' }, - { service: 'Chrome Safe Storage', account: 'Google Chrome' }, - { service: 'Google Chrome Safe Storage', account: 'Chrome' }, - { service: 'Google Chrome Safe Storage', account: 'Google Chrome' }, - { service: 'Chromium Safe Storage', account: 'Chromium' }, - { service: 'Brave Safe Storage', account: 'Brave' }, - { service: 'Brave Browser Safe Storage', account: 'Brave Browser' }, - ] - - for (const candidate of candidates) { - try { - const password = execFileSync( - 'security', - ['find-generic-password', '-w', '-s', candidate.service, '-a', candidate.account], - { encoding: 'utf8', stdio: ['pipe', 'pipe', 'pipe'] }, - ).trim() - if (password) { - return pbkdf2Sync(password, 'saltysalt', 1003, 16, 'sha1') - } - } catch { - // Try the next known browser/keychain naming pair. 
- } - } - - throw new SyncError( - SyncErrorCode.AUTH_KEYCHAIN_DENIED, - 'Could not read a browser Safe Storage password from the macOS Keychain.', - ) -} - -export function decryptCookieValue(encryptedValue: Buffer, key: Buffer, dbVersion = 0): string { - if (encryptedValue.length === 0) return '' - - if (encryptedValue[0] === 0x76 && encryptedValue[1] === 0x31 && encryptedValue[2] === 0x30) { - const iv = Buffer.alloc(16, 0x20) // 16 spaces - const ciphertext = encryptedValue.subarray(3) - const decipher = createDecipheriv('aes-128-cbc', key, iv) - let decrypted = decipher.update(ciphertext) - decrypted = Buffer.concat([decrypted, decipher.final()]) - - // Chrome DB version >= 24 (Chrome ~130+) prepends SHA256(host_key) to plaintext - if (dbVersion >= 24 && decrypted.length > 32) { - decrypted = decrypted.subarray(32) - } - - return decrypted.toString('utf8') - } - - return encryptedValue.toString('utf8') -} - -function detectChromeUserDataDir(): string { - const os = platform() - const home = homedir() - if (os === 'darwin') return join(home, 'Library', 'Application Support', 'Google', 'Chrome') - if (os === 'linux') return join(home, '.config', 'google-chrome') - if (os === 'win32') return join(home, 'AppData', 'Local', 'Google', 'Chrome', 'User Data') - throw new SyncError( - SyncErrorCode.AUTH_CHROME_NOT_FOUND, - `Unsupported platform for Chrome cookie extraction: ${os}`, - ) -} - -/** - * Run a sqlite3 query with fallback to a temp copy (Chrome locks the DB while running). - * Returns raw stdout string. - */ -function runSqliteQuery(dbPath: string, sql: string): string { - const tryQuery = (path: string): string => - execFileSync('sqlite3', ['-json', path, sql], { - encoding: 'utf8', - stdio: ['pipe', 'pipe', 'pipe'], - timeout: 10000, - }).trim() - - try { - return tryQuery(dbPath) - } catch { - const tmpDb = join(tmpdir(), `spool-cookies-${randomUUID()}.db`) - try { - copyFileSync(dbPath, tmpDb) - return tryQuery(tmpDb) - } catch (e2: unknown) { - throw new SyncError( - SyncErrorCode.AUTH_COOKIE_DECRYPT_FAILED, - `Could not read Chrome Cookies database at ${dbPath}. ${e2 instanceof Error ? e2.message : ''}`, - e2, - ) - } finally { - try { unlinkSync(tmpDb) } catch {} - } - } -} - -// ── CookiesCapability wrapper ────────────────────────────────────────────── - -interface RawCookieFull { - name: string - host_key: string - path: string - encrypted_value_hex: string - value: string - expires_utc: string - is_secure: string - is_httponly: string -} - -/** - * Enumerate every Chrome `host_key` value that should match a request to `host` - * per RFC 6265 §5.1.3. Chrome stores host-only cookies under the bare hostname - * and domain cookies under `.parent.example.com`; a request to `www.example.com` - * must see cookies at `www.example.com`, `.www.example.com`, and `.example.com` - * but not anything scoped to a sibling (`.other.example.com`) or a TLD alone. 
- */ -export function getMatchingHostKeys(host: string): string[] { - const normalized = host.toLowerCase().replace(/^\./, '') - if (!normalized || !normalized.includes('.')) return [] - - const keys = [normalized, `.${normalized}`] - let cur = normalized - while (true) { - const idx = cur.indexOf('.') - if (idx < 0) break - const parent = cur.substring(idx + 1) - if (!parent.includes('.')) break - keys.push(`.${parent}`) - cur = parent - } - return keys -} - -function queryAllCookiesForHost( - dbPath: string, - host: string, -): { cookies: RawCookieFull[]; dbVersion: number } { - if (!existsSync(dbPath)) { - throw new SyncError( - SyncErrorCode.AUTH_CHROME_NOT_FOUND, - `Chrome Cookies database not found at: ${dbPath}`, - ) - } - - const keys = getMatchingHostKeys(host) - if (keys.length === 0) return { cookies: [], dbVersion: 0 } - - const quoted = keys.map(k => `'${k.replace(/'/g, "''")}'`).join(',') - // Fetch cookies and DB version in one sqlite3 invocation to avoid double process spawn - const sql = `SELECT name, host_key, path, hex(encrypted_value) as encrypted_value_hex, value, expires_utc, is_secure, is_httponly, (SELECT value FROM meta WHERE key='version') as db_version FROM cookies WHERE host_key IN (${quoted});` - - const output = runSqliteQuery(dbPath, sql) - - if (!output || output === '[]') return { cookies: [], dbVersion: 0 } - try { - const rows: Array = JSON.parse(output) - const dbVersion = rows.length > 0 ? parseInt(rows[0]?.db_version ?? '0', 10) || 0 : 0 - return { cookies: rows, dbVersion } - } catch { - return { cookies: [], dbVersion: 0 } - } -} - -const CHROMIUM_EPOCH_DELTA = 11644473600 - -function chromiumExpiresToUnix(expiresUtc: string | number): number | null { - const raw = typeof expiresUtc === 'string' ? parseInt(expiresUtc, 10) : expiresUtc - if (!raw || raw === 0) return null - return raw / 1_000_000 - CHROMIUM_EPOCH_DELTA -} - -function domainFromUrl(url: string): string { - try { - return new URL(url).hostname - } catch { - return url - } -} - -export function makeChromeCookiesCapability(): CookiesCapability { - return { - async get(query: CookieQuery): Promise { - if (query.browser !== 'chrome') { - throw new SyncError( - SyncErrorCode.AUTH_CHROME_NOT_FOUND, - `Unsupported browser: ${query.browser}. Only 'chrome' is supported.`, - ) - } - - const os = platform() - if (os !== 'darwin') { - throw new SyncError( - SyncErrorCode.AUTH_CHROME_NOT_FOUND, - `Direct cookie extraction is currently supported on macOS only (detected: ${os}).`, - ) - } - - const profile = query.profile ?? 'Default' - const dataDir = detectChromeUserDataDir() - const dbPath = join(dataDir, profile, 'Cookies') - const key = getMacOSChromeKey() - - const host = domainFromUrl(query.url) - const result = queryAllCookiesForHost(dbPath, host) - - const cookies: Cookie[] = [] - for (const raw of result.cookies) { - let value: string - const hexVal = raw.encrypted_value_hex - if (hexVal && hexVal.length > 0) { - const buf = Buffer.from(hexVal, 'hex') - const decrypted = decryptCookieValue(buf, key, result.dbVersion) - value = decrypted.replace(/\0+$/g, '').trim() - } else { - value = raw.value ?? 
'' - } - - cookies.push({ - name: raw.name, - value, - domain: raw.host_key, - path: raw.path || '/', - expires: chromiumExpiresToUnix(raw.expires_utc), - secure: raw.is_secure === '1' || raw.is_secure === 'true', - httpOnly: raw.is_httponly === '1' || raw.is_httponly === 'true', - }) - } - - return cookies - }, - } -} diff --git a/packages/core/src/connectors/capabilities/exec-impl.test.ts b/packages/core/src/connectors/capabilities/exec-impl.test.ts deleted file mode 100644 index be94178..0000000 --- a/packages/core/src/connectors/capabilities/exec-impl.test.ts +++ /dev/null @@ -1,50 +0,0 @@ -import { describe, it, expect } from 'vitest' -import { makeExecCapability } from './exec-impl.js' - -describe('makeExecCapability', () => { - const exec = makeExecCapability() - - it('runs a command and returns stdout', async () => { - const result = await exec.run('echo', ['hello']) - expect(result.exitCode).toBe(0) - expect(result.stdout.trim()).toBe('hello') - expect(result.stderr).toBe('') - }) - - it('returns non-zero exitCode on failure', async () => { - const result = await exec.run('bash', ['-c', 'exit 42']) - expect(result.exitCode).toBe(42) - }) - - it('captures stderr', async () => { - const result = await exec.run('bash', ['-c', 'echo err >&2; exit 1']) - expect(result.exitCode).toBe(1) - expect(result.stderr.trim()).toBe('err') - }) - - it('rejects on timeout', async () => { - await expect( - exec.run('sleep', ['10'], { timeout: 200 }), - ).rejects.toThrow() - }) - - it('returns exit 127 when binary not found (login shell semantics)', async () => { - const result = await exec.run('nonexistent-binary-xyz', []) - expect(result.exitCode).toBe(127) - expect(result.stderr).toMatch(/not found|no such/i) - }) - - it('runs through a login shell so subprocesses inherit user env (e.g. proxy vars)', async () => { - // Sanity check: the spawned process can see at least one inherited env var - // that login shells typically set. HOME is reliable across macOS/Linux. - const result = await exec.run('printenv', ['HOME']) - expect(result.exitCode).toBe(0) - expect(result.stdout.trim()).toBeTruthy() - }) - - it('quotes args safely (no shell injection)', async () => { - const result = await exec.run('echo', ['hello world', `it's a $(date) test`]) - expect(result.exitCode).toBe(0) - expect(result.stdout.trim()).toBe(`hello world it's a $(date) test`) - }) -}) diff --git a/packages/core/src/connectors/capabilities/exec-impl.ts b/packages/core/src/connectors/capabilities/exec-impl.ts deleted file mode 100644 index 3e6e1e2..0000000 --- a/packages/core/src/connectors/capabilities/exec-impl.ts +++ /dev/null @@ -1,118 +0,0 @@ -import { spawn } from 'node:child_process' -import { readdirSync } from 'node:fs' -import { homedir } from 'node:os' -import { join } from 'node:path' -import type { ExecCapability, ExecResult } from '@spool-lab/connector-sdk' - -const DEFAULT_TIMEOUT = 60_000 - -function buildEnrichedPath(): string { - const home = homedir() - const base = process.env['PATH'] ?? '' - - const nvmBins: string[] = [] - const versionsDir = join(home, '.nvm', 'versions', 'node') - try { - for (const d of readdirSync(versionsDir)) { - if (d.startsWith('v')) nvmBins.push(join(versionsDir, d, 'bin')) - } - nvmBins.sort().reverse() - } catch {} - - const extras = [ - '/opt/homebrew/bin', - '/usr/local/bin', - `${home}/.local/bin`, - `${home}/.nvm/current/bin`, - `${home}/.fnm/aliases/default/bin`, - ...nvmBins, - ] - - return [...extras, base].join(':') -} - -/** POSIX single-quote escaping: wraps in '...' 
and escapes embedded single quotes. */ -function shellQuote(s: string): string { - return `'${s.replace(/'/g, `'\\''`)}'` -} - -let enrichedPath: string | null = null - -export function makeExecCapability(): ExecCapability { - if (!enrichedPath) enrichedPath = buildEnrichedPath() - const isWin = process.platform === 'win32' - - return { - run(bin: string, args: string[], opts?: { timeout?: number }): Promise { - const timeout = opts?.timeout ?? DEFAULT_TIMEOUT - - return new Promise((resolve, reject) => { - // GUI-launched apps on macOS don't inherit the user's shell env (no - // proxy vars, no nvm PATH, etc.) — running through a login shell - // sources .zprofile / .bash_profile so subprocesses get a realistic - // env. zsh additionally needs -i to source .zshrc where most users - // keep proxy/PATH tweaks; bash -i emits "cannot set terminal process - // group" warnings in non-TTY contexts (eg. CI) so we stick to plain - // -lc for bash and rely on .bash_profile to source .bashrc as is - // standard. On Windows there is no equivalent concept, spawn direct. - const shellPath = process.env['SHELL'] || '/bin/zsh' - const useInteractive = /\bzsh$/.test(shellPath) - const proc = isWin - ? spawn(bin, args, { - stdio: ['pipe', 'pipe', 'pipe'], - env: { ...process.env, PATH: enrichedPath! }, - }) - : spawn( - shellPath, - [useInteractive ? '-ilc' : '-lc', [bin, ...args].map(shellQuote).join(' ')], - { - stdio: ['pipe', 'pipe', 'pipe'], - env: { ...process.env, PATH: enrichedPath! }, - // Run in own process group so timeout kills the inner command - // too, not just the shell wrapper. - detached: true, - }, - ) - - let stdout = '' - let stderr = '' - let timedOut = false - - const killGroup = () => { - if (!isWin && proc.pid) { - // Kill the whole process group so the shell wrapper AND the - // inner command both terminate. Without this, killing only the - // shell leaves the inner command orphaned and stdio pipes open. - try { process.kill(-proc.pid, 'SIGKILL') } catch { proc.kill('SIGKILL') } - } else { - proc.kill() - } - } - - const timer = setTimeout(() => { - timedOut = true - killGroup() - // Resolve immediately rather than waiting for stdio drain — the - // 'close' handler may not fire promptly when the process group is - // killed because orphaned descendants can keep pipes open. - reject(new Error(`Process timed out after ${timeout}ms`)) - }, timeout) - - proc.stdout.on('data', (d: Buffer) => { stdout += d.toString() }) - proc.stderr.on('data', (d: Buffer) => { stderr += d.toString() }) - - proc.on('close', () => { - clearTimeout(timer) - if (!timedOut) { - resolve({ stdout, stderr, exitCode: proc.exitCode ?? 
1 }) - } - }) - - proc.on('error', (err) => { - clearTimeout(timer) - if (!timedOut) reject(err) - }) - }) - }, - } -} diff --git a/packages/core/src/connectors/capabilities/fetch-impl.ts b/packages/core/src/connectors/capabilities/fetch-impl.ts deleted file mode 100644 index d39b945..0000000 --- a/packages/core/src/connectors/capabilities/fetch-impl.ts +++ /dev/null @@ -1,7 +0,0 @@ -import type { FetchCapability } from '@spool-lab/connector-sdk' - -export function makeFetchCapability( - fetchFn: typeof globalThis.fetch = globalThis.fetch, -): FetchCapability { - return fetchFn -} diff --git a/packages/core/src/connectors/capabilities/index.ts b/packages/core/src/connectors/capabilities/index.ts deleted file mode 100644 index d5daf15..0000000 --- a/packages/core/src/connectors/capabilities/index.ts +++ /dev/null @@ -1,5 +0,0 @@ -export { makeFetchCapability } from './fetch-impl.js' -export { makeChromeCookiesCapability } from './cookies-chrome.js' -export { makeLogCapabilityFor } from './log-impl.js' -export { makeSqliteCapability } from './sqlite-impl.js' -export { makeExecCapability } from './exec-impl.js' diff --git a/packages/core/src/connectors/capabilities/log-impl.ts b/packages/core/src/connectors/capabilities/log-impl.ts deleted file mode 100644 index 1cecb6a..0000000 --- a/packages/core/src/connectors/capabilities/log-impl.ts +++ /dev/null @@ -1,43 +0,0 @@ -import { Effect } from 'effect' -import type { LogCapability, LogFields } from '@spool-lab/connector-sdk' - -export function makeLogCapabilityFor(connectorId: string): LogCapability { - const baseAttrs: LogFields = { 'connector.id': connectorId } - - const emit = ( - level: 'Debug' | 'Info' | 'Warning' | 'Error', - msg: string, - fields?: LogFields, - ) => { - const attrs = { ...baseAttrs, ...fields } - const effect = - level === 'Debug' ? Effect.logDebug(msg) : - level === 'Info' ? Effect.logInfo(msg) : - level === 'Warning' ? 
Effect.logWarning(msg) : - Effect.logError(msg) - Effect.runFork(effect.pipe(Effect.annotateLogs(attrs))) - } - - return { - debug(msg, fields) { emit('Debug', msg, fields) }, - info(msg, fields) { emit('Info', msg, fields) }, - warn(msg, fields) { emit('Warning', msg, fields) }, - error(msg, fields) { emit('Error', msg, fields) }, - - async span( - name: string, - fn: () => Promise, - opts?: { attributes?: LogFields }, - ): Promise { - const attrs = { ...baseAttrs, ...opts?.attributes } - return Effect.runPromise( - Effect.tryPromise({ - try: fn, - catch: e => e, - }).pipe( - Effect.withSpan(`connector.${name}`, { attributes: attrs }), - ), - ) - }, - } -} diff --git a/packages/core/src/connectors/capabilities/sqlite-impl.ts b/packages/core/src/connectors/capabilities/sqlite-impl.ts deleted file mode 100644 index d3c44f0..0000000 --- a/packages/core/src/connectors/capabilities/sqlite-impl.ts +++ /dev/null @@ -1,20 +0,0 @@ -import Database from 'better-sqlite3' -import type { SqliteCapability, SqliteDatabase, SqliteStatement } from '@spool-lab/connector-sdk' - -export function makeSqliteCapability(): SqliteCapability { - return { - openReadonly(path: string): SqliteDatabase { - const db = new Database(path, { readonly: true, fileMustExist: true }) - return { - prepare(sql: string): SqliteStatement { - const stmt = db.prepare(sql) - return { - all: (...params) => stmt.all(...params) as T[], - get: (...params) => stmt.get(...params) as T | undefined, - } - }, - close: () => { db.close() }, - } - }, - } -} diff --git a/packages/core/src/connectors/loader.test.ts b/packages/core/src/connectors/loader.test.ts deleted file mode 100644 index 2cebf88..0000000 --- a/packages/core/src/connectors/loader.test.ts +++ /dev/null @@ -1,442 +0,0 @@ -import { describe, it, expect, beforeEach, vi } from 'vitest' -import { validatePrerequisites } from './loader.js' -import { mkdtempSync, mkdirSync, writeFileSync, rmSync } from 'node:fs' -import { tmpdir } from 'node:os' -import { join } from 'node:path' -import { loadConnectors } from './loader.js' -import { ConnectorRegistry } from './registry.js' -import { TrustStore } from './trust-store.js' -import type { Connector } from '@spool-lab/connector-sdk' - -function writePkg(nodeModulesDir: string, name: string, manifest: object, entrySource: string) { - const segments = name.startsWith('@') ? 
name.split('/') : [name] - const pkgDir = join(nodeModulesDir, ...segments) - mkdirSync(pkgDir, { recursive: true }) - writeFileSync( - join(pkgDir, 'package.json'), - JSON.stringify({ - name, - version: '1.0.0', - type: 'module', - main: './index.js', - ...manifest, - }), - ) - writeFileSync(join(pkgDir, 'index.js'), entrySource) -} - -function fakeCapabilityImpls() { - return { - fetch: globalThis.fetch, - cookies: { get: async () => [] }, - sqlite: { openReadonly: () => { throw new Error('not available') } }, - exec: { run: async () => ({ stdout: '', stderr: '', exitCode: 0 }) }, - logFor: () => ({ - debug: () => {}, info: () => {}, warn: () => {}, error: () => {}, - span: async (_name: string, fn: () => Promise) => fn(), - }), - } -} - -function silentLogger() { - return { - info: vi.fn(), - warn: vi.fn(), - error: vi.fn(), - child: function() { return this }, - } -} - -describe('loadConnectors', () => { - let connectorsDir: string - - beforeEach(() => { - connectorsDir = mkdtempSync(join(tmpdir(), 'loader-connectors-')) - }) - - function makeTrustStore(): TrustStore { - const dir = mkdtempSync(join(tmpdir(), 'spool-trust-')) - return new TrustStore(dir) - } - - it('loads a connector that declares spool.type === "connector"', async () => { - const registry = new ConnectorRegistry() - writePkg( - join(connectorsDir, 'node_modules'), - '@spool-lab/connector-test', - { - spool: { - type: 'connector', - id: 'test', - platform: 'test', - label: 'Test', - description: 'Test', - color: '#000', - ephemeral: false, - capabilities: ['log'], - }, - }, - `export default class TestConn { - id = 'test'; platform = 'test'; label = 'Test'; - description = 'Test'; color = '#000'; ephemeral = false; - constructor(caps) { this.caps = caps } - async checkAuth() { return { ok: true } } - async fetchPage() { return { items: [], nextCursor: null } } - }`, - ) - - const report = await loadConnectors({ - connectorsDir, - capabilityImpls: fakeCapabilityImpls(), - registry, - log: silentLogger(), - trustStore: makeTrustStore(), - }) - - expect(report.loadResults.find(r => r.name === '@spool-lab/connector-test')?.status) - .toBe('loaded') - expect(registry.list().length).toBe(1) - }) - - it('manifest metadata wins over class field declarations', async () => { - const registry = new ConnectorRegistry() - writePkg( - join(connectorsDir, 'node_modules'), - '@spool-lab/connector-drift', - { - spool: { - type: 'connector', - id: 'drift', - platform: 'drift', - label: 'Manifest Label', - description: 'manifest description', - color: '#abcdef', - ephemeral: true, - capabilities: ['log'], - }, - }, - `export default class DriftConn { - id = 'drift'; platform = 'drift'; label = 'Stale Class Label'; - description = 'class description'; color = '#000000'; ephemeral = false; - constructor(caps) { this.caps = caps } - async checkAuth() { return { ok: true } } - async fetchPage() { return { items: [], nextCursor: null } } - }`, - ) - - const report = await loadConnectors({ - connectorsDir, - capabilityImpls: fakeCapabilityImpls(), - registry, - log: silentLogger(), - trustStore: makeTrustStore(), - }) - - expect(report.loadResults.find(r => r.name === '@spool-lab/connector-drift')?.status) - .toBe('loaded') - const loaded = registry.list()[0]! 
- expect(loaded.label).toBe('Manifest Label') - expect(loaded.color).toBe('#abcdef') - expect(loaded.ephemeral).toBe(true) - }) - - it('skips packages without spool.type === "connector"', async () => { - const registry = new ConnectorRegistry() - writePkg( - join(connectorsDir, 'node_modules'), - 'some-random-pkg', - { description: 'not a connector' }, - `export default {}`, - ) - - const report = await loadConnectors({ - connectorsDir, - capabilityImpls: fakeCapabilityImpls(), - registry, - log: silentLogger(), - trustStore: makeTrustStore(), - }) - - expect(report.loadResults.length).toBe(0) - expect(registry.list().length).toBe(0) - }) - - it('rejects connectors with unknown capabilities', async () => { - const registry = new ConnectorRegistry() - writePkg( - join(connectorsDir, 'node_modules'), - '@spool-lab/connector-test', - { - spool: { - type: 'connector', id: 'test', platform: 'test', label: 'Test', - description: 'Test', color: '#000', ephemeral: false, - capabilities: ['fetch', 'filesystem:read'], - }, - }, - `export default class {}`, - ) - - const log = silentLogger() - await loadConnectors({ - connectorsDir, - capabilityImpls: fakeCapabilityImpls(), - registry, - log, - }) - - const errorCalls = (log.error as any).mock.calls - expect(errorCalls.some((c: any[]) => - String(c[1]?.error ?? '').includes('filesystem:read') - )).toBe(true) - }) - - it('skips untrusted community connectors', async () => { - const registry = new ConnectorRegistry() - writePkg( - join(connectorsDir, 'node_modules'), - '@community/connector-untrusted', - { - spool: { - type: 'connector', id: 'untrusted', platform: 'test', label: 'Untrusted', - description: 'Test', color: '#000', ephemeral: false, - capabilities: ['log'], - }, - }, - `export default class {}`, - ) - - const report = await loadConnectors({ - connectorsDir, - capabilityImpls: fakeCapabilityImpls(), - registry, - log: silentLogger(), - trustStore: makeTrustStore(), - }) - - expect(report.loadResults.find(r => r.name === '@community/connector-untrusted')?.status) - .toBe('skipped') - expect(registry.list().length).toBe(0) - }) - - it('isolates crashes: one broken connector does not block others', async () => { - const registry = new ConnectorRegistry() - writePkg( - join(connectorsDir, 'node_modules'), - '@spool-lab/connector-typeless', - { - spool: { - type: 'connector', id: 'typeless', platform: 'typeless', - label: 'Typeless', description: '...', color: '#000', ephemeral: false, - capabilities: ['log'], - }, - }, - `export default class { - id = 'typeless'; platform = 'typeless'; label = 'Typeless'; - description = '...'; color = '#000'; ephemeral = false; - constructor() {} - async checkAuth() { return { ok: true } } - async fetchPage() { return { items: [], nextCursor: null } } - }`, - ) - writePkg( - join(connectorsDir, 'node_modules'), - '@spool-lab/connector-twitter-bookmarks', - { - spool: { - type: 'connector', id: 'twitter-bookmarks', platform: 'twitter', - label: 'Twitter', description: '...', color: '#000', ephemeral: false, - capabilities: ['log'], - }, - }, - `export default class { - constructor() { throw new Error('boom') } - }`, - ) - - const report = await loadConnectors({ - connectorsDir, - capabilityImpls: fakeCapabilityImpls(), - registry, - log: silentLogger(), - trustStore: makeTrustStore(), - }) - - const statuses = Object.fromEntries( - report.loadResults.map(r => [r.name, r.status]), - ) - expect(statuses['@spool-lab/connector-typeless']).toBe('loaded') - 
expect(statuses['@spool-lab/connector-twitter-bookmarks']).toBe('failed') - expect(registry.list().length).toBe(1) - }) - - it('loads multi-connector package with spool.connectors array', async () => { - const registry = new ConnectorRegistry() - writePkg( - join(connectorsDir, 'node_modules'), - '@spool-lab/connector-multi', - { - spool: { - type: 'connector', - connectors: [ - { - id: 'multi-a', - platform: 'multi', - label: 'Multi A', - description: 'A', - color: '#aaa', - ephemeral: false, - capabilities: ['log'], - }, - { - id: 'multi-b', - platform: 'multi', - label: 'Multi B', - description: 'B', - color: '#bbb', - ephemeral: true, - capabilities: ['log'], - }, - ], - }, - }, - ` - class A { - id = 'multi-a'; platform = 'multi'; label = 'Multi A'; - description = 'A'; color = '#aaa'; ephemeral = false; - constructor(caps) { this.caps = caps } - async checkAuth() { return { ok: true } } - async fetchPage() { return { items: [], nextCursor: null } } - } - class B { - id = 'multi-b'; platform = 'multi'; label = 'Multi B'; - description = 'B'; color = '#bbb'; ephemeral = true; - constructor(caps) { this.caps = caps } - async checkAuth() { return { ok: true } } - async fetchPage() { return { items: [], nextCursor: null } } - } - export const connectors = [A, B]; - `, - ) - - const report = await loadConnectors({ - connectorsDir, - capabilityImpls: fakeCapabilityImpls(), - registry, - log: silentLogger(), - trustStore: makeTrustStore(), - }) - - const loaded = report.loadResults.filter(r => r.status === 'loaded') - expect(loaded.length).toBe(2) - expect(registry.list().length).toBe(2) - expect(registry.has('multi-a')).toBe(true) - expect(registry.has('multi-b')).toBe(true) - }) - - it('loads single-connector package unchanged (backward compat)', async () => { - const registry = new ConnectorRegistry() - writePkg( - join(connectorsDir, 'node_modules'), - '@spool-lab/connector-single', - { - spool: { - type: 'connector', - id: 'single', - platform: 'test', - label: 'Single', - description: 'S', - color: '#000', - ephemeral: false, - capabilities: ['log'], - }, - }, - `export default class { - id = 'single'; platform = 'test'; label = 'Single'; - description = 'S'; color = '#000'; ephemeral = false; - constructor(caps) {} - async checkAuth() { return { ok: true } } - async fetchPage() { return { items: [], nextCursor: null } } - }`, - ) - - const report = await loadConnectors({ - connectorsDir, - capabilityImpls: fakeCapabilityImpls(), - registry, - log: silentLogger(), - trustStore: makeTrustStore(), - }) - - expect(report.loadResults.find(r => r.name === '@spool-lab/connector-single')?.status).toBe('loaded') - expect(registry.list().length).toBe(1) - }) - - it('throws when plugin uses an undeclared capability at runtime', async () => { - const registry = new ConnectorRegistry() - writePkg( - join(connectorsDir, 'node_modules'), - '@spool-lab/connector-test', - { - spool: { - type: 'connector', id: 'test', platform: 'test', label: 'Test', - description: 'Test', color: '#000', ephemeral: false, - capabilities: ['log'], - }, - }, - `export default class { - id = 'test'; platform = 'test'; label = 'Test'; - description = 'Test'; color = '#000'; ephemeral = false; - constructor(caps) { this.caps = caps } - async checkAuth() { return { ok: true } } - async fetchPage() { - await this.caps.fetch('https://example.com') - return { items: [], nextCursor: null } - } - }`, - ) - - const report = await loadConnectors({ - connectorsDir, - capabilityImpls: fakeCapabilityImpls(), - registry, - log: 
silentLogger(), - trustStore: makeTrustStore(), - }) - - expect(report.loadResults.find(r => r.name === '@spool-lab/connector-test')?.status) - .toBe('loaded') - const connector = registry.list()[0] - await expect(connector.fetchPage({ cursor: null, sinceItemId: null, phase: 'forward', signal: new AbortController().signal })) - .rejects.toThrow(/not declared/) - }) -}) - -describe('validatePrerequisites', () => { - const validBase = { - id: 'req1', - name: 'Req One', - kind: 'exec' as const, - detect: { type: 'exec' as const, command: 'req1', args: ['--version'] }, - install: { kind: 'exec' as const, url: 'https://example.com' }, - } - - it('accepts a valid prerequisite', () => { - expect(validatePrerequisites([validBase], 'pkg')).toHaveLength(1) - }) - - it('throws on duplicate prerequisite id', () => { - expect(() => validatePrerequisites([validBase, validBase], 'pkg')) - .toThrow('Prerequisite req1 in pkg: duplicate id') - }) - - it('throws on missing required fields', () => { - expect(() => validatePrerequisites([{ id: 'x' }], 'pkg')) - .toThrow(/missing required fields/) - }) - - it('throws on install.kind mismatch', () => { - const bad = { ...validBase, install: { ...validBase.install, kind: 'browser-extension' as any } } - expect(() => validatePrerequisites([bad], 'pkg')) - .toThrow(/install\.kind/) - }) -}) diff --git a/packages/core/src/connectors/loader.ts b/packages/core/src/connectors/loader.ts deleted file mode 100644 index 9f8fe22..0000000 --- a/packages/core/src/connectors/loader.ts +++ /dev/null @@ -1,436 +0,0 @@ -import { existsSync, readdirSync, readFileSync } from 'node:fs' -import { join } from 'node:path' -import { pathToFileURL } from 'node:url' -import type { - Connector, - ConnectorCapabilities, - CookiesCapability, - ExecCapability, - FetchCapability, - LogCapability, - Prerequisite, - PrerequisitesCapability, - SqliteCapability, -} from '@spool-lab/connector-sdk' -import { SyncError, SyncErrorCode, KNOWN_CAPABILITIES_V1 } from '@spool-lab/connector-sdk' -import type { ConnectorRegistry } from './registry.js' -interface BaseLogger { - info(msg: string, fields?: Record): void - warn(msg: string, fields?: Record): void - error(msg: string, fields?: Record): void -} -import { TrustStore } from './trust-store.js' - -export function validatePrerequisites(prereqs: unknown[], packageName: string): Prerequisite[] { - const result: Prerequisite[] = [] - const seen = new Set() - for (const raw of prereqs) { - const p = raw as Prerequisite - if (!p.id || !p.name || !p.kind || !p.detect || !p.install) { - throw new Error(`Invalid prerequisite in ${packageName}: missing required fields`) - } - if (p.install.kind !== p.kind) { - throw new Error(`Prerequisite ${p.id} in ${packageName}: install.kind "${p.install.kind}" must match kind "${p.kind}"`) - } - if (p.kind === 'browser-extension') { - const inst = p.install as { webstoreUrl?: string; manual?: unknown } - if (!inst.webstoreUrl && !inst.manual) { - throw new Error(`Prerequisite ${p.id} in ${packageName}: browser-extension requires webstoreUrl or manual`) - } - } - if (p.minVersion && !(p.detect.type === 'exec' && p.detect.versionRegex)) { - throw new Error(`Prerequisite ${p.id} in ${packageName}: minVersion requires detect.versionRegex`) - } - if (seen.has(p.id)) { - throw new Error(`Prerequisite ${p.id} in ${packageName}: duplicate id`) - } - for (const req of p.requires ?? 
[]) { - if (!seen.has(req)) { - throw new Error(`Prerequisite ${p.id} in ${packageName}: requires "${req}" must appear earlier in array`) - } - } - seen.add(p.id) - result.push(p) - } - return result -} - -export interface CapabilityImpls { - fetch: FetchCapability - cookies: CookiesCapability - sqlite: SqliteCapability - exec: ExecCapability - logFor(connectorId: string): LogCapability - /** Returns the prerequisites capability for the given package id, or undefined if not supported. */ - prerequisitesFor?: (packageId: string) => PrerequisitesCapability -} - -export interface LoaderLogger extends BaseLogger { - child?(attrs: Record): LoaderLogger -} - -export interface LoadDeps { - connectorsDir: string - capabilityImpls: CapabilityImpls - registry: ConnectorRegistry - log: LoaderLogger - trustStore: TrustStore -} - -export type LoadResult = - | { status: 'loaded'; name: string; version: string } - | { status: 'failed'; name: string; error: unknown } - | { status: 'skipped'; name: string; reason: 'not-in-allowlist' | 'bad-manifest' } - -export interface LoadReport { - loadResults: LoadResult[] -} - -interface PkgInfo { - dir: string - name: string - version: string - manifest: { - id: string - platform: string - label: string - description: string - color: string - ephemeral: boolean - capabilities: string[] - } - main: string - multi: boolean - prerequisites: Prerequisite[] -} - -const KNOWN_CAPS_SET = new Set(KNOWN_CAPABILITIES_V1) - -const importedModules = new Map() - -export async function loadConnectors(deps: LoadDeps): Promise { - const { connectorsDir, log } = deps - - importedModules.clear() - deps.registry.clear() - - const discovered = discoverConnectorPackages(connectorsDir, log) - - const loadResults: LoadResult[] = [] - for (const pkg of discovered) { - const result = await loadOneConnector(pkg, deps) - loadResults.push(result) - } - - return { loadResults } -} - -function discoverConnectorPackages( - connectorsDir: string, - log: LoaderLogger, -): PkgInfo[] { - const nodeModules = join(connectorsDir, 'node_modules') - if (!existsSync(nodeModules)) return [] - - const results: PkgInfo[] = [] - let topEntries: string[] - try { - topEntries = readdirSync(nodeModules) - } catch (err) { - log.error('failed to read node_modules', { error: String(err) }) - return results - } - - for (const entry of topEntries) { - if (entry.startsWith('.')) continue - const entryPath = join(nodeModules, entry) - - if (entry.startsWith('@')) { - let scopedEntries: string[] - try { - scopedEntries = readdirSync(entryPath) - } catch { - continue - } - for (const sub of scopedEntries) { - if (sub.startsWith('.')) continue - results.push(...tryReadConnectorManifest(join(entryPath, sub), log)) - } - } else { - results.push(...tryReadConnectorManifest(entryPath, log)) - } - } - - return results -} - -function tryReadConnectorManifest( - pkgDir: string, - log: LoaderLogger, -): PkgInfo[] { - const pkgJsonPath = join(pkgDir, 'package.json') - if (!existsSync(pkgJsonPath)) return [] - - let json: any - try { - json = JSON.parse(readFileSync(pkgJsonPath, 'utf8')) - } catch (err) { - log.warn('invalid package.json', { path: pkgJsonPath, error: String(err) }) - return [] - } - - if (json?.spool?.type !== 'connector') return [] - - const packageName = String(json.name) - let prerequisites: Prerequisite[] - try { - prerequisites = validatePrerequisites( - Array.isArray(json.spool.prerequisites) ? 
json.spool.prerequisites : [], - packageName, - ) - } catch (err) { - log.error('invalid prerequisites in package', { package: packageName, error: String(err) }) - return [] - } - - // Multi-connector package: spool.connectors is an array - if (Array.isArray(json.spool.connectors)) { - const results: PkgInfo[] = [] - for (const entry of json.spool.connectors) { - const declared: string[] = Array.isArray(entry.capabilities) ? entry.capabilities : [] - const unknown = declared.filter(c => !KNOWN_CAPS_SET.has(c)) - if (unknown.length > 0) { - log.error('unknown capability in spool.connectors entry', { - package: packageName, - connectorId: entry.id, - unknown, - error: `Unknown capability "${unknown[0]}" — known v1 values: ${[...KNOWN_CAPS_SET].join(', ')}`, - }) - continue - } - results.push({ - dir: pkgDir, - name: packageName, - version: String(json.version ?? '0.0.0'), - manifest: { - id: String(entry.id ?? ''), - platform: String(entry.platform ?? ''), - label: String(entry.label ?? ''), - description: String(entry.description ?? ''), - color: String(entry.color ?? '#888'), - ephemeral: Boolean(entry.ephemeral), - capabilities: declared, - }, - main: String(json.main ?? 'dist/index.js'), - multi: true, - prerequisites, - }) - } - return results - } - - // Single-connector package (original path) - const declared: string[] = Array.isArray(json.spool.capabilities) - ? json.spool.capabilities - : [] - - const unknown = declared.filter(c => !KNOWN_CAPS_SET.has(c)) - if (unknown.length > 0) { - log.error('unknown capability in spool.capabilities', { - package: packageName, - unknown, - error: `Unknown capability "${unknown[0]}" — known v1 values: ${[...KNOWN_CAPS_SET].join(', ')}`, - }) - return [] - } - - return [{ - dir: pkgDir, - name: packageName, - version: String(json.version ?? '0.0.0'), - manifest: { - id: String(json.spool.id ?? ''), - platform: String(json.spool.platform ?? ''), - label: String(json.spool.label ?? ''), - description: String(json.spool.description ?? ''), - color: String(json.spool.color ?? '#888'), - ephemeral: Boolean(json.spool.ephemeral), - capabilities: declared, - }, - main: String(json.main ?? 
'dist/index.js'), - multi: false, - prerequisites, - }] -} - -async function loadOneConnector( - pkg: PkgInfo, - deps: LoadDeps, -): Promise { - if (!deps.trustStore.isTrusted(pkg.name)) { - deps.log.info('skip untrusted connector', { name: pkg.name, id: pkg.manifest.id }) - return { status: 'skipped', name: pkg.name, reason: 'not-in-allowlist' } - } - - try { - const entryPath = join(pkg.dir, pkg.main) - if (!existsSync(entryPath)) { - throw new Error(`entry file not found: ${entryPath}`) - } - - let mod: any - if (importedModules.has(entryPath)) { - mod = importedModules.get(entryPath) - } else { - const modUrl = pathToFileURL(entryPath).href - mod = await import(modUrl) - importedModules.set(entryPath, mod) - } - - const caps = buildCapabilities(pkg.manifest.capabilities, pkg.manifest.id, pkg.name, deps.capabilityImpls) - let ConnectorClass: any - - if (pkg.multi) { - // Multi-connector: find the class from mod.connectors by matching id - const classes: any[] = mod.connectors - if (!Array.isArray(classes)) { - throw new Error('multi-connector package must export a `connectors` array') - } - ConnectorClass = null - for (const Cls of classes) { - if (typeof Cls !== 'function') continue - const probe: Connector = new Cls(caps) - if (probe.id === pkg.manifest.id) { - ConnectorClass = Cls - break - } - } - if (!ConnectorClass) { - throw new Error(`no connector class with id="${pkg.manifest.id}" found in connectors array`) - } - } else { - ConnectorClass = - mod.default ?? - mod[pkg.manifest.id] ?? - (typeof mod === 'function' ? mod : null) - } - - if (typeof ConnectorClass !== 'function') { - throw new Error('module does not export a connector class') - } - - const instance: Connector = new ConnectorClass(caps) - applyManifestMetadata(instance, pkg, deps.log) - - deps.registry.register(instance) - const connectorPkg: import('./types.js').ConnectorPackage = { - id: pkg.name, - packageName: pkg.name, - rootDir: pkg.dir, - connectors: [instance], - } - if (pkg.prerequisites.length > 0) { - connectorPkg.prerequisites = pkg.prerequisites - } - deps.registry.registerPackage(connectorPkg) - deps.log.info('loaded connector', { name: pkg.name, id: pkg.manifest.id, version: pkg.version }) - return { status: 'loaded', name: pkg.name, version: pkg.version } - } catch (err) { - deps.log.error('failed to load connector', { - name: pkg.name, - id: pkg.manifest.id, - error: err instanceof Error ? err.message : String(err), - stack: err instanceof Error ? err.stack : undefined, - }) - return { status: 'failed', name: pkg.name, error: err } - } -} - -function buildCapabilities( - declared: string[], - connectorId: string, - packageId: string, - impls: CapabilityImpls, -): ConnectorCapabilities { - const caps: ConnectorCapabilities = { - fetch: declared.includes('fetch') - ? impls.fetch - : (undefinedCapability('fetch') as FetchCapability), - cookies: declared.includes('cookies:chrome') - ? impls.cookies - : (undefinedCapability('cookies:chrome') as CookiesCapability), - log: declared.includes('log') - ? impls.logFor(connectorId) - : (undefinedCapability('log') as LogCapability), - sqlite: declared.includes('sqlite') - ? impls.sqlite - : (undefinedCapability('sqlite') as SqliteCapability), - exec: declared.includes('exec') - ? 
impls.exec - : (undefinedCapability('exec') as ExecCapability), - } - if (declared.includes('prerequisites') && impls.prerequisitesFor) { - caps.prerequisites = impls.prerequisitesFor(packageId) - } - return caps -} - -function undefinedCapability(name: string): unknown { - return new Proxy( - function undef() { - throw makeUndeclaredError(name, 'call') - }, - { - get(_target, prop) { - return () => { - throw makeUndeclaredError(name, String(prop)) - } - }, - apply() { - throw makeUndeclaredError(name, 'call') - }, - }, - ) -} - -function makeUndeclaredError(name: string, accessor: string): SyncError { - return new SyncError( - SyncErrorCode.CONNECTOR_ERROR, - `Capability "${name}" used (via .${accessor}) but not declared in spool.capabilities`, - ) -} - -/** - * Manifest is the single source of truth for connector metadata. - * - * Any `readonly id/platform/label/description/color/ephemeral` declared on the - * connector class is treated as a default and overwritten with the manifest - * value so that runtime behavior always matches what the package declared. - * - * If the class field disagrees with the manifest, we log a warning so authors - * can clean it up — but loading proceeds, since silently dropping a connector - * because two redundant declarations drifted is worse than the inconsistency. - */ -function applyManifestMetadata(instance: Connector, pkg: PkgInfo, log: LoadDeps['log']): void { - const fields: Array = [ - 'id', 'platform', 'label', 'description', 'color', 'ephemeral', - ] - for (const field of fields) { - const classValue = (instance as any)[field] - const manifestValue = pkg.manifest[field] - if (classValue !== undefined && classValue !== manifestValue) { - log.warn('connector class field disagrees with manifest; manifest wins', { - package: pkg.name, - field, - classValue, - manifestValue, - }) - } - Object.defineProperty(instance, field, { - value: manifestValue, - writable: false, - configurable: true, - enumerable: true, - }) - } -} diff --git a/packages/core/src/connectors/npm-install.test.ts b/packages/core/src/connectors/npm-install.test.ts deleted file mode 100644 index b404549..0000000 --- a/packages/core/src/connectors/npm-install.test.ts +++ /dev/null @@ -1,158 +0,0 @@ -import { describe, it, expect, vi, afterEach } from 'vitest' -import { mkdtempSync, mkdirSync, writeFileSync, readFileSync, symlinkSync, rmSync, existsSync, lstatSync } from 'node:fs' -import { join } from 'node:path' -import { tmpdir } from 'node:os' -import * as tar from 'tar' -import { registryUrl, checkForUpdates, downloadAndInstall } from './npm-install.js' - -describe('registryUrl', () => { - it('builds correct URL for scoped package', () => { - expect(registryUrl('@spool-lab/connector-hackernews-hot')) - .toBe('https://registry.npmjs.org/@spool-lab%2Fconnector-hackernews-hot/latest') - }) - - it('builds correct URL for unscoped package', () => { - expect(registryUrl('connector-foo')) - .toBe('https://registry.npmjs.org/connector-foo/latest') - }) -}) - -function mockNpmResponse(name: string, version: string): Response { - return new Response(JSON.stringify({ - name, - version, - dist: { tarball: `https://registry.npmjs.org/${name}/-/${name}-${version}.tgz` }, - spool: { type: 'connector' }, - })) -} - -describe('checkForUpdates', () => { - it('returns update when npm has a newer version', async () => { - const fetchFn = vi.fn().mockResolvedValue(mockNpmResponse('@spool-lab/hn', '0.2.0')) - const result = await checkForUpdates( - [{ packageName: '@spool-lab/hn', currentVersion: 
'0.1.0' }], - fetchFn as unknown as typeof fetch, - ) - expect(result.size).toBe(1) - expect(result.get('@spool-lab/hn')).toEqual({ current: '0.1.0', latest: '0.2.0' }) - }) - - it('returns empty when versions are equal', async () => { - const fetchFn = vi.fn().mockResolvedValue(mockNpmResponse('@spool-lab/hn', '0.1.0')) - const result = await checkForUpdates( - [{ packageName: '@spool-lab/hn', currentVersion: '0.1.0' }], - fetchFn as unknown as typeof fetch, - ) - expect(result.size).toBe(0) - }) - - it('returns empty when installed version is newer', async () => { - const fetchFn = vi.fn().mockResolvedValue(mockNpmResponse('@spool-lab/hn', '0.1.0')) - const result = await checkForUpdates( - [{ packageName: '@spool-lab/hn', currentVersion: '0.2.0' }], - fetchFn as unknown as typeof fetch, - ) - expect(result.size).toBe(0) - }) - - it('silently skips connectors that fail to fetch', async () => { - const fetchFn = vi.fn() - .mockResolvedValueOnce(mockNpmResponse('@spool-lab/hn', '0.2.0')) - .mockRejectedValueOnce(new Error('network error')) - const result = await checkForUpdates( - [ - { packageName: '@spool-lab/hn', currentVersion: '0.1.0' }, - { packageName: '@spool-lab/broken', currentVersion: '0.1.0' }, - ], - fetchFn as unknown as typeof fetch, - ) - expect(result.size).toBe(1) - expect(result.has('@spool-lab/hn')).toBe(true) - expect(result.has('@spool-lab/broken')).toBe(false) - }) - - it('checks multiple connectors in parallel', async () => { - const fetchFn = vi.fn() - .mockResolvedValueOnce(mockNpmResponse('@spool-lab/a', '0.3.0')) - .mockResolvedValueOnce(mockNpmResponse('@spool-lab/b', '0.1.0')) - const result = await checkForUpdates( - [ - { packageName: '@spool-lab/a', currentVersion: '0.1.0' }, - { packageName: '@spool-lab/b', currentVersion: '0.1.0' }, - ], - fetchFn as unknown as typeof fetch, - ) - expect(result.size).toBe(1) - expect(result.get('@spool-lab/a')).toEqual({ current: '0.1.0', latest: '0.3.0' }) - expect(fetchFn).toHaveBeenCalledTimes(2) - }) -}) - -describe('downloadAndInstall', () => { - const tempDirs: string[] = [] - afterEach(() => { - for (const d of tempDirs) rmSync(d, { recursive: true, force: true }) - tempDirs.length = 0 - }) - function tmp(): string { - const d = mkdtempSync(join(tmpdir(), 'npm-install-test-')) - tempDirs.push(d) - return d - } - - async function buildTarball(sourceDir: string, outPath: string): Promise { - // npm packs with contents under a top-level "package/" dir that tar.extract strips. 
- const stage = mkdtempSync(join(tmpdir(), 'npm-pack-stage-')) - tempDirs.push(stage) - const pkgDir = join(stage, 'package') - mkdirSync(pkgDir) - for (const name of ['package.json', 'index.js']) { - const src = join(sourceDir, name) - if (existsSync(src)) writeFileSync(join(pkgDir, name), readFileSync(src)) - } - await tar.create({ gzip: true, file: outPath, cwd: stage }, ['package']) - } - - function mockFetch(registryJson: object, tarballBytes: Buffer): typeof fetch { - return vi.fn(async (url: string) => { - if (String(url).endsWith('.tgz')) { - return new Response(tarballBytes, { status: 200 }) - } - return new Response(JSON.stringify(registryJson), { status: 200 }) - }) as unknown as typeof fetch - } - - it('replaces a broken symlink at installPath', async () => { - const src = tmp() - writeFileSync(join(src, 'package.json'), JSON.stringify({ - name: '@spool-lab/connector-foo', - version: '0.1.0', - spool: { type: 'connector' }, - })) - writeFileSync(join(src, 'index.js'), 'module.exports = {}\n') - const tarballPath = join(tmp(), 'pkg.tgz') - await buildTarball(src, tarballPath) - - const connectorsDir = tmp() - const scopeDir = join(connectorsDir, 'node_modules', '@spool-lab') - mkdirSync(scopeDir, { recursive: true }) - const installPath = join(scopeDir, 'connector-foo') - // Simulate a broken dev symlink left over from a removed worktree. - symlinkSync('/nonexistent/worktree/packages/connectors/foo', installPath) - - const fetchFn = mockFetch({ - name: '@spool-lab/connector-foo', - version: '0.1.0', - dist: { tarball: 'https://registry.npmjs.org/@spool-lab/connector-foo/-/connector-foo-0.1.0.tgz' }, - spool: { type: 'connector' }, - }, readFileSync(tarballPath)) - - const result = await downloadAndInstall('@spool-lab/connector-foo', connectorsDir, fetchFn) - - expect(result.name).toBe('@spool-lab/connector-foo') - expect(result.version).toBe('0.1.0') - expect(lstatSync(installPath).isDirectory()).toBe(true) - expect(existsSync(join(installPath, 'package.json'))).toBe(true) - expect(existsSync(join(installPath, 'index.js'))).toBe(true) - }) -}) diff --git a/packages/core/src/connectors/npm-install.ts b/packages/core/src/connectors/npm-install.ts deleted file mode 100644 index eb491bd..0000000 --- a/packages/core/src/connectors/npm-install.ts +++ /dev/null @@ -1,117 +0,0 @@ -import { mkdirSync, createWriteStream, existsSync, rmSync } from 'node:fs' -import { join } from 'node:path' -import { pipeline } from 'node:stream/promises' -import { tmpdir } from 'node:os' -import * as tar from 'tar' -import * as semver from 'semver' - -export interface NpmPackageInfo { - name: string - version: string - tarballUrl: string - isConnector: boolean - label: string | null - description: string | null -} - -export interface InstallResult { - name: string - version: string - installPath: string -} - -export interface UpdateInfo { - current: string - latest: string -} - -export function registryUrl(packageName: string): string { - const encoded = packageName.includes('/') - ? 
packageName.replace('/', '%2F') - : packageName - return `https://registry.npmjs.org/${encoded}/latest` -} - -export async function resolveNpmPackage( - packageName: string, - fetchFn: typeof globalThis.fetch, -): Promise<NpmPackageInfo> { - const url = registryUrl(packageName) - const res = await fetchFn(url) - if (!res.ok) { - throw new Error(`npm registry returned ${res.status} for ${packageName}`) - } - const data = await res.json() as Record<string, unknown> - const name = data['name'] as string - const version = data['version'] as string - const dist = data['dist'] as Record<string, unknown> - const tarballUrl = dist['tarball'] as string - const spool = data['spool'] as Record<string, unknown> | undefined - const isConnector = spool?.['type'] === 'connector' - - const label = typeof spool?.['label'] === 'string' ? spool['label'] : null - const description = typeof spool?.['description'] === 'string' ? spool['description'] : null - - return { name, version, tarballUrl, isConnector, label, description } -} - -export async function downloadAndInstall( - packageName: string, - connectorsDir: string, - fetchFn: typeof globalThis.fetch, -): Promise<InstallResult> { - const info = await resolveNpmPackage(packageName, fetchFn) - - if (!info.isConnector) { - throw new Error(`Package "${packageName}" is not a spool connector (missing spool.type: "connector")`) - } - - // Download tarball to temp file - const tmpPath = join(tmpdir(), `spool-install-${Date.now()}.tgz`) - const res = await fetchFn(info.tarballUrl) - if (!res.ok || !res.body) { - throw new Error(`Failed to download tarball: ${res.status}`) - } - const fileStream = createWriteStream(tmpPath) - await pipeline(res.body as unknown as NodeJS.ReadableStream, fileStream) - - // Extract to node_modules - const nameSegments = info.name.startsWith('@') ? info.name.split('/') : [info.name] - const installPath = join(connectorsDir, 'node_modules', ...nameSegments) - // Clear any stale entry first: a broken dev symlink (from a removed - // worktree) would cause mkdirSync to ENOENT by following the dangling link. - rmSync(installPath, { recursive: true, force: true }) - mkdirSync(installPath, { recursive: true }) - await tar.extract({ file: tmpPath, cwd: installPath, strip: 1 }) - - return { name: info.name, version: info.version, installPath } -} - -export async function checkForUpdates( - connectors: Array<{ packageName: string; currentVersion: string }>, - fetchFn: typeof globalThis.fetch, -): Promise<Map<string, UpdateInfo>> { - const results = new Map<string, UpdateInfo>() - const checks = connectors.map(async ({ packageName, currentVersion }) => { - try { - const info = await resolveNpmPackage(packageName, fetchFn) - if (semver.gt(info.version, currentVersion)) { - results.set(packageName, { current: currentVersion, latest: info.version }) - } - } catch { - // Network error or delisted package — skip silently - } - }) - await Promise.all(checks) - return results -} - -export function uninstallConnector( - packageName: string, - connectorsDir: string, -): void { - const nameSegments = packageName.startsWith('@') ? 
packageName.split('/') : [packageName] - const installPath = join(connectorsDir, 'node_modules', ...nameSegments) - - rmSync(installPath, { recursive: true, force: true }) -} diff --git a/packages/core/src/connectors/prerequisites.test.ts b/packages/core/src/connectors/prerequisites.test.ts deleted file mode 100644 index bcc29ce..0000000 --- a/packages/core/src/connectors/prerequisites.test.ts +++ /dev/null @@ -1,227 +0,0 @@ -import { describe, it, expect, vi } from 'vitest' -import { PrerequisiteChecker } from './prerequisites.js' -import { validatePrerequisites } from './loader.js' -import type { Prerequisite } from '@spool-lab/connector-sdk' -import type { ConnectorPackage } from './types.js' - -function mkPkg(id: string, prerequisites: Prerequisite[]): ConnectorPackage { - return { - id, - packageName: id, - rootDir: '/fake', - connectors: [], - prerequisites, - } as unknown as ConnectorPackage -} - -describe('PrerequisiteChecker', () => { - it('marks exec-detect as ok when command succeeds', async () => { - const exec = { run: vi.fn().mockResolvedValue({ exitCode: 0, stdout: 'v1.0.0\n', stderr: '' }) } - const checker = new PrerequisiteChecker(exec as any) - const pkg = mkPkg('p1', [ - { - id: 'tool', - name: 'Tool', - kind: 'cli', - detect: { type: 'exec', command: 'tool', args: ['--version'] }, - install: { kind: 'cli', command: { darwin: 'brew install tool' } }, - }, - ]) - const steps = await checker.check(pkg) - expect(steps).toHaveLength(1) - expect(steps[0].status).toBe('ok') - }) - - it('marks missing when exec throws ENOENT', async () => { - const exec = { run: vi.fn().mockRejectedValue(new Error('ENOENT')) } - const checker = new PrerequisiteChecker(exec as any) - const pkg = mkPkg('p1', [ - { - id: 'tool', - name: 'Tool', - kind: 'cli', - detect: { type: 'exec', command: 'missing', args: [] }, - install: { kind: 'cli', command: { darwin: 'brew install tool' } }, - }, - ]) - const steps = await checker.check(pkg) - expect(steps[0].status).toBe('missing') - }) - - it('accepts zero-padded CalVer (e.g. 
yt-dlp 2026.03.17) by stripping leading zeros', async () => { - const exec = { run: vi.fn().mockResolvedValue({ exitCode: 0, stdout: '2026.03.17\n', stderr: '' }) } - const checker = new PrerequisiteChecker(exec as any) - const pkg = mkPkg('p1', [ - { - id: 'yt-dlp', - name: 'yt-dlp', - kind: 'cli', - detect: { type: 'exec', command: 'yt-dlp', args: ['--version'], versionRegex: '(\\d{4}\\.\\d{2}\\.\\d{2})' }, - minVersion: '2024.01.01', - install: { kind: 'cli', command: { darwin: 'brew install yt-dlp' } }, - }, - ]) - const steps = await checker.check(pkg) - expect(steps[0].status).toBe('ok') - expect(steps[0].detectedVersion).toBe('2026.03.17') - }) - - it('correctly compares zero-padded CalVer across years/months', async () => { - const exec = { run: vi.fn().mockResolvedValue({ exitCode: 0, stdout: '2024.02.05\n', stderr: '' }) } - const checker = new PrerequisiteChecker(exec as any) - const pkg = mkPkg('p1', [ - { - id: 'yt-dlp', - name: 'yt-dlp', - kind: 'cli', - detect: { type: 'exec', command: 'yt-dlp', args: ['--version'], versionRegex: '(\\d{4}\\.\\d{2}\\.\\d{2})' }, - minVersion: '2024.03.01', - install: { kind: 'cli', command: { darwin: 'brew install yt-dlp' } }, - }, - ]) - const steps = await checker.check(pkg) - expect(steps[0].status).toBe('outdated') - expect(steps[0].detectedVersion).toBe('2024.02.05') - }) - - it('marks outdated when version is below minVersion', async () => { - const exec = { run: vi.fn().mockResolvedValue({ exitCode: 0, stdout: 'v0.2.1\n', stderr: '' }) } - const checker = new PrerequisiteChecker(exec as any) - const pkg = mkPkg('p1', [ - { - id: 'tool', - name: 'Tool', - kind: 'cli', - detect: { type: 'exec', command: 'tool', args: ['--version'], versionRegex: 'v?(\\d+\\.\\d+\\.\\d+)' }, - minVersion: '0.3.0', - install: { kind: 'cli', command: { darwin: 'brew install tool' } }, - }, - ]) - const steps = await checker.check(pkg) - expect(steps[0].status).toBe('outdated') - expect(steps[0].detectedVersion).toBe('0.2.1') - expect(steps[0].minVersion).toBe('0.3.0') - }) - - it('marks pending when upstream requires is not ok', async () => { - const exec = { run: vi.fn().mockRejectedValue(new Error('ENOENT')) } - const checker = new PrerequisiteChecker(exec as any) - const pkg = mkPkg('p1', [ - { - id: 'upstream', - name: 'Upstream', - kind: 'cli', - detect: { type: 'exec', command: 'upstream', args: [] }, - install: { kind: 'cli', command: { darwin: 'install upstream' } }, - }, - { - id: 'downstream', - name: 'Downstream', - kind: 'browser-extension', - requires: ['upstream'], - detect: { type: 'exec', command: 'check', args: [] }, - install: { kind: 'browser-extension', manual: { downloadUrl: 'https://x', steps: ['a'] } }, - }, - ]) - const steps = await checker.check(pkg) - expect(steps[0].status).toBe('missing') - expect(steps[1].status).toBe('pending') - expect(exec.run).toHaveBeenCalledTimes(1) - }) - - it('uses matchStdout over version when both present', async () => { - const exec = { run: vi.fn().mockResolvedValue({ exitCode: 0, stdout: '[OK] Extension connected v0.1.0', stderr: '' }) } - const checker = new PrerequisiteChecker(exec as any) - const pkg = mkPkg('p1', [ - { - id: 'ext', - name: 'Ext', - kind: 'browser-extension', - detect: { - type: 'exec', - command: 'tool', - args: ['doctor'], - matchStdout: '\\[OK\\].*Extension', - versionRegex: 'v?(\\d+\\.\\d+\\.\\d+)', - }, - minVersion: '99.0.0', - install: { kind: 'browser-extension', manual: { downloadUrl: 'https://x', steps: ['a'] } }, - }, - ]) - const steps = await checker.check(pkg) - 
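// Editor's note (paraphrasing detectOne's precedence from the deleted
// prerequisites.ts later in this patch): matchStdout is evaluated before any
// version comparison, so the deliberately impossible minVersion of 99.0.0
// never comes into play once the stdout regex matches. Roughly:
//
//   if (p.detect.matchStdout) return stdoutMatches ? 'ok' : 'missing'   // wins here
//   if (p.detect.versionRegex && p.minVersion) return compareVersions() // skipped
//   return result.exitCode === 0 ? 'ok' : 'missing'                     // skipped
//
// (`stdoutMatches` / `compareVersions` are shorthand, not real helpers.)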
expect(steps[0].status).toBe('ok') - }) - - it('marks error when detected version is not parseable as semver', async () => { - const exec = { run: vi.fn().mockResolvedValue({ exitCode: 0, stdout: 'garbage-version', stderr: '' }) } - const checker = new PrerequisiteChecker(exec as any) - const pkg = mkPkg('p1', [ - { - id: 'tool', - name: 'Tool', - kind: 'cli', - detect: { type: 'exec', command: 'tool', args: ['--version'], versionRegex: '(.+)' }, - minVersion: '0.3.0', - install: { kind: 'cli', command: { darwin: 'install' } }, - }, - ]) - const steps = await checker.check(pkg) - expect(steps[0].status).toBe('error') - expect(steps[0].hint).toMatch(/parse/i) - }) - - it('dedupes concurrent check calls for the same package', async () => { - const exec = { - run: vi.fn().mockImplementation(() => new Promise(resolve => setTimeout(() => resolve({ exitCode: 0, stdout: 'v1.0.0', stderr: '' }), 10))), - } - const checker = new PrerequisiteChecker(exec as any) - const pkg = mkPkg('p1', [ - { - id: 'tool', - name: 'Tool', - kind: 'cli', - detect: { type: 'exec', command: 'tool', args: ['--version'] }, - install: { kind: 'cli', command: { darwin: 'install' } }, - }, - ]) - await Promise.all([checker.check(pkg), checker.check(pkg), checker.check(pkg)]) - expect(exec.run).toHaveBeenCalledTimes(1) - }) -}) - -describe('validatePrerequisites', () => { - it('rejects install.kind mismatch', () => { - expect(() => validatePrerequisites( - [{ id: 'a', name: 'A', kind: 'cli', detect: { type: 'exec', command: 'a', args: [] }, install: { kind: 'browser-extension' } }], - 'p', - )).toThrow(/must match kind/) - }) - - it('rejects browser-extension without webstoreUrl or manual', () => { - expect(() => validatePrerequisites( - [{ id: 'a', name: 'A', kind: 'browser-extension', detect: { type: 'exec', command: 'a', args: [] }, install: { kind: 'browser-extension' } }], - 'p', - )).toThrow(/webstoreUrl or manual/) - }) - - it('rejects forward-referencing requires', () => { - expect(() => validatePrerequisites( - [ - { id: 'a', name: 'A', kind: 'cli', requires: ['b'], detect: { type: 'exec', command: 'a', args: [] }, install: { kind: 'cli', command: {} } }, - { id: 'b', name: 'B', kind: 'cli', detect: { type: 'exec', command: 'b', args: [] }, install: { kind: 'cli', command: {} } }, - ], - 'p', - )).toThrow(/must appear earlier/) - }) - - it('accepts valid prerequisites', () => { - const r = validatePrerequisites( - [ - { id: 'a', name: 'A', kind: 'cli', detect: { type: 'exec', command: 'a', args: [] }, install: { kind: 'cli', command: {} } }, - { id: 'b', name: 'B', kind: 'browser-extension', requires: ['a'], detect: { type: 'exec', command: 'b', args: [] }, install: { kind: 'browser-extension', webstoreUrl: 'https://x' } }, - ], - 'p', - ) - expect(r).toHaveLength(2) - }) -}) diff --git a/packages/core/src/connectors/prerequisites.ts b/packages/core/src/connectors/prerequisites.ts deleted file mode 100644 index 4e3ebe1..0000000 --- a/packages/core/src/connectors/prerequisites.ts +++ /dev/null @@ -1,115 +0,0 @@ -import type { Prerequisite, SetupStep, SetupStatus, ExecCapability } from '@spool-lab/connector-sdk' -import type { ConnectorPackage } from './types.js' -import { valid, gte } from 'semver' - -function baseStep(p: Prerequisite, status: SetupStatus, extras: Partial<SetupStep> = {}): SetupStep { - const step: SetupStep = { - id: p.id, - label: p.name, - kind: p.kind, - status, - install: p.install, - ...extras, - } - if (p.docsUrl !== undefined) step.docsUrl = p.docsUrl - if (p.minVersion !== undefined && step.minVersion 
=== undefined) step.minVersion = p.minVersion - return step -} - -export class PrerequisiteChecker { - private cache = new Map<string, SetupStep[]>() - private inFlight = new Map<string, Promise<SetupStep[]>>() - - constructor(private exec: ExecCapability) {} - - getCached(packageId: string): SetupStep[] | undefined { - return this.cache.get(packageId) - } - - invalidate(packageId: string): void { - this.cache.delete(packageId) - } - - async check(pkg: ConnectorPackage): Promise<SetupStep[]> { - const existing = this.inFlight.get(pkg.id) - if (existing) return existing - const promise = this.runCheck(pkg).finally(() => this.inFlight.delete(pkg.id)) - this.inFlight.set(pkg.id, promise) - return promise - } - - private async runCheck(pkg: ConnectorPackage): Promise<SetupStep[]> { - const prereqs = pkg.prerequisites ?? [] - const steps: SetupStep[] = [] - const okIds = new Set<string>() - - for (const p of prereqs) { - const unmet = (p.requires ?? []).filter(id => !okIds.has(id)) - if (unmet.length > 0) { - steps.push(baseStep(p, 'pending')) - continue - } - const step = await this.detectOne(p) - steps.push(step) - if (step.status === 'ok') okIds.add(p.id) - } - - this.cache.set(pkg.id, steps) - return steps - } - - private async detectOne(p: Prerequisite): Promise<SetupStep> { - if (p.detect.type !== 'exec') { - return baseStep(p, 'error', { hint: `Unknown detect type: ${(p.detect as any).type}` }) - } - const timeout = p.detect.timeoutMs ?? 5000 - let result: { exitCode: number; stdout: string; stderr: string } - try { - result = await this.exec.run(p.detect.command, p.detect.args, { timeout }) - } catch (e) { - const msg = (e as Error).message ?? '' - // TODO: fragile substring sniff — the ExecCapability contract does not - // define a recognizable timeout signal (no err.code, err.name, or - // { timedOut } flag). See packages/connector-sdk/src/capabilities.ts - // near ExecCapability for a proposed fix. - if (/timeout/i.test(msg)) return baseStep(p, 'error', { hint: 'Detection timed out' }) - return baseStep(p, 'missing') - } - - if (p.detect.matchStdout) { - const re = new RegExp(p.detect.matchStdout) - return re.test(result.stdout + result.stderr) - ? baseStep(p, 'ok') - : baseStep(p, 'missing') - } - - if (p.detect.versionRegex && p.minVersion) { - const vm = new RegExp(p.detect.versionRegex).exec(result.stdout) - if (!vm || !vm[1]) { - return baseStep(p, 'error', { hint: 'Could not parse version' }) - } - const detectedVersion = vm[1] - // Normalize for semver: strip leading zeros from numeric segments so - // zero-padded CalVer (yt-dlp, Ubuntu, Postgres) like `2026.03.17` - // validates as `2026.3.17`. Ordering is preserved. - const normalizedDetected = stripLeadingZeros(detectedVersion) - const normalizedMin = stripLeadingZeros(p.minVersion) - if (!valid(normalizedDetected)) { - return baseStep(p, 'error', { hint: 'Could not parse detected version' }) - } - if (gte(normalizedDetected, normalizedMin)) { - return baseStep(p, 'ok', { detectedVersion }) - } - return baseStep(p, 'outdated', { - detectedVersion, - hint: `Detected ${detectedVersion}, requires ≥ ${p.minVersion}`, - }) - } - - return result.exitCode === 0 ? 
baseStep(p, 'ok') : baseStep(p, 'missing') - } -} - -function stripLeadingZeros(version: string): string { - return version.replace(/(^|\.)0+(\d)/g, '$1$2') -} diff --git a/packages/core/src/connectors/registry-fetch.test.ts b/packages/core/src/connectors/registry-fetch.test.ts deleted file mode 100644 index 610b703..0000000 --- a/packages/core/src/connectors/registry-fetch.test.ts +++ /dev/null @@ -1,89 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest' -import { fetchRegistry } from './registry-fetch.js' -import { mkdirSync, writeFileSync, rmSync } from 'node:fs' -import { join } from 'node:path' -import { tmpdir } from 'node:os' - -const testCacheDir = join(tmpdir(), `registry-fetch-test-${process.pid}`) - -const sampleRegistry = { - version: 1, - connectors: [ - { - name: '@spool-lab/connector-twitter-bookmarks', - id: 'twitter-bookmarks', - platform: 'twitter', - label: 'X Bookmarks', - description: 'Your saved tweets on X', - color: '#1DA1F2', - author: 'spool-lab', - category: 'social', - firstParty: true, - bundled: true, - npm: 'https://www.npmjs.com/package/@spool-lab/connector-twitter-bookmarks', - }, - ], -} - -beforeEach(() => { - rmSync(testCacheDir, { recursive: true, force: true }) - mkdirSync(testCacheDir, { recursive: true }) -}) - -describe('fetchRegistry', () => { - it('returns connectors on successful fetch', async () => { - const fetchFn = vi.fn().mockResolvedValue(new Response(JSON.stringify(sampleRegistry))) - const result = await fetchRegistry({ fetchFn, cacheDir: testCacheDir }) - expect(result).toEqual(sampleRegistry.connectors) - }) - - it('caches result with fetchedAt timestamp', async () => { - const fetchFn = vi.fn().mockResolvedValue(new Response(JSON.stringify(sampleRegistry))) - await fetchRegistry({ fetchFn, cacheDir: testCacheDir }) - - const cached = JSON.parse( - require('node:fs').readFileSync(join(testCacheDir, 'registry-cache.json'), 'utf-8'), - ) - expect(cached.connectors).toEqual(sampleRegistry.connectors) - expect(typeof cached.fetchedAt).toBe('number') - }) - - it('falls back to cache on fetch failure', async () => { - writeFileSync( - join(testCacheDir, 'registry-cache.json'), - JSON.stringify({ connectors: sampleRegistry.connectors, fetchedAt: Date.now() }), - ) - const fetchFn = vi.fn().mockRejectedValue(new Error('network error')) - const result = await fetchRegistry({ fetchFn, cacheDir: testCacheDir }) - expect(result).toEqual(sampleRegistry.connectors) - }) - - it('returns empty array when fetch fails and no cache', async () => { - const fetchFn = vi.fn().mockRejectedValue(new Error('network error')) - const result = await fetchRegistry({ fetchFn, cacheDir: testCacheDir }) - expect(result).toEqual([]) - }) - - it('returns empty array when fetch returns non-ok response and no cache', async () => { - const fetchFn = vi.fn().mockResolvedValue(new Response('Not Found', { status: 404 })) - const result = await fetchRegistry({ fetchFn, cacheDir: testCacheDir }) - expect(result).toEqual([]) - }) - - it('falls back to cache on non-ok response', async () => { - writeFileSync( - join(testCacheDir, 'registry-cache.json'), - JSON.stringify({ connectors: sampleRegistry.connectors, fetchedAt: Date.now() }), - ) - const fetchFn = vi.fn().mockResolvedValue(new Response('Server Error', { status: 500 })) - const result = await fetchRegistry({ fetchFn, cacheDir: testCacheDir }) - expect(result).toEqual(sampleRegistry.connectors) - }) - - it('uses AbortSignal with 3s timeout', async () => { - const fetchFn = vi.fn().mockResolvedValue(new 
Response(JSON.stringify(sampleRegistry))) - await fetchRegistry({ fetchFn, cacheDir: testCacheDir }) - const call = fetchFn.mock.calls[0] - expect(call[1]?.signal).toBeInstanceOf(AbortSignal) - }) -}) diff --git a/packages/core/src/connectors/registry-fetch.ts b/packages/core/src/connectors/registry-fetch.ts deleted file mode 100644 index b58dca0..0000000 --- a/packages/core/src/connectors/registry-fetch.ts +++ /dev/null @@ -1,77 +0,0 @@ -import { readFileSync, writeFileSync, mkdirSync } from 'node:fs' -import { join } from 'node:path' -import { fileURLToPath } from 'node:url' - -const DEFAULT_REGISTRY_URL = - 'https://raw.githubusercontent.com/spool-lab/spool/main/packages/landing/public/registry.json' -const CACHE_FILE = 'registry-cache.json' -const TIMEOUT_MS = 3_000 - -export interface RegistryConnector { - name: string - id: string - platform: string - label: string - description: string - color: string - author: string - category: string - firstParty: boolean - bundled: boolean - npm: string -} - -interface FetchRegistryOpts { - fetchFn?: typeof fetch - cacheDir: string - /** Override source. HTTP(S) URL, file:// URL, or absolute filesystem path. */ - url?: string -} - -function isFileSource(url: string): boolean { - return url.startsWith('file://') || url.startsWith('/') -} - -function readLocalRegistry(url: string): RegistryConnector[] { - const path = url.startsWith('file://') ? fileURLToPath(url) : url - const raw = readFileSync(path, 'utf-8') - const data = JSON.parse(raw) as { connectors?: RegistryConnector[] } - return data.connectors ?? [] -} - -export async function fetchRegistry(opts: FetchRegistryOpts): Promise<RegistryConnector[]> { - const { fetchFn = globalThis.fetch, cacheDir, url = DEFAULT_REGISTRY_URL } = opts - const cachePath = join(cacheDir, CACHE_FILE) - - if (isFileSource(url)) { - try { - return readLocalRegistry(url) - } catch { - return readCachedRegistry(cachePath) - } - } - - try { - const res = await fetchFn(url, { signal: AbortSignal.timeout(TIMEOUT_MS) }) - if (!res.ok) throw new Error(`HTTP ${res.status}`) - const data = (await res.json()) as { connectors?: RegistryConnector[] } - const connectors: RegistryConnector[] = data.connectors ?? [] - try { - mkdirSync(cacheDir, { recursive: true }) - writeFileSync(cachePath, JSON.stringify({ connectors, fetchedAt: Date.now() })) - } catch {} - return connectors - } catch { - return readCachedRegistry(cachePath) - } -} - -function readCachedRegistry(cachePath: string): RegistryConnector[] { - try { - const raw = readFileSync(cachePath, 'utf-8') - const cached = JSON.parse(raw) - return cached.connectors ?? 
[] - } catch { - return [] - } -} diff --git a/packages/core/src/connectors/registry.test.ts b/packages/core/src/connectors/registry.test.ts deleted file mode 100644 index 6d2a71a..0000000 --- a/packages/core/src/connectors/registry.test.ts +++ /dev/null @@ -1,49 +0,0 @@ -import { describe, it, expect } from 'vitest' -import { ConnectorRegistry } from './registry.js' -import type { ConnectorPackage } from './types.js' - -function mkConnector(id: string) { - return { id, platform: 'p', label: id, description: '', color: '#000', ephemeral: false } as any -} - -function mkPkg(id: string, connectorIds: string[]): ConnectorPackage { - return { - id, - packageName: id, - rootDir: '/tmp', - connectors: connectorIds.map(mkConnector), - } as any -} - -describe('ConnectorRegistry.registerPackage', () => { - it('merges connectors when the same package id registers multiple times', () => { - const r = new ConnectorRegistry() - r.registerPackage(mkPkg('p1', ['a'])) - r.registerPackage(mkPkg('p1', ['b'])) - const merged = r.getPackage('p1') - expect(merged?.connectors.map(c => c.id).sort()).toEqual(['a', 'b']) - }) - - it('does not duplicate connectors with the same id', () => { - const r = new ConnectorRegistry() - r.registerPackage(mkPkg('p1', ['a'])) - r.registerPackage(mkPkg('p1', ['a'])) - expect(r.getPackage('p1')?.connectors.map(c => c.id)).toEqual(['a']) - }) - - it('keeps later package fields (e.g. prerequisites) on merge', () => { - const r = new ConnectorRegistry() - r.registerPackage(mkPkg('p1', ['a'])) - const p2 = { ...mkPkg('p1', ['b']), prerequisites: [{ id: 'req1' }] } as any - r.registerPackage(p2) - expect(r.getPackage('p1')?.prerequisites).toEqual([{ id: 'req1' }]) - }) - - it('accumulates three sub-connectors from a multi-connector package', () => { - const r = new ConnectorRegistry() - for (const id of ['x', 'y', 'z']) { - r.registerPackage(mkPkg('multi', [id])) - } - expect(r.getPackage('multi')?.connectors.map(c => c.id).sort()).toEqual(['x', 'y', 'z']) - }) -}) diff --git a/packages/core/src/connectors/registry.ts b/packages/core/src/connectors/registry.ts deleted file mode 100644 index 7cec668..0000000 --- a/packages/core/src/connectors/registry.ts +++ /dev/null @@ -1,68 +0,0 @@ -import type { Connector, ConnectorPackage } from './types.js' - -/** - * In-memory registry of available connectors. - * - * Connectors are registered at app startup. The registry is the single source - * of truth for "what connectors exist" — the scheduler and UI both read from it. 
- */ -export class ConnectorRegistry { - private connectors = new Map<string, Connector>() - private packages = new Map<string, ConnectorPackage>() - - register(connector: Connector): void { - this.connectors.set(connector.id, connector) - } - - registerPackage(pkg: ConnectorPackage): void { - const existing = this.packages.get(pkg.id) - if (existing) { - // Multi-connector packages register once per sub-connector — merge the connectors list - const mergedConnectors = [...existing.connectors] - for (const c of pkg.connectors) { - if (!mergedConnectors.some(e => e.id === c.id)) { - mergedConnectors.push(c) - } - } - this.packages.set(pkg.id, { ...pkg, connectors: mergedConnectors }) - } else { - this.packages.set(pkg.id, pkg) - } - } - - getPackage(id: string): ConnectorPackage | undefined { - return this.packages.get(id) - } - - listPackages(): ConnectorPackage[] { - return Array.from(this.packages.values()) - } - - get(id: string): Connector { - const connector = this.connectors.get(id) - if (!connector) throw new Error(`Connector "${id}" not found`) - return connector - } - - has(id: string): boolean { - return this.connectors.has(id) - } - - remove(id: string): boolean { - return this.connectors.delete(id) - } - - clear(): void { - this.connectors.clear() - this.packages.clear() - } - - list(): Connector[] { - return Array.from(this.connectors.values()) - } - - /** List connectors for a specific platform. */ - listByPlatform(platform: string): Connector[] { - return this.list().filter(c => c.platform === platform) - } -} diff --git a/packages/core/src/connectors/sync-engine.effect.test.ts b/packages/core/src/connectors/sync-engine.effect.test.ts deleted file mode 100644 index 5dbcc51..0000000 --- a/packages/core/src/connectors/sync-engine.effect.test.ts +++ /dev/null @@ -1,180 +0,0 @@ -import { it } from '@effect/vitest' -import { describe, expect, beforeEach } from 'vitest' -import Database from 'better-sqlite3' -import { Deferred, Duration, Effect, Fiber, TestClock } from 'effect' -import { SyncEngine } from './sync-engine.js' -import type { Connector, FetchContext, PageResult, AuthStatus } from './types.js' -import { createTestDB, makeItem } from './test-helpers.js' - -// Regression tests for the Effect rewrite of fetchLoop / syncEphemeral. -// These exercise properties that were not observable in the old Promise- -// based implementation: interruptible sleep via Deferred.await racing, -// deadline-gated sleep via Clock.currentTimeMillis, and graceful abort -// before the first fetch. -// -// Driven by @effect/vitest + TestClock, so virtual time makes the -// assertions deterministic and the total wall-clock cost is near zero. - -function connectorFromHandler( - fetchPage: (ctx: FetchContext) => Promise<PageResult>, - overrides: Partial<Connector> = {}, -): Connector { - return { - id: 'test-connector', - platform: 'test', - label: 'Test', - description: 'test', - color: '#000', - ephemeral: false, - async checkAuth(): Promise<AuthStatus> { return { ok: true } }, - fetchPage, - ...overrides, - } -} - -describe('SyncEngine — Effect.gen behavioral regressions', () => { - let db: InstanceType<typeof Database> - let engine: SyncEngine - - beforeEach(() => { - db = createTestDB() - engine = new SyncEngine(db) - }) - - it.effect('signal.abort wakes a long inter-page sleep', () => - Effect.gen(function* () { - // Two-page connector with a nominal 60s inter-page delay. Under - // TestClock that sleep never actually elapses — the virtual clock - // does not advance until we tell it to. 
The abort listener resolves - // the internal cancel Deferred, which wakes the sleep race - // immediately and causes the loop top to return stopReason=cancelled - // on its next iteration. - const connector = connectorFromHandler(async (ctx) => { - if (ctx.cursor === null) { - return { items: [makeItem('#A')], nextCursor: 'c1' } - } - return { items: [makeItem('#B')], nextCursor: null } - }) - - const controller = new AbortController() - const fiber = yield* Effect.fork( - engine.syncEffect(connector, { - direction: 'forward', - delayMs: 60_000, // 60s nominal sleep between pages - signal: controller.signal, - }), - ) - - // Let the forked fiber run until it suspends inside the sleep race. - // Yielding alone is not enough because the fiber is in a sync→async - // chain (loadState → fetchPage Promise → upsert → sleep). A zero- - // duration TestClock adjust lets the scheduler drain ready fibers. - yield* TestClock.adjust(Duration.zero) - - // Abort from outside the Effect world. The bridge listener fires - // synchronously via Deferred.unsafeDone, resolving the cancel signal. - yield* Effect.sync(() => controller.abort()) - - const result = yield* Fiber.join(fiber) - - expect(result.stopReason).toBe('cancelled') - expect(result.added).toBeGreaterThanOrEqual(1) - }), - ) - - it.effect('maxMinutes deadline stops the loop with stopReason=timeout', () => - Effect.gen(function* () { - // An endless page stream. The only way out is the maxMinutes deadline. - const connector = connectorFromHandler(async () => ({ - items: [makeItem(`#${Math.random()}`)], - nextCursor: 'more', - })) - - const fiber = yield* Effect.fork( - engine.syncEffect(connector, { - direction: 'forward', - delayMs: 60_000, - maxMinutes: 1, - }), - ) - - // Advance virtual time past the 1-minute deadline. All suspended - // sleeps resolve; the loop top reads Clock.currentTimeMillis, sees - // it >= deadline, and returns stopReason=timeout. - yield* TestClock.adjust(Duration.minutes(2)) - - const result = yield* Fiber.join(fiber) - - expect(result.stopReason).toBe('timeout') - expect(result.added).toBeGreaterThanOrEqual(1) - }), - ) - - it.effect('aborting before sync starts returns cancelled without fetching', () => - Effect.gen(function* () { - let fetchCalls = 0 - const connector = connectorFromHandler(async () => { - fetchCalls++ - return { items: [makeItem('#A')], nextCursor: null } - }) - - const controller = new AbortController() - controller.abort() - - const result = yield* engine.syncEffect(connector, { - direction: 'forward', - delayMs: 0, - signal: controller.signal, - }) - - expect(result.stopReason).toBe('cancelled') - expect(fetchCalls).toBe(0) - }), - ) - - it.effect('passes an AbortSignal via FetchContext that fires when cancel Deferred resolves', () => - Effect.gen(function* () { - let receivedSignal: AbortSignal | undefined - let pageCount = 0 - - const connector = connectorFromHandler(async (ctx) => { - pageCount++ - receivedSignal = ctx.signal - if (pageCount === 1) { - return { items: [makeItem('#sig-1')], nextCursor: 'c1' } - } - // Second page: block until signal fires, then return data + more cursor - // so the engine would continue if not for the cancel check at loop top. 
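// Editor's sketch (an assumption about SyncEngine internals, which this patch
// does not include): the kind of Deferred-to-AbortSignal bridge this test
// observes, where resolving the cancel Deferred aborts a controller whose
// signal is handed to the Promise-based connector as ctx.signal:
//
//   const controller = new AbortController()
//   yield* Effect.fork(
//     Deferred.await(cancel).pipe(
//       Effect.andThen(Effect.sync(() => controller.abort())),
//     ),
//   )
//   // ...and controller.signal travels to fetchPage via FetchContext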
- await new Promise<void>((resolve) => { - if (ctx.signal?.aborted) return resolve() - ctx.signal?.addEventListener('abort', () => resolve(), { once: true }) - }) - return { items: [makeItem('#sig-2')], nextCursor: 'c2' } - }) - - const cancel = yield* Deferred.make<void>() - const fiber = yield* Effect.fork( - engine.syncEffect(connector, { - direction: 'forward', - delayMs: 0, - cancel, - }), - ) - - // Let the fiber reach the second fetchPage (page 1 returns immediately) - yield* TestClock.adjust(Duration.zero) - - expect(receivedSignal).toBeDefined() - expect(receivedSignal!.aborted).toBe(false) - - // Fire cancel Deferred — unblocks the second fetchPage AND sets the - // cancel flag so the loop returns 'cancelled' on its next iteration. - yield* Deferred.succeed(cancel, undefined) - - const result = yield* Fiber.join(fiber) - - expect(result.stopReason).toBe('cancelled') - expect(receivedSignal!.aborted).toBe(true) - }), - ) -}) diff --git a/packages/core/src/connectors/sync-engine.observability.test.ts b/packages/core/src/connectors/sync-engine.observability.test.ts deleted file mode 100644 index 956b35a..0000000 --- a/packages/core/src/connectors/sync-engine.observability.test.ts +++ /dev/null @@ -1,225 +0,0 @@ -import { describe, it, expect, beforeEach } from 'vitest' -import Database from 'better-sqlite3' -import { Effect, Logger, Option, Tracer } from 'effect' -import { SyncEngine } from './sync-engine.js' -import type { Connector, FetchContext, PageResult, AuthStatus } from './types.js' -import { createTestDB, makeItem } from './test-helpers.js' - -// ── Observability regressions ────────────────────────────────────────────── -// Verifies the Logger + Tracer contract added in the Effect rewrite: -// 1. sync completion emits a structured INFO log via Effect.logInfo -// 2. sync.cycle / sync.forward / sync.fetchPage / sync.upsert spans are -// emitted with the expected names and attributes -// -// These tests use Effect's Logger.replace and Tracer layers to intercept -// without touching production code. Both run via `engine.syncEffect(...)`, -// the Effect-native entry point, instead of the Promise wrapper. - -function scripted(pages: PageResult[]): Connector { - let i = 0 - return { - id: 'test-connector', - platform: 'test', - label: 'Test', - description: 'test', - color: '#000', - ephemeral: false, - async checkAuth(): Promise<AuthStatus> { return { ok: true } }, - async fetchPage(_ctx: FetchContext) { - const page = pages[i] ?? { items: [], nextCursor: null } - i++ - return page - }, - } -} - -describe('SyncEngine — Observability', () => { - let db: InstanceType<typeof Database> - let engine: SyncEngine - - beforeEach(() => { - db = createTestDB() - engine = new SyncEngine(db) - }) - - // ── Logger.replace ────────────────────────────────────────────────────── - - it('emits a structured INFO "done" log on successful sync', async () => { - type CapturedLog = { level: string; message: string } - const captured: CapturedLog[] = [] - const testLogger = Logger.make(({ logLevel, message }) => { - const text = Array.isArray(message) ? 
message.map(String).join(' ') : String(message) - captured.push({ level: logLevel.label, message: text }) - }) - const loggerLayer = Logger.replace(Logger.defaultLogger, testLogger) - - const connector = scripted([ - { items: [makeItem('#1'), makeItem('#2')], nextCursor: null }, - ]) - - const program = engine.syncEffect(connector, { direction: 'forward', delayMs: 0 }) - await Effect.runPromise(program.pipe(Effect.provide(loggerLayer))) - - const doneLog = captured.find( - (l) => l.level === 'INFO' && l.message.includes('done:') && l.message.includes('test-connector'), - ) - expect(doneLog, `expected a "done:" INFO log, got: ${JSON.stringify(captured)}`).toBeDefined() - expect(doneLog!.message).toContain('added=2') - expect(doneLog!.message).toMatch(/reason=end_of_data|reason=caught_up|reason=reached_since/) - }) - - it('emits an ERROR log when fetchPage fails', async () => { - type CapturedLog = { level: string; message: string } - const captured: CapturedLog[] = [] - const testLogger = Logger.make(({ logLevel, message }) => { - const text = Array.isArray(message) ? message.map(String).join(' ') : String(message) - captured.push({ level: logLevel.label, message: text }) - }) - const loggerLayer = Logger.replace(Logger.defaultLogger, testLogger) - - const connector: Connector = { - id: 'test-connector', - platform: 'test', - label: 'Test', - description: 'test', - color: '#000', - ephemeral: false, - async checkAuth(): Promise<AuthStatus> { return { ok: true } }, - async fetchPage(): Promise<PageResult> { - throw new Error('boom from test') - }, - } - - const program = engine.syncEffect(connector, { direction: 'forward', delayMs: 0 }) - await Effect.runPromise(program.pipe(Effect.provide(loggerLayer))) - - const errLog = captured.find( - (l) => l.level === 'ERROR' && l.message.includes('boom from test'), - ) - expect(errLog, `expected an ERROR log containing the thrown message, got: ${JSON.stringify(captured)}`).toBeDefined() - }) - - // ── Custom Tracer ────────────────────────────────────────────────────── - - interface CapturedSpan { - name: string - attributes: Record<string, unknown> - parentName: string | null - } - - function collectingTracer(collected: CapturedSpan[]): Tracer.Tracer { - return Tracer.make({ - span(name, parent, context, links, startTime, kind, options) { - const attrs = new Map<string, unknown>() - // withSpan passes initial attributes via the options parameter; - // later calls to .attribute() also update the map. - if (options?.attributes) { - for (const [k, v] of Object.entries(options.attributes)) attrs.set(k, v) - } - const parentName = Option.isSome(parent) - ? parent.value._tag === 'Span' - ? 
parent.value.name - : parent.value.spanId - : null - const span: Tracer.Span = { - _tag: 'Span', - name, - spanId: `test-${collected.length}`, - traceId: 'test-trace', - parent, - context, - status: { _tag: 'Started', startTime }, - attributes: attrs, - links, - sampled: true, - kind, - attribute(key, value) { - attrs.set(key, value) - }, - event() {}, - addLinks() {}, - end() { - collected.push({ - name, - attributes: Object.fromEntries(attrs), - parentName, - }) - }, - } - return span - }, - context(f) { - return f() - }, - }) - } - - it('emits sync.cycle / sync.forward / sync.fetchPage / sync.upsert spans with correct attributes', async () => { - const spans: CapturedSpan[] = [] - const tracer = collectingTracer(spans) - - const connector = scripted([ - { items: [makeItem('#a'), makeItem('#b')], nextCursor: 'c1' }, - { items: [makeItem('#c')], nextCursor: null }, - ]) - - const program = engine.syncEffect(connector, { direction: 'forward', delayMs: 0 }) - await Effect.runPromise(program.pipe(Effect.withTracer(tracer))) - - const names = spans.map((s) => s.name) - expect(names).toContain('sync.cycle') - expect(names).toContain('sync.forward') - expect(names.filter((n) => n === 'sync.fetchPage')).toHaveLength(2) - expect(names.filter((n) => n === 'sync.upsert')).toHaveLength(2) - - const cycle = spans.find((s) => s.name === 'sync.cycle')! - expect(cycle.attributes['connector.id']).toBe('test-connector') - expect(cycle.attributes['sync.direction']).toBe('forward') - expect(cycle.parentName).toBe(null) - - const forward = spans.find((s) => s.name === 'sync.forward')! - expect(forward.parentName).toBe('sync.cycle') - - const fetchPages = spans.filter((s) => s.name === 'sync.fetchPage') - expect(fetchPages[0].attributes['connector.id']).toBe('test-connector') - expect(fetchPages[0].attributes['sync.phase']).toBe('forward') - expect(fetchPages[0].attributes['sync.page']).toBe(1) - expect(fetchPages[1].attributes['sync.page']).toBe(2) - // fetchPage spans should nest under sync.forward - expect(fetchPages[0].parentName).toBe('sync.forward') - - const upserts = spans.filter((s) => s.name === 'sync.upsert') - expect(upserts[0].attributes['items.count']).toBe(2) - expect(upserts[1].attributes['items.count']).toBe(1) - }) - - it('does not emit sync.backfill when tailComplete is true', async () => { - const spans: CapturedSpan[] = [] - const tracer = collectingTracer(spans) - - // First sync sets up tailComplete via a single-page forward that hits end_of_data. - // Then we inspect the second sync, which should only emit sync.forward (no backfill). 
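// Editor's sketch (assumed engine logic, inferred from this test rather than
// shown in the patch): backfill is gated on the persisted tailComplete flag,
// which is why only sync.forward appears on the second cycle:
//
//   if (opts.direction !== 'forward' && !state.tailComplete) {
//     yield* runBackfill(state).pipe(Effect.withSpan('sync.backfill'))
//   }
//
// (`runBackfill` is a placeholder name, not a real internal.)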
- const connector = scripted([ - { items: [makeItem('#x')], nextCursor: null }, - ]) - await Effect.runPromise( - engine.syncEffect(connector, { direction: 'both', delayMs: 0 }).pipe( - Effect.withTracer(collectingTracer([])), - ), - ) - - // Second cycle — this is the one we observe - const connector2 = scripted([ - { items: [makeItem('#x')], nextCursor: null }, - ]) - await Effect.runPromise( - engine.syncEffect(connector2, { direction: 'both', delayMs: 0 }).pipe( - Effect.withTracer(tracer), - ), - ) - - const names = spans.map((s) => s.name) - expect(names).toContain('sync.forward') - expect(names).not.toContain('sync.backfill') - }) -}) diff --git a/packages/core/src/connectors/sync-engine.test.ts b/packages/core/src/connectors/sync-engine.test.ts deleted file mode 100644 index cf0d972..0000000 --- a/packages/core/src/connectors/sync-engine.test.ts +++ /dev/null @@ -1,694 +0,0 @@ -import { describe, it, expect, beforeEach } from 'vitest' -import Database from 'better-sqlite3' -import { SyncEngine, loadSyncState, deleteConnectorItems } from './sync-engine.js' -import { SyncError, SyncErrorCode } from './types.js' -import type { Connector, FetchContext, PageResult, AuthStatus } from './types.js' -import { createTestDB, makeItem, setState, countCaptures } from './test-helpers.js' - -// ── Test Helpers ──────────────────────────────────────────────────────────── - -type FetchPageFn = (ctx: FetchContext) => Promise<PageResult> - -function createConnector(fetchPageFn: FetchPageFn, overrides?: Partial<Connector>): Connector { - return { - id: 'test-connector', - platform: 'test', - label: 'Test', - description: 'test connector', - color: '#000', - ephemeral: false, - async checkAuth(): Promise<AuthStatus> { return { ok: true } }, - fetchPage: fetchPageFn, - ...overrides, - } -} - -function createScriptedConnector( - pages: PageResult[], - opts?: { - id?: string - ephemeral?: boolean - onFetch?: (ctx: FetchContext, callIndex: number) => void - }, -): Connector { - let callIndex = 0 - return createConnector( - async (ctx) => { - opts?.onFetch?.(ctx, callIndex) - const page = pages[callIndex] ?? { items: [], nextCursor: null } - callIndex++ - return page - }, - { id: opts?.id ?? 'test-connector', ephemeral: opts?.ephemeral ?? 
false }, - ) -} - -// ── Tests ─────────────────────────────────────────────────────────────────── - -describe('SyncEngine contract', () => { - let db: InstanceType<typeof Database> - let engine: SyncEngine - - beforeEach(() => { - db = createTestDB() - engine = new SyncEngine(db) - }) - - // ── Tail-side ─────────────────────────────────────────────────────────── - - describe('Tail-side', () => { - it('initial forward: fetches from null, sets tailCursor for backfill handoff', async () => { - const connector = createScriptedConnector([ - { items: [makeItem('#100')], nextCursor: 'cur-A' }, - { items: [makeItem('#99')], nextCursor: null }, - ]) - - await engine.sync(connector, { direction: 'forward', delayMs: 0 }) - const state = loadSyncState(db, 'test-connector') - - expect(state.tailCursor).toBe('cur-A') - expect(state.headItemId).toBe('#100') - expect(state.headCursor).toBeNull() - }) - - it('backfill: resumes from tailCursor and sets tailComplete on end', async () => { - setState(db, { - connectorId: 'test-connector', - tailCursor: 'backfill-start', - headItemId: '#100', - }) - - const calls: FetchContext[] = [] - const connector = createScriptedConnector([ - { items: [makeItem('#50')], nextCursor: 'deep' }, - { items: [makeItem('#49')], nextCursor: null }, - ], { onFetch: (ctx) => calls.push({ ...ctx }) }) - - await engine.sync(connector, { direction: 'backfill', delayMs: 0 }) - const state = loadSyncState(db, 'test-connector') - - expect(calls[0].cursor).toBe('backfill-start') - expect(calls[0].phase).toBe('backfill') - expect(calls[0].sinceItemId).toBeNull() - expect(state.tailComplete).toBe(true) - }) - - it('forward + backfill interleave: 3 forward cycles + 2 backfill cycles maintain state continuity', async () => { - let callCount = 0 - const connector = createConnector(async () => { - callCount++ - if (callCount === 1) return { items: [makeItem('#100')], nextCursor: 'f1' } - if (callCount === 2) return { items: [makeItem('#99')], nextCursor: null } - if (callCount === 3) return { items: [makeItem('#50')], nextCursor: 'b1' } - if (callCount === 4) return { items: [makeItem('#49')], nextCursor: null } - if (callCount === 5) return { items: [makeItem('#101'), makeItem('#100')], nextCursor: 'f2' } - if (callCount === 6) return { items: [makeItem('#102'), makeItem('#101')], nextCursor: 'f3' } - return { items: [], nextCursor: null } - }) - - // Cycle 1: forward + backfill - const r1 = await engine.sync(connector, { direction: 'both', delayMs: 0 }) - const s1 = loadSyncState(db, 'test-connector') - expect(r1.added).toBe(4) - expect(s1.headItemId).toBe('#100') - expect(s1.tailComplete).toBe(true) - - // Cycle 2: forward only (backfill complete) - const r2 = await engine.sync(connector, { direction: 'both', delayMs: 0 }) - const s2 = loadSyncState(db, 'test-connector') - expect(r2.added).toBe(1) - expect(s2.headItemId).toBe('#101') - expect(r2.stopReason).toBe('reached_since') - - // Cycle 3: forward again - const r3 = await engine.sync(connector, { direction: 'both', delayMs: 0 }) - const s3 = loadSyncState(db, 'test-connector') - expect(r3.added).toBe(1) - expect(s3.headItemId).toBe('#102') - - expect(countCaptures(db)).toBe(6) - expect(s3.totalSynced).toBe(6) - }) - - it('ephemeral: deleteConnectorItems clears old data, full replace each sync', async () => { - const connector1 = createScriptedConnector([ - { items: [makeItem('#A'), makeItem('#B')], nextCursor: null }, - ], { ephemeral: true }) - - await engine.sync(connector1, { direction: 'forward', delayMs: 0 }) - expect(countCaptures(db)).toBe(2) - 
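// Editor's note (sketch of the ephemeral semantics this test pins down):
// an ephemeral sync is replace-not-append, conceptually
//
//   deleteConnectorItems(db, connector.id)  // wipe the previous snapshot
//   upsertItems(db, page.items)             // then write the fresh page set
//
// so the count below is 3, never 2 + 3. (`upsertItems` is a placeholder
// name; deleteConnectorItems is the real export exercised further down.)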
- const connector2 = createScriptedConnector([ - { items: [makeItem('#C'), makeItem('#D'), makeItem('#E')], nextCursor: null }, - ], { ephemeral: true }) - - const r2 = await engine.sync(connector2, { direction: 'forward', delayMs: 0 }) - expect(countCaptures(db)).toBe(3) - expect(r2.added).toBe(3) - expect(r2.total).toBe(3) - }) - - it('ephemeral re-sync drops stars on captures that are being wiped', async () => { - const connector1 = createScriptedConnector([ - { items: [makeItem('#A')], nextCursor: null }, - ], { ephemeral: true }) - await engine.sync(connector1, { direction: 'forward', delayMs: 0 }) - - // Star the capture that was just synced - const capUuid = (db.prepare('SELECT capture_uuid FROM captures').get() as { capture_uuid: string }).capture_uuid - db.prepare("INSERT INTO stars (item_type, item_uuid) VALUES ('capture', ?)").run(capUuid) - expect(db.prepare("SELECT COUNT(*) AS n FROM stars WHERE item_type='capture'").get()).toEqual({ n: 1 }) - - // Re-sync replaces captures with new UUIDs → star on old UUID must go - const connector2 = createScriptedConnector([ - { items: [makeItem('#B')], nextCursor: null }, - ], { ephemeral: true }) - await engine.sync(connector2, { direction: 'forward', delayMs: 0 }) - - expect(db.prepare("SELECT COUNT(*) AS n FROM stars WHERE item_type='capture'").get()).toEqual({ n: 0 }) - }) - - it('deleteConnectorItems (uninstall path) drops stars on wiped captures', async () => { - const connector = createScriptedConnector([ - { items: [makeItem('#X')], nextCursor: null }, - ]) - await engine.sync(connector, { direction: 'forward', delayMs: 0 }) - const capUuid = (db.prepare('SELECT capture_uuid FROM captures').get() as { capture_uuid: string }).capture_uuid - db.prepare("INSERT INTO stars (item_type, item_uuid) VALUES ('capture', ?)").run(capUuid) - - // Simulate uninstall: both app and CLI call deleteConnectorItems directly - deleteConnectorItems(db, 'test-connector') - - expect(db.prepare("SELECT COUNT(*) AS n FROM captures").get()).toEqual({ n: 0 }) - expect(db.prepare("SELECT COUNT(*) AS n FROM stars").get()).toEqual({ n: 0 }) - }) - }) - - // ── Head-side ─────────────────────────────────────────────────────────── - - describe('Head-side', () => { - it('passes sinceItemId and phase via FetchContext on forward', async () => { - const calls: FetchContext[] = [] - const connector = createScriptedConnector([ - { items: [makeItem('#100')], nextCursor: null }, - ], { onFetch: (ctx) => calls.push({ ...ctx }) }) - - await engine.sync(connector, { direction: 'forward', delayMs: 0 }) - - expect(calls[0].phase).toBe('forward') - expect(calls[0].sinceItemId).toBeNull() - expect(calls[0].cursor).toBeNull() - }) - - it('passes sinceItemId from previous forward cycle', async () => { - const calls: FetchContext[] = [] - let callCount = 0 - const connector = createConnector(async (ctx) => { - calls.push({ ...ctx }) - callCount++ - if (callCount === 1) return { items: [makeItem('#200'), makeItem('#199')], nextCursor: null } - return { items: [makeItem('#201'), makeItem('#200')], nextCursor: null } - }) - - await engine.sync(connector, { direction: 'forward', delayMs: 0 }) - await engine.sync(connector, { direction: 'forward', delayMs: 0 }) - - expect(calls[1].sinceItemId).toBe('#200') - }) - - it('passes null sinceItemId during backfill', async () => { - const calls: FetchContext[] = [] - const connector = createScriptedConnector([ - { items: [makeItem('#100')], nextCursor: null }, - ], { onFetch: (ctx) => calls.push({ ...ctx }) }) - - await engine.sync(connector, 
{ direction: 'backfill', delayMs: 0 }) - - expect(calls[0].phase).toBe('backfill') - expect(calls[0].sinceItemId).toBeNull() - }) - - it('early-exit: stops forward when page contains sinceItemId (reached_since)', async () => { - setState(db, { - connectorId: 'test-connector', - headItemId: '#200', - tailCursor: 'some-tail', - }) - - let callCount = 0 - const connector = createConnector(async () => { - callCount++ - return { items: [makeItem('#202'), makeItem('#201'), makeItem('#200')], nextCursor: 'more' } - }) - - const result = await engine.sync(connector, { direction: 'forward', delayMs: 0 }) - expect(result.stopReason).toBe('reached_since') - expect(callCount).toBe(1) - }) - - it('headItemId advances monotonically across forward cycles', async () => { - let callCount = 0 - const connector = createConnector(async () => { - callCount++ - if (callCount === 1) return { items: [makeItem('#100'), makeItem('#99')], nextCursor: null } - if (callCount === 2) return { items: [makeItem('#102'), makeItem('#101'), makeItem('#100')], nextCursor: 'more' } - return { items: [], nextCursor: null } - }) - - await engine.sync(connector, { direction: 'forward', delayMs: 0 }) - expect(loadSyncState(db, 'test-connector').headItemId).toBe('#100') - - await engine.sync(connector, { direction: 'forward', delayMs: 0 }) - expect(loadSyncState(db, 'test-connector').headItemId).toBe('#102') - }) - - it('stale-page fallback when no anchor exists', async () => { - let callCount = 0 - const connector = createConnector(async () => { - callCount++ - return { items: [makeItem('#100')], nextCursor: `cur${callCount}` } - }) - - const result = await engine.sync(connector, { direction: 'forward', stalePageLimit: 3, delayMs: 0 }) - expect(result.stopReason).toBe('caught_up') - expect(callCount).toBe(4) - }) - - it('anchor invalidation: clears headItemId when forward completes without hitting anchor', async () => { - setState(db, { - connectorId: 'test-connector', - headItemId: '#200', - tailCursor: 'some-tail', - }) - - const connector = createScriptedConnector([ - { items: [makeItem('#300')], nextCursor: null }, - ]) - - await engine.sync(connector, { direction: 'forward', delayMs: 0 }) - expect(loadSyncState(db, 'test-connector').headItemId).toBeNull() - }) - - it('anchor preserved on timeout (incomplete forward cannot judge validity)', async () => { - setState(db, { - connectorId: 'test-connector', - headItemId: '#200', - headCursor: 'resume-cur', - }) - - let callCount = 0 - const connector = createConnector(async () => { - callCount++ - return { items: [makeItem(`#${300 + callCount}`)], nextCursor: `cur${callCount}` } - }) - - await engine.sync(connector, { direction: 'forward', maxMinutes: 0.0001, delayMs: 10 }) - expect(loadSyncState(db, 'test-connector').headItemId).toBe('#200') - }) - - it('anchor preserved when reached_since — then updated to page-0 first item', async () => { - setState(db, { - connectorId: 'test-connector', - headItemId: '#200', - tailCursor: 'some-tail', - }) - - const connector = createScriptedConnector([ - { items: [makeItem('#201'), makeItem('#200')], nextCursor: 'more' }, - ]) - - await engine.sync(connector, { direction: 'forward', delayMs: 0 }) - expect(loadSyncState(db, 'test-connector').headItemId).toBe('#201') - }) - - describe('headCursor resume', () => { - it('clears headCursor on normal forward completion', async () => { - const connector = createScriptedConnector([ - { items: [makeItem('#100')], nextCursor: null }, - ]) - - await engine.sync(connector, { direction: 'forward', 
delayMs: 0 }) - expect(loadSyncState(db, 'test-connector').headCursor).toBeNull() - }) - - it('preserves headCursor on timeout for resume', async () => { - let callCount = 0 - const connector = createConnector(async () => { - callCount++ - return { items: [makeItem(`#${100 + callCount}`)], nextCursor: `cur${callCount}` } - }) - - await engine.sync(connector, { direction: 'forward', maxMinutes: 0.0001, delayMs: 10 }) - expect(loadSyncState(db, 'test-connector').headCursor).not.toBeNull() - }) - - it('resumes forward from headCursor instead of null', async () => { - setState(db, { - connectorId: 'test-connector', - headItemId: '#200', - headCursor: 'resume-from-here', - }) - - const calls: FetchContext[] = [] - const connector = createScriptedConnector([ - { items: [makeItem('#198'), makeItem('#200')], nextCursor: null }, - ], { onFetch: (ctx) => calls.push({ ...ctx }) }) - - await engine.sync(connector, { direction: 'forward', delayMs: 0 }) - expect(calls[0].cursor).toBe('resume-from-here') - }) - - it('does not update headItemId when resuming from headCursor', async () => { - setState(db, { - connectorId: 'test-connector', - headItemId: '#200', - headCursor: 'resume-cur', - tailCursor: 'tail-cur', - }) - - const connector = createScriptedConnector([ - { items: [makeItem('#195'), makeItem('#200')], nextCursor: null }, - ]) - - await engine.sync(connector, { direction: 'forward', delayMs: 0 }) - expect(loadSyncState(db, 'test-connector').headItemId).toBe('#200') - }) - - it('full resume lifecycle: interrupt → resume → complete → clear', async () => { - let callCount = 0 - const connector = createConnector(async () => { - callCount++ - if (callCount <= 3) return { items: [makeItem(`#${100 + callCount}`)], nextCursor: `cur${callCount}` } - if (callCount === 4) return { items: [makeItem('#104'), makeItem('#101')], nextCursor: null } - return { items: [], nextCursor: null } - }) - - // Cycle A: timeout after a few pages - await engine.sync(connector, { direction: 'forward', maxMinutes: 0.0001, delayMs: 10 }) - const afterA = loadSyncState(db, 'test-connector') - expect(afterA.headCursor).not.toBeNull() - expect(afterA.headItemId).toBe('#101') - - // Cycle B: resume from headCursor, completes normally - await engine.sync(connector, { direction: 'forward', delayMs: 0 }) - const afterB = loadSyncState(db, 'test-connector') - expect(afterB.headCursor).toBeNull() - }) - }) - - it('forward does NOT overwrite tailCursor on subsequent cycles', async () => { - let callCount = 0 - const connector = createConnector(async () => { - callCount++ - if (callCount === 1) return { items: [makeItem('#100')], nextCursor: 'cur1' } - if (callCount === 2) return { items: [makeItem('#99')], nextCursor: null } - if (callCount === 3) return { items: [makeItem('#50')], nextCursor: 'deep-cursor' } - if (callCount === 4) return { items: [makeItem('#49')], nextCursor: null } - if (callCount === 5) return { items: [makeItem('#101'), makeItem('#100')], nextCursor: null } - return { items: [], nextCursor: null } - }) - - await engine.sync(connector, { direction: 'both', delayMs: 0 }) - expect(loadSyncState(db, 'test-connector').tailCursor).toBe('deep-cursor') - - await engine.sync(connector, { direction: 'forward', delayMs: 0 }) - expect(loadSyncState(db, 'test-connector').tailCursor).toBe('deep-cursor') - }) - - it('initial sync handoff: forward sets tailCursor only when tailCursor and headCursor are both null', async () => { - const connector = createScriptedConnector([ - { items: [makeItem('#100')], nextCursor: 
'handoff-cursor' }, - { items: [makeItem('#99')], nextCursor: null }, - ]) - - await engine.sync(connector, { direction: 'forward', delayMs: 0 }) - const state = loadSyncState(db, 'test-connector') - expect(state.tailCursor).toBe('handoff-cursor') - }) - }) - - // ── Error handling ────────────────────────────────────────────────────── - - describe('Error handling', () => { - it('sets lastErrorAt, consecutiveErrors, and error code on failure', async () => { - const connector = createConnector(async () => { - throw new SyncError(SyncErrorCode.NETWORK_OFFLINE, 'no network') - }) - - const result = await engine.sync(connector, { direction: 'forward', delayMs: 0 }) - const state = loadSyncState(db, 'test-connector') - - expect(state.lastErrorAt).not.toBeNull() - expect(state.consecutiveErrors).toBe(1) - expect(state.lastErrorCode).toBe('NETWORK_OFFLINE') - expect(result.error).toBeDefined() - expect(result.stopReason).toBe('error: NETWORK_OFFLINE') - }) - - it('clears error state on successful sync', async () => { - setState(db, { - connectorId: 'test-connector', - consecutiveErrors: 3, - lastErrorAt: '2026-01-01T00:00:00Z', - lastErrorCode: SyncErrorCode.NETWORK_OFFLINE, - lastErrorMessage: 'was offline', - }) - - const connector = createScriptedConnector([ - { items: [makeItem('#100')], nextCursor: null }, - ]) - - await engine.sync(connector, { direction: 'forward', delayMs: 0 }) - const state = loadSyncState(db, 'test-connector') - - expect(state.lastErrorAt).toBeNull() - expect(state.consecutiveErrors).toBe(0) - expect(state.lastErrorCode).toBeNull() - }) - - it('increments consecutiveErrors on repeated failures', async () => { - const connector = createConnector(async () => { - throw new Error('fail') - }) - - await engine.sync(connector, { direction: 'forward', delayMs: 0 }) - expect(loadSyncState(db, 'test-connector').consecutiveErrors).toBe(1) - - await engine.sync(connector, { direction: 'forward', delayMs: 0 }) - expect(loadSyncState(db, 'test-connector').consecutiveErrors).toBe(2) - - await engine.sync(connector, { direction: 'forward', delayMs: 0 }) - expect(loadSyncState(db, 'test-connector').consecutiveErrors).toBe(3) - }) - - it('partial success: items added before error are persisted', async () => { - let callCount = 0 - const connector = createConnector(async () => { - callCount++ - if (callCount === 1) return { items: [makeItem('#100')], nextCursor: 'next' } - throw new SyncError(SyncErrorCode.API_SERVER_ERROR, 'server error on page 2') - }) - - const result = await engine.sync(connector, { direction: 'forward', delayMs: 0 }) - - expect(result.added).toBe(1) - expect(result.error).toBeDefined() - expect(countCaptures(db)).toBe(1) - expect(loadSyncState(db, 'test-connector').consecutiveErrors).toBe(1) - }) - - it('wraps non-SyncError in CONNECTOR_ERROR', async () => { - const connector = createConnector(async () => { - throw new TypeError('unexpected null') - }) - - const result = await engine.sync(connector, { direction: 'forward', delayMs: 0 }) - expect(result.error?.code).toBe('CONNECTOR_ERROR') - }) - }) - - // ── Cancellation ──────────────────────────────────────────────────────── - - describe('Cancellation', () => { - it('signal.abort() stops at page boundary with state saved', async () => { - const controller = new AbortController() - let callCount = 0 - const connector = createConnector(async () => { - callCount++ - if (callCount === 2) controller.abort() - return { items: [makeItem(`#${callCount}`)], nextCursor: `cur${callCount}` } - }) - - const result = await 
engine.sync(connector, { - direction: 'forward', - delayMs: 0, - signal: controller.signal, - }) - - expect(result.stopReason).toBe('cancelled') - expect(result.added).toBeGreaterThanOrEqual(1) - const state = loadSyncState(db, 'test-connector') - expect(state.headCursor).not.toBeNull() - }) - - it('can resume forward after cancellation', async () => { - const controller = new AbortController() - let callCount = 0 - const connector = createConnector(async () => { - callCount++ - if (callCount === 1) { - const result = { items: [makeItem('#100')], nextCursor: 'cur1' } - controller.abort() - return result - } - if (callCount === 2) return { items: [makeItem('#99'), makeItem('#100')], nextCursor: null } - return { items: [], nextCursor: null } - }) - - await engine.sync(connector, { direction: 'forward', delayMs: 0, signal: controller.signal }) - const afterCancel = loadSyncState(db, 'test-connector') - expect(afterCancel.headCursor).not.toBeNull() - - const result2 = await engine.sync(connector, { direction: 'forward', delayMs: 0 }) - expect(loadSyncState(db, 'test-connector').headCursor).toBeNull() - }) - }) - - // ── Checkpoint ────────────────────────────────────────────────────────── - - describe('Checkpoint', () => { - it('persists progress across 30 pages', async () => { - const totalPages = 30 - let callCount = 0 - const connector = createConnector(async () => { - callCount++ - if (callCount <= totalPages) { - return { items: [makeItem(`#${callCount}`)], nextCursor: callCount < totalPages ? `cur${callCount}` : null } - } - return { items: [], nextCursor: null } - }) - - await engine.sync(connector, { direction: 'forward', delayMs: 0 }) - - const state = loadSyncState(db, 'test-connector') - expect(state.totalSynced).toBe(30) - expect(countCaptures(db)).toBe(30) - }) - }) - - // ── Dedup ─────────────────────────────────────────────────────────────── - - describe('Dedup', () => { - it('same platformId twice does not produce duplicate rows', async () => { - const connector = createScriptedConnector([ - { items: [makeItem('#100'), makeItem('#100')], nextCursor: null }, - ]) - - const result = await engine.sync(connector, { direction: 'forward', delayMs: 0 }) - expect(result.added).toBe(1) - expect(countCaptures(db)).toBe(1) - }) - - it('same platformId across syncs updates rather than duplicates', async () => { - const connector1 = createScriptedConnector([ - { items: [makeItem('#100')], nextCursor: null }, - ]) - await engine.sync(connector1, { direction: 'forward', delayMs: 0 }) - - let callCount = 0 - const connector2 = createConnector(async () => { - callCount++ - if (callCount === 1) return { items: [makeItem('#100')], nextCursor: null } - return { items: [], nextCursor: null } - }) - - const result = await engine.sync(connector2, { direction: 'forward', delayMs: 0 }) - expect(result.added).toBe(0) - expect(countCaptures(db)).toBe(1) - }) - }) - - // ── Direction routing ─────────────────────────────────────────────────── - - describe('Direction routing', () => { - it('direction=both runs forward then backfill', async () => { - const phases: string[] = [] - const connector = createConnector(async (ctx) => { - phases.push(ctx.phase) - return { items: [makeItem(`#${phases.length}`)], nextCursor: null } - }) - - await engine.sync(connector, { direction: 'both', delayMs: 0 }) - expect(phases).toEqual(['forward', 'backfill']) - }) - - it('direction=both skips backfill when tailComplete', async () => { - setState(db, { - connectorId: 'test-connector', - tailComplete: true, - headItemId: 
'#100', - }) - - const phases: string[] = [] - const connector = createConnector(async (ctx) => { - phases.push(ctx.phase) - return { items: [makeItem('#101'), makeItem('#100')], nextCursor: null } - }) - - await engine.sync(connector, { direction: 'both', delayMs: 0 }) - expect(phases).toEqual(['forward']) - }) - - it('direction=both skips backfill when forward errors', async () => { - let callCount = 0 - const connector = createConnector(async (ctx) => { - callCount++ - if (ctx.phase === 'forward') throw new Error('fail') - return { items: [makeItem('#1')], nextCursor: null } - }) - - await engine.sync(connector, { direction: 'both', delayMs: 0 }) - expect(callCount).toBe(1) - }) - }) - - // ── Progress callback ────────────────────────────────────────────────── - - describe('Progress callback', () => { - it('calls onProgress per page and once at completion', async () => { - const progress: Array<{ page: number; running: boolean }> = [] - const connector = createScriptedConnector([ - { items: [makeItem('#1')], nextCursor: 'c1' }, - { items: [makeItem('#2')], nextCursor: null }, - ]) - - await engine.sync(connector, { - direction: 'forward', - delayMs: 0, - onProgress: (p) => progress.push({ page: p.page, running: p.running }), - }) - - expect(progress.length).toBe(3) - expect(progress[0]).toEqual({ page: 1, running: true }) - expect(progress[1]).toEqual({ page: 2, running: true }) - expect(progress[2].running).toBe(false) - }) - }) - - // ── Timeout ───────────────────────────────────────────────────────────── - - describe('Timeout', () => { - it('maxMinutes=0 means unlimited (no timeout)', async () => { - let callCount = 0 - const connector = createConnector(async () => { - callCount++ - if (callCount <= 5) return { items: [makeItem(`#${callCount}`)], nextCursor: `c${callCount}` } - return { items: [makeItem(`#${callCount}`)], nextCursor: null } - }) - - const result = await engine.sync(connector, { direction: 'forward', maxMinutes: 0, delayMs: 0 }) - expect(result.stopReason).toBe('end_of_data') - expect(callCount).toBe(6) - }) - }) -}) diff --git a/packages/core/src/connectors/sync-engine.ts b/packages/core/src/connectors/sync-engine.ts deleted file mode 100644 index 3176c05..0000000 --- a/packages/core/src/connectors/sync-engine.ts +++ /dev/null @@ -1,804 +0,0 @@ -import type Database from 'better-sqlite3' -import { randomUUID } from 'node:crypto' -import { Clock, Deferred, Duration, Effect, Scope } from 'effect' -import type { - Connector, - FetchContext, - SyncState, - SyncOptions, - ConnectorSyncResult, - SyncProgress, -} from './types.js' -import { SyncError, SyncErrorCode, DEFAULT_SCHEDULE } from './types.js' -import type { CapturedItem } from './types.js' - -/** - * The internal "please stop" signal. Resolved once → the loop returns - * gracefully with stopReason='cancelled' on its next iteration, and any - * in-flight sleep is interrupted via Effect.race below. Driven by - * `opts.signal` (via `bridgeAbortSignal` at the top of `syncEffect`) or - * by direct resolution in tests. - */ -type CancelSignal = Deferred.Deferred<void> - -/** - * Bridge an optional AbortSignal into a CancelSignal registered in the - * ambient Scope. The listener is cleaned up by the scope's finalizer.
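 - *
 - * @example A hedged usage sketch (assumes effect 3.x; `demo` and the
 - * five-minute sleep are illustrative, not part of the engine):
 - *
 - *   const demo = (signal: AbortSignal) =>
 - *     Effect.gen(function* () {
 - *       const cancel = yield* Deferred.make<void>()
 - *       yield* bridgeAbortSignal(signal, cancel) // registers in the ambient Scope
 - *       // long work wakes early when the AbortController fires
 - *       yield* Effect.race(Effect.sleep('5 minutes'), Deferred.await(cancel))
 - *     }).pipe(Effect.scoped) // scope closes → abort listener removed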
- */ -function bridgeAbortSignal( - signal: AbortSignal | undefined, - cancel: CancelSignal, -): Effect.Effect<void, never, Scope.Scope> { - if (!signal) return Effect.void - if (signal.aborted) return Deferred.succeed(cancel, undefined).pipe(Effect.asVoid) - return Effect.acquireRelease( - Effect.sync(() => { - const handler = () => { - Deferred.unsafeDone(cancel, Effect.void) - } - signal.addEventListener('abort', handler, { once: true }) - return handler - }), - (handler) => Effect.sync(() => signal.removeEventListener('abort', handler)), - ).pipe(Effect.asVoid) -} - -function cancelDeferredToSignal( - cancel: Deferred.Deferred<void>, -): Effect.Effect<AbortSignal> { - return Effect.sync(() => { - const controller = new AbortController() - Effect.runFork( - Effect.gen(function* () { - yield* Deferred.await(cancel) - controller.abort() - }), - ) - return controller.signal - }) -} - -/** - * Sleep `ms` milliseconds but wake immediately if the cancel signal fires. - * The loop top still polls the cancel signal for a graceful - * `stopReason='cancelled'` return; this racer just cuts the sleep short. - */ -function interruptibleSleep(ms: number, cancel: CancelSignal): Effect.Effect<void> { - return Effect.race( - Effect.sleep(Duration.millis(ms)), - Deferred.await(cancel), - ).pipe(Effect.asVoid) -} - -const nowIso: Effect.Effect<string> = Effect.map( - Clock.currentTimeMillis, - (ms) => new Date(ms).toISOString(), -) - -// ── Sync State Persistence ────────────────────────────────────────────────── - -export function loadSyncState(db: Database.Database, connectorId: string): SyncState { - const row = db.prepare('SELECT * FROM connector_sync_state WHERE connector_id = ?') - .get(connectorId) as Record<string, unknown> | undefined - - if (!row) { - return { - connectorId, - headCursor: null, - headItemId: null, - tailCursor: null, - tailComplete: false, - lastForwardSyncAt: null, - lastBackfillSyncAt: null, - totalSynced: 0, - consecutiveErrors: 0, - enabled: false, - configJson: {}, - lastErrorAt: null, - lastErrorCode: null, - lastErrorMessage: null, - } - } - - let configJson: Record<string, unknown> = {} - try { configJson = JSON.parse(row['config_json'] as string) } catch {} - - return { - connectorId, - headCursor: row['head_cursor'] as string | null, - headItemId: row['head_item_id'] as string | null, - tailCursor: row['tail_cursor'] as string | null, - tailComplete: Boolean(row['tail_complete']), - lastForwardSyncAt: row['last_forward_sync_at'] as string | null, - lastBackfillSyncAt: row['last_backfill_sync_at'] as string | null, - totalSynced: row['total_synced'] as number, - consecutiveErrors: row['consecutive_errors'] as number, - enabled: Boolean(row['enabled']), - configJson, - lastErrorAt: row['last_error_at'] as string | null, - lastErrorCode: row['last_error_code'] as SyncErrorCode | null, - lastErrorMessage: row['last_error_message'] as string | null, - } -} - -export function saveSyncState(db: Database.Database, state: SyncState): void { - db.prepare(` - INSERT INTO connector_sync_state - (connector_id, head_cursor, head_item_id, tail_cursor, tail_complete, - last_forward_sync_at, last_backfill_sync_at, total_synced, - consecutive_errors, enabled, config_json, - last_error_at, last_error_code, last_error_message) - VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
- ON CONFLICT(connector_id) DO UPDATE SET - head_cursor = excluded.head_cursor, - head_item_id = excluded.head_item_id, - tail_cursor = excluded.tail_cursor, - tail_complete = excluded.tail_complete, - last_forward_sync_at = excluded.last_forward_sync_at, - last_backfill_sync_at = excluded.last_backfill_sync_at, - total_synced = excluded.total_synced, - consecutive_errors = excluded.consecutive_errors, - enabled = excluded.enabled, - config_json = excluded.config_json, - last_error_at = excluded.last_error_at, - last_error_code = excluded.last_error_code, - last_error_message = excluded.last_error_message - `).run( - state.connectorId, - state.headCursor, state.headItemId, - state.tailCursor, state.tailComplete ? 1 : 0, - state.lastForwardSyncAt, state.lastBackfillSyncAt, - state.totalSynced, state.consecutiveErrors, - state.enabled ? 1 : 0, - JSON.stringify(state.configJson), - state.lastErrorAt, state.lastErrorCode, state.lastErrorMessage, - ) -} - -// ── Item Upsert ───────────────────────────────────────────────────────────── - -interface UpsertResult { - newCount: number - updatedCount: number -} - -function upsertItems( - db: Database.Database, - sourceId: number, - connectorId: string, - items: CapturedItem[], -): UpsertResult { - let newCount = 0 - let updatedCount = 0 - - const checkStmt = db.prepare( - 'SELECT id FROM captures WHERE platform = ? AND platform_id = ?', - ) - const updateStmt = db.prepare(` - UPDATE captures SET - title = ?, content_text = ?, author = ?, metadata = ?, - captured_at = ?, raw_json = ?, thumbnail_url = ? - WHERE id = ? - `) - const insertStmt = db.prepare(` - INSERT INTO captures - (source_id, capture_uuid, url, title, content_text, - author, platform, platform_id, content_type, thumbnail_url, - metadata, captured_at, raw_json) - VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) - `) - const linkStmt = db.prepare( - 'INSERT OR IGNORE INTO capture_connectors (capture_id, connector_id) VALUES (?, ?)', - ) - - for (const item of items) { - let captureId: number | bigint | null = null - - if (item.platformId) { - const existing = checkStmt.get(item.platform, item.platformId) as - | { id: number } - | undefined - if (existing) { - updateStmt.run( - item.title, item.contentText, item.author, - JSON.stringify(item.metadata), item.capturedAt, item.rawJson, - item.thumbnailUrl, existing.id, - ) - captureId = existing.id - updatedCount++ - } - } - - if (captureId === null) { - const info = insertStmt.run( - sourceId, randomUUID(), item.url, item.title, item.contentText, - item.author, item.platform, item.platformId, item.contentType, - item.thumbnailUrl, JSON.stringify(item.metadata), item.capturedAt, - item.rawJson, - ) - captureId = info.lastInsertRowid - newCount++ - } - - linkStmt.run(captureId, connectorId) - } - - return { newCount, updatedCount } -} - -export function deleteConnectorItems(db: Database.Database, connectorId: string): void { - // 1. Drop this connector's M:N claims. - db.prepare('DELETE FROM capture_connectors WHERE connector_id = ?').run(connectorId) - // 2. Drop stars on captures that are about to disappear. Bulk-replace - // connectors (ephemeral or full-wipe) re-insert captures with new UUIDs - // on the next sync, so without this the star becomes a permanent orphan. - // Scoped with the same predicate as the capture delete below. 
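The hazard this guards against, in miniature — a hedged sketch against a reduced two-table schema (the seed UUIDs are invented, not from the real DB):

import Database from 'better-sqlite3'

const demo = new Database(':memory:')
demo.exec(`
  CREATE TABLE captures (capture_uuid TEXT PRIMARY KEY);
  CREATE TABLE stars (item_uuid TEXT PRIMARY KEY);
`)
demo.prepare('INSERT INTO captures VALUES (?)').run('uuid-a')
demo.prepare('INSERT INTO stars VALUES (?)').run('uuid-a')
// Bulk replace: the capture is deleted, then re-inserted under a fresh UUID.
demo.prepare('DELETE FROM captures').run()
demo.prepare('INSERT INTO captures VALUES (?)').run('uuid-b')
// The star now joins to nothing and never will again — hence deleting stars
// together with the captures they point at.
const { n } = demo.prepare(`
  SELECT COUNT(*) AS n FROM stars s
  WHERE NOT EXISTS (SELECT 1 FROM captures c WHERE c.capture_uuid = s.item_uuid)
`).get() as { n: number }
// n === 1: a permanently orphaned star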
- db.prepare(` - DELETE FROM stars - WHERE item_type = 'capture' - AND item_uuid IN ( - SELECT capture_uuid FROM captures - WHERE source_id = (SELECT id FROM sources WHERE name = 'connector') - AND NOT EXISTS ( - SELECT 1 FROM capture_connectors WHERE capture_id = captures.id - ) - ) - `).run() - // 3. Delete captures that belonged to this connector and have no other - // connector attribution left. Scoped to source='connector' so we never - // touch session-world captures. - db.prepare(` - DELETE FROM captures - WHERE source_id = (SELECT id FROM sources WHERE name = 'connector') - AND NOT EXISTS ( - SELECT 1 FROM capture_connectors WHERE capture_id = captures.id - ) - `).run() -} - -// ── Sync Engine ───────────────────────────────────────────────────────────── - -function getSourceId(db: Database.Database): number { - // Connector captures all share a single generic 'connector' source row. - // Per-connector attribution lives in the capture_connectors M:N table. - const row = db.prepare("SELECT id FROM sources WHERE name = 'connector'").get() as - | { id: number } - | undefined - if (!row) throw new Error("Source 'connector' not found in DB") - return row.id -} - -function hasKnownItem( - db: Database.Database, - platform: string, - items: CapturedItem[], -): boolean { - if (items.length === 0) return false - const stmt = db.prepare( - 'SELECT 1 FROM captures WHERE platform = ? AND platform_id = ? LIMIT 1', - ) - for (const item of items) { - if (item.platformId && stmt.get(platform, item.platformId)) return true - } - return false -} - -export class SyncEngine { - constructor(private db: Database.Database) {} - - loadState(connectorId: string): SyncState { - return loadSyncState(this.db, connectorId) - } - - /** - * Effect-native entry point. Returns an Effect that, when run, executes a - * full sync cycle (ephemeral or dual-frontier persistent) and records the - * outcome in connector_sync_state. The Effect is `Effect<ConnectorSyncResult, never>` - * because all SyncErrors are caught internally and encoded in `result.error`. - * - * Preferred by callers that already live in the Effect world (the Scheduler - * Effect migration, tests that want to inject Logger / Tracer layers via - * `Effect.provide`). Promise-based callers should use `sync()` instead. - */ - syncEffect( - connector: Connector, - opts: SyncOptions = {}, - ): Effect.Effect<ConnectorSyncResult> { - const db = this.db - const self = this - - return Effect.gen(function* () { - const state = yield* Effect.sync(() => loadSyncState(db, connector.id)) - const startedAt = yield* Clock.currentTimeMillis - const cancel = opts.cancel ?? (yield* Deferred.make<void>()) - yield* bridgeAbortSignal(opts.signal, cancel) - - const body = connector.ephemeral - ? self.syncEphemeralEffect(connector, state, opts, startedAt, cancel) - : self.syncPersistentEffect(connector, state, opts, startedAt, cancel) - - const result = yield* body.pipe( - Effect.withSpan('sync.cycle', { - attributes: { - 'connector.id': connector.id, - 'sync.direction': opts.direction ??
'both', - }, - }), - ) - - if (result.error) { - state.consecutiveErrors += 1 - state.lastErrorAt = yield* nowIso - state.lastErrorCode = result.error.code as SyncErrorCode - state.lastErrorMessage = result.error.message - } else { - state.consecutiveErrors = 0 - state.lastErrorAt = null - state.lastErrorCode = null - state.lastErrorMessage = null - } - yield* Effect.sync(() => saveSyncState(db, state)) - return result - // Effect.scoped discharges the Scope required by bridgeAbortSignal's - // acquireRelease, guaranteeing removeEventListener runs on every exit - // path (success, failure, interruption). - }).pipe(Effect.scoped) - } - - async sync(connector: Connector, opts: SyncOptions = {}): Promise<ConnectorSyncResult> { - // opts.signal is bridged into an internal Deferred by syncEffect — - // NOT passed to runPromise, because runtime interruption would - // skip state persistence and surface as an error. Cancellation must - // be observable inside the loop (via Deferred.isDone) so the loop - // can return gracefully with stopReason='cancelled' + partial progress. - try { - return await Effect.runPromise(this.syncEffect(connector, opts)) - } catch (err) { - // Defect surfaced past the typed error channel — defensive fallback. - const state = loadSyncState(this.db, connector.id) - const syncErr = SyncError.from(err) - state.consecutiveErrors += 1 - state.lastErrorAt = new Date().toISOString() - state.lastErrorCode = syncErr.code - state.lastErrorMessage = syncErr.message - saveSyncState(this.db, state) - return { - connectorId: connector.id, - added: 0, - total: state.totalSynced, - pages: 0, - direction: opts.direction ?? 'both', - stopReason: `error: ${syncErr.code}`, - error: { code: syncErr.code, message: syncErr.message }, - } - } - } - - /** - * Fetch pages in an Effect-based loop until a stop condition is met. - * Errors are caught internally and returned in the result (never fails). - */ - private fetchLoopEffect( - connector: Connector, - state: SyncState, - opts: SyncOptions & { phase: 'forward' | 'backfill' }, - sourceId: number, - startCursor: string | null, - startedAt: number, - cancel: CancelSignal, - signal: AbortSignal, - ): Effect.Effect<FetchLoopResult> { - const db = this.db - const delayMs = opts.delayMs ?? DEFAULT_SCHEDULE.pageDelayMs - const maxMinutes = opts.maxMinutes ?? 0 - const maxPages = opts.maxPages ?? 100 - const stalePageLimit = opts.stalePageLimit ?? 3 - const checkpointEvery = 25 - const deadline = maxMinutes > 0 ? startedAt + maxMinutes * 60_000 : Number.POSITIVE_INFINITY - - // Initial sync handoff: forward writes tailCursor ONLY on the very first sync - // (tailCursor still null, no prior forward interrupted = headCursor null). - // This lets backfill pick up where forward left off. On subsequent cycles, - // forward must NOT touch tailCursor — otherwise it overwrites backfill's - // deep progress with a shallow position near the newest end. - const isInitialSync = opts.phase === 'forward' - && state.tailCursor === null - && state.headCursor === null - - // Capture the since-anchor at loop entry, before page-0 may update - // headItemId. This is the stop signal for forward early-exit: - // "stop when you reach this item — everything at or beyond it is already indexed." - const sinceItemId = opts.phase === 'forward' ?
state.headItemId : null - - return Effect.gen(function* () { - let cursor = startCursor - let added = 0 - let pages = 0 - let stalePages = 0 - - while (true) { - const now = yield* Clock.currentTimeMillis - if (now >= deadline) { - return { added, pages, stopReason: 'timeout' } - } - if (pages >= maxPages) { - return { added, pages, stopReason: 'max_pages' } - } - if (yield* Deferred.isDone(cancel)) { - return { added, pages, stopReason: 'cancelled' } - } - - const fetchCtx: FetchContext = { cursor, sinceItemId, phase: opts.phase, signal } - const outcome = yield* Effect.either( - Effect.tryPromise({ - try: () => connector.fetchPage(fetchCtx), - catch: SyncError.from, - }).pipe( - Effect.withSpan('sync.fetchPage', { - attributes: { - 'connector.id': connector.id, - 'sync.phase': opts.phase, - 'sync.page': pages + 1, - }, - }), - ), - ) - - if (outcome._tag === 'Left') { - const err = outcome.left - yield* Effect.logError( - `[sync-engine] ${connector.id} ${opts.phase} page ${pages + 1} error: ${err.message}`, - ) - yield* Effect.sync(() => saveSyncState(db, state)) - return { - added, - pages, - stopReason: `error: ${err.code}`, - error: { code: err.code, message: err.message }, - } - } - - const result = outcome.right - pages++ - - if (result.items.length === 0 && !result.nextCursor) { - if (opts.phase === 'forward') state.headCursor = null - if (opts.phase === 'backfill') state.tailComplete = true - return { - added, - pages, - stopReason: opts.phase === 'backfill' ? 'backfill_complete' : 'end_of_data', - } - } - - const { newCount } = yield* Effect.sync(() => - db.transaction(() => upsertItems(db, sourceId, connector.id, result.items))(), - ).pipe( - Effect.withSpan('sync.upsert', { - attributes: { 'items.count': result.items.length }, - }), - ) - added += newCount - - // Update headItemId: the platform ID of the newest item we've ever seen. - // Only on forward, only on the first page (pages === 1 after increment), - // and only when NOT resuming from headCursor — a resumed forward is - // catching up to the existing anchor, not establishing a new one. - // On platforms with cursor-walking (no server-side since), the first page - // of a fresh forward always starts at the newest end, so page-0's first - // item is guaranteed to be >= the current headItemId. - if (opts.phase === 'forward' && pages === 1 && startCursor === null) { - const firstItem = result.items[0] - if (firstItem?.platformId && firstItem.platformId !== state.headItemId) { - state.headItemId = firstItem.platformId - } - } - - if (opts.onProgress) { - const progress: SyncProgress = { - connectorId: connector.id, - phase: opts.phase, - page: pages, - fetched: result.items.length, - added, - running: true, - } - yield* Effect.sync(() => opts.onProgress!(progress)) - } - - // Early-exit: forward stops when it reaches the since-anchor (headItemId). - // This means we've caught up to the point where the last forward left off. - // Much more efficient than stale-page detection for small incremental syncs - // (e.g. 2 new bookmarks → 1 page instead of 3+ stale pages). - // Note: sinceItemId was already captured into headItemId before page 0's - // update (headItemId is only updated on page 0 of a fresh forward), - // so we compare against the original anchor stored at fetchLoop entry. 
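The early-exit itself reduces to a page-level predicate — a minimal sketch with an invented PageItem shape, mirroring the check that follows:

// A forward page "reaches" the anchor when any item on it carries the
// anchored platform ID.
interface PageItem { platformId: string | null }

const reachedSince = (page: PageItem[], sinceItemId: string | null): boolean =>
  sinceItemId !== null && page.some((item) => item.platformId === sinceItemId)

// e.g. anchor '#200' against a page ['#202', '#201', '#200'] → true on the
// first fetch, which is why the reached_since test above stops at one call.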
- if (opts.phase === 'forward' && sinceItemId) { - const hitAnchor = result.items.some( - item => item.platformId === sinceItemId, - ) - if (hitAnchor) { - state.headCursor = null - return { added, pages, stopReason: 'reached_since' } - } - } - - // Stale page detection: stop when we keep seeing only known data - if (newCount === 0) stalePages++ - else stalePages = 0 - if (stalePages >= stalePageLimit) { - if (opts.phase === 'forward') state.headCursor = null - if (opts.phase === 'backfill') state.tailComplete = true - return { - added, - pages, - stopReason: opts.phase === 'forward' ? 'caught_up' : 'backfill_complete', - } - } - - if (!result.nextCursor) { - if (opts.phase === 'forward') state.headCursor = null - if (opts.phase === 'backfill') state.tailComplete = true - return { - added, - pages, - stopReason: opts.phase === 'backfill' ? 'backfill_complete' : 'end_of_data', - } - } - - cursor = result.nextCursor - - if (opts.phase === 'forward') { - // Save forward progress so an interrupted cycle can resume here - // instead of re-fetching from the newest end. - state.headCursor = cursor - // Only write tailCursor during initial sync (handoff to backfill). - if (isInitialSync) state.tailCursor = cursor - } else { - state.tailCursor = cursor - } - - if (pages % checkpointEvery === 0) { - yield* Effect.sync(() => saveSyncState(db, state)) - } - - // Cap the inter-page delay at the remaining deadline so maxMinutes - // has ms-level precision instead of being gated on polling frequency. - const nowAfter = yield* Clock.currentTimeMillis - const remaining = deadline - nowAfter - const actualDelay = Math.max(0, Math.min(delayMs, remaining)) - if (actualDelay > 0) { - yield* interruptibleSleep(actualDelay, cancel) - } - } - }) - } - - private syncPersistentEffect( - connector: Connector, - state: SyncState, - opts: SyncOptions, - startedAt: number, - cancel: CancelSignal, - ): Effect.Effect<ConnectorSyncResult> { - const db = this.db - const direction = opts.direction ?? 'both' - const sourceId = getSourceId(db) - const self = this - - return Effect.gen(function* () { - const signal = yield* cancelDeferredToSignal(cancel) - let totalAdded = 0 - let totalPages = 0 - let stopReason = 'complete' - let lastError: { code: string; message: string } | undefined - - if (direction === 'forward' || direction === 'both') { - const hadAnchor = state.headItemId !== null - const fwd = yield* self - .fetchLoopEffect( - connector, - state, - { ...opts, phase: 'forward' }, - sourceId, - state.headCursor ?? null, - startedAt, - cancel, - signal, - ) - .pipe(Effect.withSpan('sync.forward')) - - totalAdded += fwd.added - totalPages += fwd.pages - stopReason = fwd.stopReason - if (fwd.error) lastError = fwd.error - state.lastForwardSyncAt = yield* nowIso - - // Anchor invalidation recovery (Q3): if forward ran to completion - // (not interrupted) but never hit the since-anchor, the anchor is stale - // (e.g. user un-bookmarked that item). Clear it so next forward starts - // fresh and re-establishes the anchor from page 0.
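Extracted as a standalone predicate, the recovery rule looks like this — a sketch reusing the stop-reason strings from fetchLoopEffect:

// A stale anchor is detected only when forward truly finished: interrupted
// runs (timeout / cancelled / error) cannot prove the anchor is gone.
const anchorIsStale = (hadAnchor: boolean, stopReason: string): boolean =>
  hadAnchor &&
  stopReason !== 'reached_since' && // anchor was found — still valid
  stopReason !== 'timeout' &&
  stopReason !== 'cancelled' &&
  !stopReason.startsWith('error')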
- const completedWithoutHit = hadAnchor - && fwd.stopReason !== 'reached_since' - && fwd.stopReason !== 'timeout' - && fwd.stopReason !== 'cancelled' - && !fwd.stopReason.startsWith('error') - if (completedWithoutHit) { - state.headItemId = null - } - } - - if (!lastError && !state.tailComplete && (direction === 'backfill' || direction === 'both')) { - const bf = yield* self - .fetchLoopEffect( - connector, - state, - { ...opts, phase: 'backfill' }, - sourceId, - state.tailCursor, - startedAt, - cancel, - signal, - ) - .pipe(Effect.withSpan('sync.backfill')) - - totalAdded += bf.added - totalPages += bf.pages - stopReason = bf.stopReason - if (bf.error) lastError = bf.error - state.lastBackfillSyncAt = yield* nowIso - } - - state.totalSynced += totalAdded - yield* Effect.sync(() => saveSyncState(db, state)) - - yield* Effect.logInfo( - `[sync-engine] ${connector.id} done: added=${totalAdded} pages=${totalPages} reason=${stopReason}`, - ) - - if (opts.onProgress) { - const progress: SyncProgress = { - connectorId: connector.id, - phase: 'forward', - page: totalPages, - fetched: 0, - added: totalAdded, - running: false, - } - yield* Effect.sync(() => opts.onProgress!(progress)) - } - - const ret: ConnectorSyncResult = { - connectorId: connector.id, - added: totalAdded, - total: state.totalSynced, - pages: totalPages, - direction, - stopReason, - } - if (lastError) { - ret.error = { code: lastError.code as SyncErrorCode, message: lastError.message } - } - return ret - }) - } - - private syncEphemeralEffect( - connector: Connector, - state: SyncState, - opts: SyncOptions, - startedAt: number, - cancel: CancelSignal, - ): Effect.Effect<ConnectorSyncResult> { - const db = this.db - const delayMs = opts.delayMs ?? DEFAULT_SCHEDULE.pageDelayMs - const maxMinutes = opts.maxMinutes ?? 0 - const maxPages = opts.maxPages ?? 100 - const sourceId = getSourceId(db) - const deadline = maxMinutes > 0 ?
startedAt + maxMinutes * 60_000 : Number.POSITIVE_INFINITY - - return Effect.gen(function* () { - const signal = yield* cancelDeferredToSignal(cancel) - yield* Effect.sync(() => - db.transaction(() => deleteConnectorItems(db, connector.id))(), - ) - - let cursor: string | null = null - let totalAdded = 0 - let totalPages = 0 - let stopReason = 'complete' - - while (true) { - const now = yield* Clock.currentTimeMillis - if (now >= deadline) { - stopReason = 'timeout' - break - } - if (totalPages >= maxPages) { - stopReason = 'max_pages' - break - } - if (yield* Deferred.isDone(cancel)) { - stopReason = 'cancelled' - break - } - - const outcome = yield* Effect.either( - Effect.tryPromise({ - try: () => connector.fetchPage({ cursor, sinceItemId: null, phase: 'forward', signal }), - catch: SyncError.from, - }).pipe( - Effect.withSpan('sync.fetchPage', { - attributes: { - 'connector.id': connector.id, - 'sync.phase': 'forward', - 'sync.page': totalPages + 1, - }, - }), - ), - ) - - if (outcome._tag === 'Left') { - const err = outcome.left - yield* Effect.logError( - `[sync-engine] ${connector.id} forward page ${totalPages + 1} error: ${err.message}`, - ) - state.totalSynced = totalAdded - state.lastForwardSyncAt = yield* nowIso - yield* Effect.sync(() => saveSyncState(db, state)) - return { - connectorId: connector.id, - added: totalAdded, - total: totalAdded, - pages: totalPages, - direction: 'forward', - stopReason: `error: ${err.code}`, - error: { code: err.code, message: err.message }, - } - } - - const result = outcome.right - totalPages++ - - const { newCount } = yield* Effect.sync(() => - db.transaction(() => upsertItems(db, sourceId, connector.id, result.items))(), - ).pipe( - Effect.withSpan('sync.upsert', { - attributes: { 'items.count': result.items.length }, - }), - ) - totalAdded += newCount - - if (!result.nextCursor) break - cursor = result.nextCursor - - const nowAfter = yield* Clock.currentTimeMillis - const remaining = deadline - nowAfter - const actualDelay = Math.max(0, Math.min(delayMs, remaining)) - if (actualDelay > 0) { - yield* interruptibleSleep(actualDelay, cancel) - } - } - - state.totalSynced = totalAdded - state.lastForwardSyncAt = yield* nowIso - yield* Effect.sync(() => saveSyncState(db, state)) - - return { - connectorId: connector.id, - added: totalAdded, - total: totalAdded, - pages: totalPages, - direction: 'forward', - stopReason, - } - }) - } -} - -interface FetchLoopResult { - added: number - pages: number - stopReason: string - error?: { code: string; message: string } -} diff --git a/packages/core/src/connectors/sync-scheduler.effect.test.ts b/packages/core/src/connectors/sync-scheduler.effect.test.ts deleted file mode 100644 index 8048959..0000000 --- a/packages/core/src/connectors/sync-scheduler.effect.test.ts +++ /dev/null @@ -1,433 +0,0 @@ -import { describe, it, expect, beforeEach, afterEach } from 'vitest' -import { Duration, Effect, ManagedRuntime, TestClock, TestContext } from 'effect' -import Database from 'better-sqlite3' -import { SyncScheduler, type SchedulerEvent } from './sync-scheduler.js' -import { ConnectorRegistry } from './registry.js' -import { SyncError, SyncErrorCode } from './types.js' -import type { Connector, AuthStatus, FetchContext, PageResult } from './types.js' -import { createTestDB, makeItem, setState } from './test-helpers.js' - -// ── Test Helpers ──────────────────────────────────────────────────────────── - -function createTestConnector( - id: string, - fetchPageFn?: (ctx: FetchContext) => Promise<PageResult>, -): Connector { -
return { - id, - platform: 'test', - label: `Test ${id}`, - description: 'test connector', - color: '#000', - ephemeral: false, - async checkAuth(): Promise<AuthStatus> { return { ok: true } }, - fetchPage: - fetchPageFn ?? (async () => ({ items: [makeItem(`${id}-1`)], nextCursor: null })), - } -} - -/** - * Build a test runtime whose default Clock is TestClock. The scheduler's - * tick fiber and per-job runJob fibers all run in this runtime so - * `runtime.runPromise(TestClock.adjust(...))` advances time deterministically. - */ -function makeTestRuntime() { - return ManagedRuntime.make(TestContext.TestContext) -} - -/** - * TestClock advance + microtask drain. The scheduler forks real Promise-based - * fetchPage calls, so after advancing virtual time we need to yield to the JS - * microtask/macrotask queues a few times so the forked runJob fibers can - * progress through their await boundaries before we assert. - */ -async function flush(runtime: ReturnType<typeof makeTestRuntime>, ms: number): Promise<void> { - await runtime.runPromise(TestClock.adjust(Duration.millis(ms))) - for (let i = 0; i < 5; i++) { - await new Promise((resolve) => setImmediate(resolve)) - await runtime.runPromise(Effect.sleep(Duration.zero)) - } -} - -// ── Tests ─────────────────────────────────────────────────────────────────── - -describe('SyncScheduler contract (effect)', () => { - let db: InstanceType<typeof Database> - let registry: ConnectorRegistry - let scheduler: SyncScheduler | undefined - let runtime: ReturnType<typeof makeTestRuntime> - - beforeEach(() => { - db = createTestDB() - registry = new ConnectorRegistry() - runtime = makeTestRuntime() - scheduler = undefined - }) - - afterEach(async () => { - scheduler?.stop() - await runtime.dispose() - }) - - describe('Backoff', () => { - it('AUTH_* error: no further syncs after auth failure', async () => { - const fetchCalls: number[] = [] - const connector = createTestConnector('auth-fail', async () => { - fetchCalls.push(Date.now()) - throw new SyncError(SyncErrorCode.AUTH_SESSION_EXPIRED, 'expired') - }) - registry.register(connector) - setState(db, { connectorId: 'auth-fail' }) - - scheduler = new SyncScheduler( - db, - registry, - { - forwardIntervalMs: 1_000, - retryBackoffMs: [1_000], - pageDelayMs: 0, - maxMinutesPerRun: 1, - }, - runtime, - ) - scheduler.start() - - await flush(runtime, 100) - expect(fetchCalls).toHaveLength(1) - - // AUTH errors never retry — advance far past any configured backoff - await flush(runtime, 120_000) - expect(fetchCalls).toHaveLength(1) - }) - - it('non-auth error: respects backoff sequence across ticks', async () => { - const fetchCalls: number[] = [] - const connector = createTestConnector('backoff-test', async () => { - fetchCalls.push(Date.now()) - throw new SyncError(SyncErrorCode.NETWORK_OFFLINE, 'server down') - }) - registry.register(connector) - setState(db, { connectorId: 'backoff-test' }) - - scheduler = new SyncScheduler( - db, - registry, - { - forwardIntervalMs: 1_000, - backfillIntervalMs: 999_999_999, - retryBackoffMs: [5_000, 60_000], - pageDelayMs: 0, - maxMinutesPerRun: 1, - }, - runtime, - ) - scheduler.start() - - await flush(runtime, 100) - expect(fetchCalls).toHaveLength(1) - - // t=30s: tick fires, backoff=5s has elapsed (30>5), retry → errors=2 - await flush(runtime, 30_000) - expect(fetchCalls).toHaveLength(2) - - // t=60s: 30s since last error, backoff=60s → skip - await flush(runtime, 30_000) - expect(fetchCalls).toHaveLength(2) - - // t=90s: 60s since last error → retry - await flush(runtime, 30_000) - expect(fetchCalls).toHaveLength(3) - }) - - it('backoff base is
lastErrorAt, not lastForwardSyncAt', async () => { - const fetchCalls: number[] = [] - const connector = createTestConnector('backoff-base', async () => { - fetchCalls.push(Date.now()) - throw new SyncError(SyncErrorCode.API_SERVER_ERROR, 'fail') - }) - registry.register(connector) - setState(db, { connectorId: 'backoff-base' }) - - scheduler = new SyncScheduler( - db, - registry, - { - forwardIntervalMs: 1_000, - retryBackoffMs: [60_000], - pageDelayMs: 0, - maxMinutesPerRun: 1, - }, - runtime, - ) - scheduler.start() - - await flush(runtime, 100) - expect(fetchCalls).toHaveLength(1) - - // t=30s: only 30s since lastErrorAt, backoff=60s → skip - await flush(runtime, 30_000) - expect(fetchCalls).toHaveLength(1) - - // t=60s: 60s since lastErrorAt → retry - await flush(runtime, 30_000) - expect(fetchCalls).toHaveLength(2) - }) - - it('success after errors resets consecutiveErrors and lastErrorAt', async () => { - let shouldFail = true - const connector = createTestConnector('recovery', async () => { - if (shouldFail) throw new SyncError(SyncErrorCode.API_SERVER_ERROR, 'fail') - return { items: [makeItem('ok')], nextCursor: null } - }) - registry.register(connector) - setState(db, { connectorId: 'recovery' }) - - scheduler = new SyncScheduler( - db, - registry, - { - forwardIntervalMs: 1_000, - retryBackoffMs: [5_000], - pageDelayMs: 0, - maxMinutesPerRun: 1, - }, - runtime, - ) - scheduler.start() - - await flush(runtime, 100) - shouldFail = false - - // t=30s: backoff=5s elapsed, retry fires — now succeeds - await flush(runtime, 30_000) - - const state = db - .prepare( - 'SELECT consecutive_errors, last_error_at FROM connector_sync_state WHERE connector_id = ?', - ) - .get('recovery') as { consecutive_errors: number; last_error_at: string | null } - expect(state.consecutive_errors).toBe(0) - expect(state.last_error_at).toBeNull() - }) - }) - - describe('Scheduling', () => { - it('forward sync is scheduled after forwardIntervalMs elapses', async () => { - const fetchCalls: string[] = [] - const connector = createTestConnector('interval-test', async (ctx) => { - fetchCalls.push(ctx.phase) - return { items: [makeItem('i-1')], nextCursor: null } - }) - registry.register(connector) - setState(db, { connectorId: 'interval-test' }) - - scheduler = new SyncScheduler( - db, - registry, - { - forwardIntervalMs: 10_000, - backfillIntervalMs: 999_999_999, - pageDelayMs: 0, - maxMinutesPerRun: 1, - }, - runtime, - ) - scheduler.start() - - await flush(runtime, 100) - expect(fetchCalls.length).toBeGreaterThanOrEqual(1) - - fetchCalls.length = 0 - - // t=30s: 30s > 10s forwardIntervalMs → forward due - await flush(runtime, 30_000) - expect(fetchCalls.length).toBeGreaterThanOrEqual(1) - }) - - it('backfill not scheduled when tailComplete', async () => { - const phases: string[] = [] - const connector = createTestConnector('backfill-done', async (ctx) => { - phases.push(ctx.phase) - return { items: [makeItem('bf-1')], nextCursor: null } - }) - registry.register(connector) - setState(db, { - connectorId: 'backfill-done', - tailComplete: true, - }) - - scheduler = new SyncScheduler( - db, - registry, - { - forwardIntervalMs: 999_999_999, - backfillIntervalMs: 1_000, - pageDelayMs: 0, - maxMinutesPerRun: 1, - }, - runtime, - ) - scheduler.start() - - await flush(runtime, 100) - await flush(runtime, 90_000) - - const backfillCalls = phases.filter((p) => p === 'backfill') - expect(backfillCalls).toHaveLength(0) - }) - - it('triggerNow during running sync is a no-op (enqueue dedupes)', async () => { - let 
fetchCount = 0 - let release: () => void = () => {} - const connector = createTestConnector('dedupe', async () => { - fetchCount++ - await new Promise<void>((resolve) => { release = resolve }) - return { items: [makeItem('d-1')], nextCursor: null } - }) - registry.register(connector) - setState(db, { connectorId: 'dedupe', tailComplete: true }) - - scheduler = new SyncScheduler( - db, - registry, - { forwardIntervalMs: 999_999_999, pageDelayMs: 0, maxMinutesPerRun: 1 }, - runtime, - ) - scheduler.start() - await flush(runtime, 100) - - expect(fetchCount).toBe(1) // startup sync in flight, parked - - scheduler.triggerNow('dedupe', 'forward') - await flush(runtime, 100) - - expect(fetchCount).toBe(1) // no new sync — enqueue sees running.has(id) and no-ops - - release() - await flush(runtime, 100) // let cleanup run - }) - - it('triggerNow runs immediately with highest priority', async () => { - const fetchCalls: number[] = [] - const connector = createTestConnector('manual', async () => { - fetchCalls.push(Date.now()) - return { items: [makeItem('m-1')], nextCursor: null } - }) - registry.register(connector) - setState(db, { connectorId: 'manual' }) - - scheduler = new SyncScheduler( - db, - registry, - { - forwardIntervalMs: 999_999_999, - pageDelayMs: 0, - maxMinutesPerRun: 1, - }, - runtime, - ) - scheduler.start() - - await flush(runtime, 100) - const afterStartup = fetchCalls.length - - scheduler.triggerNow('manual', 'forward') - await flush(runtime, 100) - - expect(fetchCalls.length).toBeGreaterThan(afterStartup) - }) - }) - - describe('Concurrency', () => { - it('semaphore caps simultaneous syncs at config.concurrency', async () => { - const inFlight = new Set<string>() - let maxInFlight = 0 - const gates = new Map<string, () => void>() - const makeSlow = (id: string): Connector => - createTestConnector(id, async () => { - inFlight.add(id) - maxInFlight = Math.max(maxInFlight, inFlight.size) - await new Promise<void>((resolve) => gates.set(id, resolve)) - inFlight.delete(id) - return { items: [makeItem(`${id}-1`)], nextCursor: null } - }) - - for (const id of ['c1', 'c2', 'c3', 'c4']) { - registry.register(makeSlow(id)) - setState(db, { connectorId: id, tailComplete: true }) - } - - scheduler = new SyncScheduler( - db, - registry, - { - forwardIntervalMs: 1_000, - pageDelayMs: 0, - maxMinutesPerRun: 1, - concurrency: 2, - }, - runtime, - ) - scheduler.start() - - // Let all 4 runJob fibers be forked and block on fetchPage - await flush(runtime, 100) - - expect(maxInFlight).toBe(2) - expect(inFlight.size).toBe(2) - - // Release the 2 currently in flight — next 2 should pick up permits - for (const id of Array.from(inFlight)) gates.get(id)!() - await flush(runtime, 100) - expect(maxInFlight).toBe(2) // still capped - - // Release second wave - for (const id of Array.from(inFlight)) gates.get(id)!() - await flush(runtime, 100) - }) - }) - - describe('Cancellation', () => { - it('stop() causes in-flight sync to return with stopReason=cancelled and no sync-error', async () => { - const events: SchedulerEvent[] = [] - let page = 0 - const connector = createTestConnector('cancel-me', async () => { - page++ - return { - items: [makeItem(`c-${page}`)], - nextCursor: `cursor-${page + 1}`, - } - }) - registry.register(connector) - setState(db, { connectorId: 'cancel-me', tailComplete: true }) - - scheduler = new SyncScheduler( - db, - registry, - { - forwardIntervalMs: 999_999_999, - // long enough that the inter-page sleep is still pending when stop() fires - pageDelayMs: 60_000, - maxMinutesPerRun: 5, - }, - runtime, - ) -
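The cap asserted by the Concurrency test above follows directly from Effect semaphore semantics — a self-contained sketch (effect 3.x), independent of the scheduler:

import { Effect } from 'effect'

// Four tasks, two permits: at most two bodies run at once.
const capDemo = Effect.gen(function* () {
  const sem = yield* Effect.makeSemaphore(2)
  let inFlight = 0
  let maxInFlight = 0
  const task = sem.withPermits(1)(
    Effect.gen(function* () {
      inFlight++
      maxInFlight = Math.max(maxInFlight, inFlight)
      yield* Effect.sleep('10 millis')
      inFlight--
    }),
  )
  yield* Effect.all([task, task, task, task], { concurrency: 'unbounded' })
  return maxInFlight // → 2, mirroring the maxInFlight assertion above
})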
scheduler.on((event) => events.push(event)) - scheduler.start() - - await flush(runtime, 100) - expect(page).toBeGreaterThanOrEqual(1) - - scheduler.stop() - await flush(runtime, 100) - - const completes = events.filter((e) => e.type === 'sync-complete') - const errors = events.filter((e) => e.type === 'sync-error') - expect(completes).toHaveLength(1) - expect( - completes[0]!.type === 'sync-complete' && completes[0]!.result.stopReason, - ).toBe('cancelled') - expect(errors).toHaveLength(0) - }) - }) -}) diff --git a/packages/core/src/connectors/sync-scheduler.ts b/packages/core/src/connectors/sync-scheduler.ts deleted file mode 100644 index 2ddd397..0000000 --- a/packages/core/src/connectors/sync-scheduler.ts +++ /dev/null @@ -1,314 +0,0 @@ -import type { - ConnectorSyncResult, - SyncJob, - SyncJobPriority, - ScheduleConfig, - SchedulerStatus, - ConnectorStatus, - SyncProgress, -} from './types.js' -import { DEFAULT_SCHEDULE, SyncErrorCode } from './types.js' -import type { ConnectorRegistry } from './registry.js' -import { SyncEngine, loadSyncState } from './sync-engine.js' -import type Database from 'better-sqlite3' -import { - Cause, - Clock, - Deferred, - Duration, - Effect, - Fiber, - Layer, - ManagedRuntime, - pipe, - Schedule, -} from 'effect' - -type Direction = SyncJob['direction'] - -export type SchedulerEvent = - | { type: 'sync-start'; connectorId: string } - | { type: 'sync-progress'; progress: SyncProgress } - | { type: 'sync-complete'; result: ConnectorSyncResult } - | { type: 'sync-error'; connectorId: string; code: SyncErrorCode; message: string } - -export type SchedulerEventHandler = (event: SchedulerEvent) => void - -export class SyncScheduler { - private engine: SyncEngine - private config: ScheduleConfig - private queue: SyncJob[] = [] - // Per-job cancel tokens. Fired by stop() so in-flight syncs wind down - // cooperatively (engine checks Deferred.isDone at every loop yield point). - private running = new Map<string, Deferred.Deferred<void>>() - private tickFiber: Fiber.RuntimeFiber<void> | null = null - private eventHandlers: SchedulerEventHandler[] = [] - // Production: Layer.empty. Tests inject TestContext.TestContext for TestClock. - private runtime: ManagedRuntime.ManagedRuntime<never, never> - private semaphore: Effect.Semaphore - - constructor( - private db: Database.Database, - private registry: ConnectorRegistry, - config?: Partial<ScheduleConfig>, - runtime?: ManagedRuntime.ManagedRuntime<never, never>, - ) { - this.engine = new SyncEngine(db) - this.config = { ...DEFAULT_SCHEDULE, ...config } - this.runtime = runtime ?? ManagedRuntime.make(Layer.empty) - this.semaphore = Effect.runSync(Effect.makeSemaphore(this.config.concurrency)) - } - - // ── Lifecycle ───────────────────────────────────────────────────────────── - - start(): void { - if (this.tickFiber) return - - // Queue immediate forward sync synchronously so observers of the queue - // right after start() see it populated. - this.queueAll('both', 80) - - const tickProgram = pipe( - this.tickOnceEffect(), - Effect.repeat(Schedule.spaced(Duration.seconds(30))), - Effect.asVoid, - Effect.catchAllCause((cause) => - Cause.isInterruptedOnly(cause) - ? Effect.void - : Effect.logError('scheduler tick fiber crashed', cause), - ), - ) - this.tickFiber = this.runtime.runFork(tickProgram) - } - - stop(): void { - if (this.tickFiber) { - // Fire-and-forget interrupt. runJob fibers are siblings (not children), - // so this does NOT cascade to in-flight syncs — they unwind via the - // per-job Deferred below.
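The cooperative wind-down that comment describes — a hedged standalone sketch (effect 3.x) of a worker that polls a Deferred token instead of being interrupted, so it can persist partial progress before returning:

import { Deferred, Effect, Fiber } from 'effect'

const windDownDemo = Effect.gen(function* () {
  const cancel = yield* Deferred.make<void>()
  const worker = yield* Effect.fork(
    Effect.gen(function* () {
      let pages = 0
      // Like the engine's loop, check the token at every yield point…
      while (!(yield* Deferred.isDone(cancel))) {
        pages++ // …do one unit of work…
        yield* Effect.sleep('5 millis')
      }
      return pages // …and return gracefully with partial progress.
    }),
  )
  yield* Deferred.succeed(cancel, void 0)
  return yield* Fiber.join(worker)
})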
- this.runtime.runFork(Fiber.interrupt(this.tickFiber)) - this.tickFiber = null - } - for (const [, deferred] of this.running) { - Effect.runSync(Deferred.succeed(deferred, void 0)) - } - this.running.clear() - this.queue = [] - } - - /** Cancel an in-flight sync and remove queued jobs for a connector. */ - cancelIfRunning(connectorId: string): void { - const cancel = this.running.get(connectorId) - if (cancel) { - Effect.runSync(Deferred.succeed(cancel, void 0)) - this.running.delete(connectorId) - } - this.queue = this.queue.filter(j => j.connectorId !== connectorId) - } - - /** Manually trigger sync for a specific connector. */ - triggerNow(connectorId: string, direction: Direction = 'both'): void { - this.enqueue({ connectorId, direction, priority: 100, queuedAt: Date.now() }) - this.poke() - } - - /** Notify the scheduler that the system woke from sleep. */ - onWake(): void { - this.queueAll('forward', 60) - this.poke() - } - - /** Subscribe to scheduler events. */ - on(handler: SchedulerEventHandler): () => void { - this.eventHandlers.push(handler) - return () => { - this.eventHandlers = this.eventHandlers.filter(h => h !== handler) - } - } - - // ── Status ──────────────────────────────────────────────────────────────── - - getStatus(): SchedulerStatus { - const connectors: ConnectorStatus[] = this.registry.list().map(c => { - const state = loadSyncState(this.db, c.id) - return { - id: c.id, - label: c.label, - description: c.description, - platform: c.platform, - color: c.color, - enabled: state.enabled, - syncing: this.running.has(c.id), - bundled: false, - version: '0.0.0', - packageName: '', - state, - } - }) - - return { running: this.tickFiber !== null, connectors } - } - - // ── Internal ────────────────────────────────────────────────────────────── - - private emit(event: SchedulerEvent): void { - for (const handler of this.eventHandlers) { - try { handler(event) } catch {} - } - } - - private poke(): void { - this.runtime.runFork(this.tickOnceEffect()) - } - - private queueAll(direction: Direction, priority: SyncJobPriority): void { - for (const connector of this.registry.list()) { - const state = loadSyncState(this.db, connector.id) - if (!state.enabled) continue - this.enqueue({ connectorId: connector.id, direction, priority, queuedAt: Date.now() }) - } - } - - private enqueue(job: SyncJob): void { - if (this.running.has(job.connectorId)) return - if (this.queue.some(j => j.connectorId === job.connectorId)) { - this.queue = this.queue.map(j => - j.connectorId === job.connectorId && job.priority > j.priority ? job : j, - ) - return - } - this.queue.push(job) - this.queue.sort((a, b) => b.priority - a.priority) - } - - private tickOnceEffect(): Effect.Effect<void> { - const self = this - return Effect.gen(function* () { - if (self.tickFiber === null) return - - const now = yield* Clock.currentTimeMillis - - for (const connector of self.registry.list()) { - const state = loadSyncState(self.db, connector.id) - if (!state.enabled) continue - - if (state.consecutiveErrors > 0 && state.lastErrorAt) { - const backoffMs = self.getBackoffMs(state.consecutiveErrors) - if (now - new Date(state.lastErrorAt).getTime() < backoffMs) continue - } - - if (state.lastErrorCode?.startsWith('AUTH_')) continue - - const lastForward = state.lastForwardSyncAt - ?
new Date(state.lastForwardSyncAt).getTime() - : 0 - if (now - lastForward >= self.config.forwardIntervalMs) { - self.enqueue({ - connectorId: connector.id, - direction: 'forward', - priority: 40, - queuedAt: now, - }) - } - - if (!state.tailComplete) { - const lastBackfill = state.lastBackfillSyncAt - ? new Date(state.lastBackfillSyncAt).getTime() - : 0 - if (now - lastBackfill >= self.config.backfillIntervalMs) { - self.enqueue({ - connectorId: connector.id, - direction: 'backfill', - priority: 20, - queuedAt: now, - }) - } - } - } - - yield* Effect.sync(() => self.drainQueue()) - }) - } - - // Drain queued work up to concurrency. The semaphore is the *real* gate on - // how many jobs run simultaneously; this loop just submits fibers. - // Fork as siblings of the tick fiber, NOT children — stop()'s interrupt of - // the tick fiber must not cascade and short-circuit in-flight state persistence. - private drainQueue(): void { - while (this.queue.length > 0 && this.running.size < this.config.concurrency) { - const job = this.queue.shift()! - this.runtime.runFork(this.runJobEffect(job)) - } - } - - private runJobEffect(job: SyncJob): Effect.Effect<void> { - const self = this - - const body = Effect.gen(function* () { - if (!self.registry.has(job.connectorId)) return - const connector = self.registry.get(job.connectorId) - - const cancel = yield* Deferred.make<void>() - yield* Effect.sync(() => { - self.running.set(job.connectorId, cancel) - }) - - yield* Effect.sync(() => - self.emit({ type: 'sync-start', connectorId: job.connectorId }), - ) - - const result = yield* self.engine.syncEffect(connector, { - direction: job.direction, - delayMs: self.config.pageDelayMs, - maxMinutes: self.config.maxMinutesPerRun, - cancel, - onProgress: (progress) => { - self.emit({ type: 'sync-progress', progress }) - }, - }) - - yield* Effect.sync(() => { - self.emit({ type: 'sync-complete', result }) - if (result.error) { - self.emit({ - type: 'sync-error', - connectorId: job.connectorId, - code: result.error.code as SyncErrorCode, - message: result.error.message, - }) - } - }) - }) - - return pipe( - this.semaphore.withPermits(1)(body), - Effect.ensuring( - Effect.sync(() => { - self.running.delete(job.connectorId) - // Drain remaining queue immediately rather than waiting 30s for the - // next periodic tick. Skips the connector rescan in tickOnceEffect. - if (self.queue.length > 0 && self.tickFiber !== null) { - self.drainQueue() - } - }), - ), - Effect.catchAllCause((cause) => - Cause.isInterruptedOnly(cause) - ? Effect.void - : Effect.sync(() => { - self.emit({ - type: 'sync-error', - connectorId: job.connectorId, - code: SyncErrorCode.CONNECTOR_ERROR, - message: `runJob defect: ${Cause.pretty(cause)}`, - }) - }), - ), - ) - } - - private getBackoffMs(consecutiveErrors: number): number { - const idx = Math.min(consecutiveErrors - 1, this.config.retryBackoffMs.length - 1) - return this.config.retryBackoffMs[idx] ?? this.config.retryBackoffMs.at(-1) ??
60_000 - } -} diff --git a/packages/core/src/connectors/test-helpers.ts b/packages/core/src/connectors/test-helpers.ts deleted file mode 100644 index 0c020bb..0000000 --- a/packages/core/src/connectors/test-helpers.ts +++ /dev/null @@ -1,114 +0,0 @@ -import Database from 'better-sqlite3' -import { saveSyncState } from './sync-engine.js' -import type { SyncState } from './types.js' -import type { CapturedItem } from './types.js' - -export function createTestDB(): InstanceType<typeof Database> { - const db = new Database(':memory:') - db.pragma('journal_mode = WAL') - db.pragma('foreign_keys = ON') - - db.exec(` - CREATE TABLE sources ( - id INTEGER PRIMARY KEY, - name TEXT NOT NULL UNIQUE, - base_path TEXT NOT NULL, - created_at TEXT NOT NULL DEFAULT (datetime('now')) - ); - INSERT INTO sources (name, base_path) VALUES - ('claude', '~/.claude/projects'), - ('connector', ''); - - CREATE TABLE captures ( - id INTEGER PRIMARY KEY, - source_id INTEGER NOT NULL REFERENCES sources(id), - capture_uuid TEXT NOT NULL UNIQUE, - url TEXT NOT NULL, - title TEXT NOT NULL DEFAULT '', - content_text TEXT NOT NULL DEFAULT '', - author TEXT, - platform TEXT NOT NULL, - platform_id TEXT, - content_type TEXT NOT NULL DEFAULT 'page', - thumbnail_url TEXT, - metadata TEXT NOT NULL DEFAULT '{}', - captured_at TEXT NOT NULL, - indexed_at TEXT NOT NULL DEFAULT (datetime('now')), - raw_json TEXT - ); - - CREATE TABLE capture_connectors ( - capture_id INTEGER NOT NULL REFERENCES captures(id) ON DELETE CASCADE, - connector_id TEXT NOT NULL, - PRIMARY KEY (capture_id, connector_id) - ); - CREATE INDEX idx_capture_connectors_connector ON capture_connectors(connector_id); - - CREATE TABLE connector_sync_state ( - connector_id TEXT PRIMARY KEY, - head_cursor TEXT, - head_item_id TEXT, - tail_cursor TEXT, - tail_complete INTEGER NOT NULL DEFAULT 0, - last_forward_sync_at TEXT, - last_backfill_sync_at TEXT, - total_synced INTEGER NOT NULL DEFAULT 0, - consecutive_errors INTEGER NOT NULL DEFAULT 0, - enabled INTEGER NOT NULL DEFAULT 1, - config_json TEXT NOT NULL DEFAULT '{}', - last_error_at TEXT, - last_error_code TEXT, - last_error_message TEXT - ); - - CREATE TABLE stars ( - item_type TEXT NOT NULL CHECK (item_type IN ('session', 'capture')), - item_uuid TEXT NOT NULL, - starred_at TEXT NOT NULL DEFAULT (datetime('now')), - PRIMARY KEY (item_type, item_uuid) - ); - `) - return db -} - -export function makeItem(platformId: string): CapturedItem { - return { - url: `https://example.com/${platformId}`, - title: `Item ${platformId}`, - contentText: `Content for ${platformId}`, - author: null, - platform: 'test', - platformId, - contentType: 'post', - thumbnailUrl: null, - metadata: {}, - capturedAt: new Date().toISOString(), - rawJson: null, - } -} - -export function setState(db: InstanceType<typeof Database>, partial: Partial<SyncState> & { connectorId: string }): void { - const { connectorId, ...rest } = partial - const state: SyncState = { - connectorId, - headCursor: null, - headItemId: null, - tailCursor: null, - tailComplete: false, - lastForwardSyncAt: null, - lastBackfillSyncAt: null, - totalSynced: 0, - consecutiveErrors: 0, - enabled: true, - configJson: {}, - lastErrorAt: null, - lastErrorCode: null, - lastErrorMessage: null, - ...rest, - } - saveSyncState(db, state) -} - -export function countCaptures(db: InstanceType<typeof Database>): number { - return (db.prepare('SELECT COUNT(*) as c FROM captures').get() as { c: number }).c -} diff --git a/packages/core/src/connectors/trust-store.test.ts b/packages/core/src/connectors/trust-store.test.ts deleted file mode 100644 index
ce83253..0000000 --- a/packages/core/src/connectors/trust-store.test.ts +++ /dev/null @@ -1,66 +0,0 @@ -import { describe, it, expect, beforeEach, afterEach } from 'vitest' -import { mkdtempSync, rmSync, writeFileSync, readFileSync } from 'node:fs' -import { join } from 'node:path' -import { tmpdir } from 'node:os' -import { TrustStore } from './trust-store.js' - -describe('TrustStore', () => { - let dir: string - let store: TrustStore - - beforeEach(() => { - dir = mkdtempSync(join(tmpdir(), 'spool-trust-')) - store = new TrustStore(dir) - }) - - afterEach(() => { - rmSync(dir, { recursive: true, force: true }) - }) - - it('auto-trusts @spool-lab/* packages', () => { - expect(store.isTrusted('@spool-lab/connector-twitter-bookmarks')).toBe(true) - expect(store.isTrusted('@spool-lab/connector-anything')).toBe(true) - }) - - it('rejects unknown community packages', () => { - expect(store.isTrusted('@community/connector-foo')).toBe(false) - }) - - it('trusts community package after explicit add', () => { - store.add('@community/connector-foo') - expect(store.isTrusted('@community/connector-foo')).toBe(true) - }) - - it('persists trust to config.json', () => { - store.add('@community/connector-foo') - const raw = JSON.parse(readFileSync(join(dir, 'config.json'), 'utf8')) - expect(raw.trustedConnectors).toContain('@community/connector-foo') - }) - - it('removes trust', () => { - store.add('@community/connector-foo') - store.remove('@community/connector-foo') - expect(store.isTrusted('@community/connector-foo')).toBe(false) - }) - - it('loads existing config on construction', () => { - writeFileSync( - join(dir, 'config.json'), - JSON.stringify({ trustedConnectors: ['@community/connector-bar'] }), - ) - const store2 = new TrustStore(dir) - expect(store2.isTrusted('@community/connector-bar')).toBe(true) - }) - - it('preserves other config keys when writing', () => { - writeFileSync( - join(dir, 'config.json'), - JSON.stringify({ someOtherKey: 42 }), - ) - const store2 = new TrustStore(dir) - store2.add('@community/connector-foo') - const raw = JSON.parse(readFileSync(join(dir, 'config.json'), 'utf8')) - expect(raw.someOtherKey).toBe(42) - expect(raw.trustedConnectors).toContain('@community/connector-foo') - }) -}) diff --git a/packages/core/src/connectors/trust-store.ts b/packages/core/src/connectors/trust-store.ts deleted file mode 100644 index f817ddf..0000000 --- a/packages/core/src/connectors/trust-store.ts +++ /dev/null @@ -1,50 +0,0 @@ -import { existsSync, mkdirSync, readFileSync, writeFileSync } from 'node:fs' -import { join } from 'node:path' - -const FIRST_PARTY_SCOPE = '@spool-lab/' -const CONFIG_FILE = 'config.json' - -export class TrustStore { - private trusted: Set - private readonly configPath: string - private configData: Record - - constructor(private readonly spoolDir: string) { - this.configPath = join(spoolDir, CONFIG_FILE) - this.configData = this.readConfig() - const list = Array.isArray(this.configData['trustedConnectors']) - ? 
(this.configData['trustedConnectors'] as string[]) - : [] - this.trusted = new Set(list) - } - - isTrusted(packageName: string): boolean { - if (packageName.startsWith(FIRST_PARTY_SCOPE)) return true - return this.trusted.has(packageName) - } - - add(packageName: string): void { - this.trusted.add(packageName) - this.save() - } - - remove(packageName: string): void { - this.trusted.delete(packageName) - this.save() - } - - private save(): void { - mkdirSync(this.spoolDir, { recursive: true }) - this.configData['trustedConnectors'] = [...this.trusted] - writeFileSync(this.configPath, JSON.stringify(this.configData, null, 2), 'utf8') - } - - private readConfig(): Record { - if (!existsSync(this.configPath)) return {} - try { - return JSON.parse(readFileSync(this.configPath, 'utf8')) - } catch { - return {} - } - } -} diff --git a/packages/core/src/connectors/types.ts b/packages/core/src/connectors/types.ts deleted file mode 100644 index 1ee14a6..0000000 --- a/packages/core/src/connectors/types.ts +++ /dev/null @@ -1,179 +0,0 @@ -import { Data } from 'effect' -import type { Deferred } from 'effect' -import type { - Connector, - AuthStatus, - PageResult, - FetchContext, - CapturedItem, - SyncState, -} from '@spool-lab/connector-sdk' -import { SyncErrorCode, SYNC_ERROR_HINTS, isSyncError } from '@spool-lab/connector-sdk' - -// ── Re-exports from SDK ──────────────────────────────────────────────────── -export { - SyncErrorCode, - SYNC_ERROR_HINTS, -} from '@spool-lab/connector-sdk' -export type { - Connector, - AuthStatus, - PageResult, - FetchContext, - CapturedItem, - SyncState, -} from '@spool-lab/connector-sdk' - -// ── Internal Effect-tagged SyncError ────────────────────────────────────── -const RETRYABLE_CODES = new Set([ - SyncErrorCode.API_RATE_LIMITED, - SyncErrorCode.API_SERVER_ERROR, - SyncErrorCode.NETWORK_OFFLINE, - SyncErrorCode.NETWORK_TIMEOUT, - SyncErrorCode.SYNC_MAX_PAGES, - SyncErrorCode.SYNC_TIMEOUT, - SyncErrorCode.SYNC_CANCELLED, -]) - -/** - * Internal Effect-tagged version of SyncError used inside @spool-lab/core for - * Effect's typed error channel. External callers (connectors) use the plain - * class exported from @spool-lab/connector-sdk. Translation between the two - * happens in SyncError.from(). - */ -export class SyncError extends Data.TaggedError('SyncError')<{ - readonly code: SyncErrorCode - readonly message: string - readonly cause?: unknown -}> { - constructor(code: SyncErrorCode, message?: string, cause?: unknown) { - super({ code, message: message ?? SYNC_ERROR_HINTS[code], cause }) - } - - static from(e: unknown): SyncError { - if (e instanceof SyncError) return e - if (isSyncError(e)) { - return new SyncError(e.code, e.message, e.cause) - } - return new SyncError( - SyncErrorCode.CONNECTOR_ERROR, - e instanceof Error ? e.message : String(e), - e, - ) - } - - /** Whether this error indicates the connector needs re-authentication. */ - get needsReauth(): boolean { - return this.code.startsWith('AUTH_') - } - - /** Whether this error is transient and the sync can be retried. */ - get retryable(): boolean { - return RETRYABLE_CODES.has(this.code) - } -} - -// ── SyncOptions / SyncProgress / ConnectorSyncResult ────────────────────── - -export interface SyncOptions { - /** Which direction to sync. Default: 'both'. */ - direction?: 'forward' | 'backfill' | 'both' - /** Delay between page requests in ms. Default: 600. */ - delayMs?: number - /** Max runtime in minutes. 0 = unlimited. Default: 0 (no limit). 
*/ - maxMinutes?: number - /** Consecutive pages with 0 new items before stopping forward sync. Default: 3. */ - stalePageLimit?: number - /** Hard cap on total pages fetched per sync run. Default: 100. */ - maxPages?: number - /** AbortSignal for cancellation. */ - signal?: AbortSignal - /** - * Caller-owned cancellation Deferred. When provided, syncEffect uses this - * instead of creating its own — allowing callers like SyncScheduler to - * cancel from outside. If `signal` is also set, it is bridged into this - * Deferred. - */ - cancel?: Deferred.Deferred - /** Progress callback. */ - onProgress?: (progress: SyncProgress) => void -} - -export interface SyncProgress { - connectorId: string - phase: 'forward' | 'backfill' - page: number - fetched: number - added: number - running: boolean -} - -export interface ConnectorSyncResult { - connectorId: string - added: number - total: number - pages: number - direction: 'forward' | 'backfill' | 'both' - stopReason: string - error?: { - code: SyncErrorCode - message: string - } -} - -// ── Scheduler types (remain in core) ────────────────────────────────────── - -export interface ScheduleConfig { - forwardIntervalMs: number - backfillIntervalMs: number - concurrency: number - pageDelayMs: number - retryBackoffMs: number[] - maxMinutesPerRun: number -} - -export const DEFAULT_SCHEDULE: ScheduleConfig = { - forwardIntervalMs: 15 * 60_000, - backfillIntervalMs: 60 * 60_000, - concurrency: 1, - pageDelayMs: 1200, - retryBackoffMs: [60_000, 300_000, 1_800_000, 7_200_000], - maxMinutesPerRun: 10, -} - -export type SyncJobPriority = 100 | 80 | 60 | 40 | 20 - -export interface SyncJob { - connectorId: string - direction: 'forward' | 'backfill' | 'both' - priority: SyncJobPriority - queuedAt: number -} - -export interface ConnectorPackage { - id: string - packageName: string - rootDir: string - connectors: Connector[] - prerequisites?: import('@spool-lab/connector-sdk').Prerequisite[] -} - -export interface ConnectorStatus { - id: string - label: string - description: string - platform: string - color: string - enabled: boolean - syncing: boolean - version: string - packageName: string - packageId?: string - setup?: import('@spool-lab/connector-sdk').SetupStep[] - state: SyncState -} - -export interface SchedulerStatus { - running: boolean - connectors: ConnectorStatus[] -} diff --git a/packages/core/src/db/db.ts b/packages/core/src/db/db.ts index de81e18..b12ef45 100644 --- a/packages/core/src/db/db.ts +++ b/packages/core/src/db/db.ts @@ -38,10 +38,9 @@ function runMigrations(db: Database.Database): void { ); INSERT OR IGNORE INTO sources (name, base_path) VALUES - ('claude', '~/.claude/projects'), - ('codex', '~/.codex/sessions'), - ('gemini', '~/.gemini/tmp'), - ('connector', ''); + ('claude', '~/.claude/projects'), + ('codex', '~/.codex/sessions'), + ('gemini', '~/.gemini/tmp'); CREATE TABLE IF NOT EXISTS projects ( id INTEGER PRIMARY KEY, @@ -140,96 +139,24 @@ function runMigrations(db: Database.Database): void { tokenize='trigram' ); - -- ── Captures (connector items) ───────────────────────────────────────── - - CREATE TABLE IF NOT EXISTS captures ( - id INTEGER PRIMARY KEY, - source_id INTEGER NOT NULL REFERENCES sources(id), - capture_uuid TEXT NOT NULL UNIQUE, - url TEXT NOT NULL, - title TEXT NOT NULL DEFAULT '', - content_text TEXT NOT NULL DEFAULT '', - author TEXT, - platform TEXT NOT NULL, - platform_id TEXT, - content_type TEXT NOT NULL DEFAULT 'page', - thumbnail_url TEXT, - metadata TEXT NOT NULL DEFAULT '{}', - captured_at TEXT NOT 
NULL, - indexed_at TEXT NOT NULL DEFAULT (datetime('now')), - raw_json TEXT - ); - - CREATE INDEX IF NOT EXISTS idx_captures_platform ON captures(platform); - CREATE INDEX IF NOT EXISTS idx_captures_url ON captures(url); - CREATE INDEX IF NOT EXISTS idx_captures_captured ON captures(captured_at DESC); - - CREATE VIRTUAL TABLE IF NOT EXISTS captures_fts USING fts5( - title, - content_text, - content='captures', - content_rowid='id', - tokenize='unicode61 remove_diacritics 1' - ); - - CREATE VIRTUAL TABLE IF NOT EXISTS captures_fts_trigram USING fts5( - title, - content_text, - content='captures', - content_rowid='id', - tokenize='trigram' - ); - - -- ── Connector sync state ──────────────────────────────────────────────── - - CREATE TABLE IF NOT EXISTS capture_connectors ( - capture_id INTEGER NOT NULL REFERENCES captures(id) ON DELETE CASCADE, - connector_id TEXT NOT NULL, - PRIMARY KEY (capture_id, connector_id) - ); - - CREATE INDEX IF NOT EXISTS idx_capture_connectors_connector - ON capture_connectors(connector_id); - -- ── Stars ───────────────────────────────────────────────────────────── - -- Unified star table for both sessions and captures. Referent is keyed by - -- its natural UUID (session_uuid / capture_uuid), which stays stable - -- across re-index. No FK — queries filter orphans at read time via JOIN, - -- so transient referent absence (e.g. transcript file removed then - -- restored) doesn't destroy the star. CHECK constraint guards against - -- typos in item_type. + -- Star table keyed by session_uuid (natural id, stable across re-index). + -- No FK — queries filter orphans at read time via JOIN, so transient + -- referent absence (e.g. transcript file removed then restored) doesn't + -- destroy the star. CHECK constraint guards against typos in item_type. 
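+    -- Illustrative only, not part of the schema (hypothetical query; the
+    -- real code in queries.ts uses an equivalent EXISTS filter): an
+    -- orphan-filtered read joins back to the referent, e.g.
+    --   SELECT st.item_uuid FROM stars st
+    --   JOIN sessions s ON s.session_uuid = st.item_uuid
+    --   ORDER BY st.starred_at DESC;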
     CREATE TABLE IF NOT EXISTS stars (
-      item_type TEXT NOT NULL CHECK (item_type IN ('session', 'capture')),
+      item_type TEXT NOT NULL CHECK (item_type = 'session'),
       item_uuid TEXT NOT NULL,
       starred_at TEXT NOT NULL DEFAULT (datetime('now')),
       PRIMARY KEY (item_type, item_uuid)
     );

     CREATE INDEX IF NOT EXISTS idx_stars_starred_at ON stars(starred_at DESC);
-
-    CREATE TABLE IF NOT EXISTS connector_sync_state (
-      connector_id TEXT PRIMARY KEY,
-      head_cursor TEXT,
-      head_item_id TEXT,
-      tail_cursor TEXT,
-      tail_complete INTEGER NOT NULL DEFAULT 0,
-      last_forward_sync_at TEXT,
-      last_backfill_sync_at TEXT,
-      total_synced INTEGER NOT NULL DEFAULT 0,
-      consecutive_errors INTEGER NOT NULL DEFAULT 0,
-      enabled INTEGER NOT NULL DEFAULT 1,
-      config_json TEXT NOT NULL DEFAULT '{}',
-      last_error_at TEXT,
-      last_error_code TEXT,
-      last_error_message TEXT
-    );
   `)

   db.exec(`
     DROP TRIGGER IF EXISTS messages_fts_insert;
     DROP TRIGGER IF EXISTS messages_fts_delete;
-    DROP TRIGGER IF EXISTS captures_fts_insert;
-    DROP TRIGGER IF EXISTS captures_fts_delete;
     DROP TRIGGER IF EXISTS session_search_fts_insert;
     DROP TRIGGER IF EXISTS session_search_fts_update;
     DROP TRIGGER IF EXISTS session_search_fts_delete;
@@ -250,22 +177,6 @@ function runMigrations(db: Database.Database): void {
         VALUES('delete', OLD.id, OLD.content_text);
       END;

-    CREATE TRIGGER captures_fts_insert
-      AFTER INSERT ON captures BEGIN
-        INSERT INTO captures_fts(rowid, title, content_text)
-        VALUES(NEW.id, NEW.title, NEW.content_text);
-        INSERT INTO captures_fts_trigram(rowid, title, content_text)
-        VALUES(NEW.id, NEW.title, NEW.content_text);
-      END;
-
-    CREATE TRIGGER captures_fts_delete
-      AFTER DELETE ON captures BEGIN
-        INSERT INTO captures_fts(captures_fts, rowid, title, content_text)
-        VALUES('delete', OLD.id, OLD.title, OLD.content_text);
-        INSERT INTO captures_fts_trigram(captures_fts_trigram, rowid, title, content_text)
-        VALUES('delete', OLD.id, OLD.title, OLD.content_text);
-      END;
-
     CREATE TRIGGER session_search_fts_insert
       AFTER INSERT ON session_search BEGIN
         INSERT INTO session_search_fts(rowid, title, user_text, assistant_text)
@@ -301,64 +212,47 @@ function runMigrations(db: Database.Database): void {
   // the next sequential version number.
   const version = (db.pragma('user_version') as [{ user_version: number }])[0].user_version

+  // Historical connector migrations (v1-v3): all operate on the captures /
+  // connector_sync_state tables that were dropped in v5. Wrapped in try/catch
+  // so they no-op when those tables aren't present (fresh install on the
+  // post-v5 schema).
   if (version < 1) {
-    // v1: add last_error_at for accurate backoff timing
-    try {
-      db.exec('ALTER TABLE connector_sync_state ADD COLUMN last_error_at TEXT')
-    } catch {
-      // Column may already exist if user ran a dev build before this migration
-    }
+    try { db.exec('ALTER TABLE connector_sync_state ADD COLUMN last_error_at TEXT') } catch {}
     db.pragma('user_version = 1')
   }

   if (version < 2) {
-    // v2: rebuild captures FTS indexes to fix corruption from old opencli data
-    // that causes DELETE triggers to fail with SQLITE_CORRUPT
-    try {
-      db.exec("INSERT INTO captures_fts(captures_fts) VALUES('rebuild')")
-      db.exec("INSERT INTO captures_fts_trigram(captures_fts_trigram) VALUES('rebuild')")
-    } catch {
-      // FTS tables may not exist yet on fresh installs — safe to skip
-    }
+    try { db.exec("INSERT INTO captures_fts(captures_fts) VALUES('rebuild')") } catch {}
+    try { db.exec("INSERT INTO captures_fts_trigram(captures_fts_trigram) VALUES('rebuild')") } catch {}
     db.pragma('user_version = 2')
   }

   if (version < 3) {
-    // v3: migrate connector provenance from metadata.connectorId (single-valued,
-    // clobbered on UPSERT) to the M:N capture_connectors table, and stop
-    // claiming connector captures came from source_id=claude.
-    db.transaction(() => {
-      // Backfill M:N from existing metadata.connectorId.
-      db.exec(`
-        INSERT OR IGNORE INTO capture_connectors (capture_id, connector_id)
-        SELECT id, json_extract(metadata, '$.connectorId')
-        FROM captures
-        WHERE json_extract(metadata, '$.connectorId') IS NOT NULL
-      `)
-      // Strip the now-redundant field from metadata.
-      db.exec(`
-        UPDATE captures
-        SET metadata = json_remove(metadata, '$.connectorId')
-        WHERE json_extract(metadata, '$.connectorId') IS NOT NULL
-      `)
-      // Point connector captures at the 'connector' source row instead of claude.
-      db.exec(`
-        UPDATE captures
-        SET source_id = (SELECT id FROM sources WHERE name = 'connector')
-        WHERE id IN (SELECT capture_id FROM capture_connectors)
-      `)
-      // idx_captures_source was never used by any query — drop it.
-      db.exec(`DROP INDEX IF EXISTS idx_captures_source`)
-    })()
+    try {
+      db.transaction(() => {
+        db.exec(`
+          INSERT OR IGNORE INTO capture_connectors (capture_id, connector_id)
+          SELECT id, json_extract(metadata, '$.connectorId')
+          FROM captures
+          WHERE json_extract(metadata, '$.connectorId') IS NOT NULL
+        `)
+        db.exec(`
+          UPDATE captures
+          SET metadata = json_remove(metadata, '$.connectorId')
+          WHERE json_extract(metadata, '$.connectorId') IS NOT NULL
+        `)
+        db.exec(`
+          UPDATE captures
+          SET source_id = (SELECT id FROM sources WHERE name = 'connector')
+          WHERE id IN (SELECT capture_id FROM capture_connectors)
+        `)
+        db.exec(`DROP INDEX IF EXISTS idx_captures_source`)
+      })()
+    } catch {}
     db.pragma('user_version = 3')
   }

   if (version < 4) {
-    // v4: unified stars table covering both sessions and captures. An earlier
-    // in-development iteration used a session-only `session_stars` table —
-    // drop it if present before creating the unified table. Since this
-    // version was never released, users skipping past the intermediate state
-    // simply get the final schema.
+    // v4: stars table. Created with the historical wide CHECK; v5 narrows it.
     db.exec(`
       DROP TABLE IF EXISTS session_stars;
       CREATE TABLE IF NOT EXISTS stars (
@@ -372,28 +266,54 @@ function runMigrations(db: Database.Database): void {
     db.pragma('user_version = 4')
   }

+  if (version < 5) {
+    // v5: connector subsystem removed. Drop captures + connector_sync_state
+    // tables, narrow stars CHECK to session-only.
+    //
+    // SQLite can't ALTER a CHECK constraint, so we rebuild the stars table.
+    // For users who never had the wide CHECK (fresh install on the post-v5
+    // schema), the rebuild is a no-op rename round-trip — safe and cheap.
+    // Captures data is dropped without backup; users were directed to Spool
+    // Daemon for connector functionality.
+    db.transaction(() => {
+      db.exec(`DROP TRIGGER IF EXISTS captures_fts_insert`)
+      db.exec(`DROP TRIGGER IF EXISTS captures_fts_delete`)
+      db.exec(`DROP TABLE IF EXISTS captures_fts_trigram`)
+      db.exec(`DROP TABLE IF EXISTS captures_fts`)
+      db.exec(`DROP TABLE IF EXISTS capture_connectors`)
+      db.exec(`DROP TABLE IF EXISTS captures`)
+      db.exec(`DROP TABLE IF EXISTS connector_sync_state`)
+
+      db.exec(`DELETE FROM stars WHERE item_type = 'capture'`)
+      db.exec(`
+        CREATE TABLE stars_new (
+          item_type TEXT NOT NULL CHECK (item_type = 'session'),
+          item_uuid TEXT NOT NULL,
+          starred_at TEXT NOT NULL DEFAULT (datetime('now')),
+          PRIMARY KEY (item_type, item_uuid)
+        );
+        INSERT INTO stars_new (item_type, item_uuid, starred_at)
+          SELECT item_type, item_uuid, starred_at FROM stars;
+        DROP TABLE stars;
+        ALTER TABLE stars_new RENAME TO stars;
+        CREATE INDEX IF NOT EXISTS idx_stars_starred_at ON stars(starred_at DESC);
+      `)
+
+      db.exec(`DELETE FROM sources WHERE name = 'connector'`)
+    })()
+    db.pragma('user_version = 5')
+  }
+
   rebuildFtsTableIfEmpty(db, 'messages', 'messages_fts_trigram')
-  rebuildFtsTableIfEmpty(db, 'captures', 'captures_fts_trigram')
   rebuildFtsTableIfEmpty(db, 'session_search', 'session_search_fts')
   rebuildFtsTableIfEmpty(db, 'session_search', 'session_search_fts_trigram')
-
-  // Prune stars on captures that no longer exist. Connector-sourced captures
-  // are bulk-replaced with fresh UUIDs on re-sync, so orphans here are
-  // permanent (no "transient absence" semantics like session files have).
-  // Cheap, idempotent, bounded by orphan count.
-  db.exec(`
-    DELETE FROM stars
-    WHERE item_type = 'capture'
-      AND NOT EXISTS (SELECT 1 FROM captures WHERE capture_uuid = stars.item_uuid)
-  `)
 }

 function rebuildFtsTableIfEmpty(
   db: Database.Database,
-  contentTable: 'messages' | 'captures' | 'session_search',
+  contentTable: 'messages' | 'session_search',
   ftsTable:
     | 'messages_fts_trigram'
-    | 'captures_fts_trigram'
     | 'session_search_fts'
     | 'session_search_fts_trigram',
 ): void {
diff --git a/packages/core/src/db/migration-v5.test.ts b/packages/core/src/db/migration-v5.test.ts
new file mode 100644
index 0000000..e599a08
--- /dev/null
+++ b/packages/core/src/db/migration-v5.test.ts
@@ -0,0 +1,193 @@
+import { afterEach, describe, expect, it, vi } from 'vitest'
+import { mkdtempSync, rmSync } from 'node:fs'
+import { join } from 'node:path'
+import { tmpdir } from 'node:os'
+import Database from 'better-sqlite3'
+
+const tempDirs: string[] = []
+
+afterEach(() => {
+  vi.unstubAllEnvs()
+  vi.resetModules()
+  while (tempDirs.length > 0) {
+    const dir = tempDirs.pop()
+    if (dir) rmSync(dir, { recursive: true, force: true })
+  }
+})
+
+function makeTempDir(prefix: string): string {
+  const dir = mkdtempSync(join(tmpdir(), prefix))
+  tempDirs.push(dir)
+  return dir
+}
+
+/**
+ * Build a DB at user_version=4 with the historical schema (captures,
+ * connector_sync_state, capture_connectors, wide stars CHECK) and pre-existing
+ * capture/session data, then load it through the post-v5 getDB() and verify
+ * the migration drops all connector tables, narrows the stars CHECK, and
+ * preserves session stars.
+ */
+describe('migration v5 (connector subsystem removal)', () => {
+  it('drops connector tables, narrows stars CHECK, deletes capture stars, preserves session stars', async () => {
+    const spoolDir = makeTempDir('spool-v5-mig-')
+    const dbPath = join(spoolDir, 'spool.db')
+
+    // ── Seed a v4 DB by hand ──────────────────────────────────────────────
+    const seed = new Database(dbPath)
+    seed.pragma('journal_mode = WAL')
+    seed.pragma('foreign_keys = ON')
+
+    seed.exec(`
+      CREATE TABLE sources (
+        id INTEGER PRIMARY KEY, name TEXT NOT NULL UNIQUE,
+        base_path TEXT NOT NULL, created_at TEXT NOT NULL DEFAULT (datetime('now'))
+      );
+      INSERT INTO sources (name, base_path) VALUES
+        ('claude','~/.claude/projects'),('codex','~/.codex/sessions'),
+        ('gemini','~/.gemini/tmp'),('connector','');
+
+      CREATE TABLE projects (
+        id INTEGER PRIMARY KEY, source_id INTEGER NOT NULL REFERENCES sources(id),
+        slug TEXT NOT NULL, display_path TEXT NOT NULL, display_name TEXT NOT NULL,
+        last_synced TEXT, UNIQUE (source_id, slug)
+      );
+      CREATE TABLE sessions (
+        id INTEGER PRIMARY KEY, project_id INTEGER NOT NULL REFERENCES projects(id),
+        source_id INTEGER NOT NULL REFERENCES sources(id),
+        session_uuid TEXT NOT NULL UNIQUE, file_path TEXT NOT NULL UNIQUE,
+        title TEXT, started_at TEXT NOT NULL, ended_at TEXT NOT NULL,
+        message_count INTEGER NOT NULL DEFAULT 0, has_tool_use INTEGER NOT NULL DEFAULT 0,
+        cwd TEXT, model TEXT, raw_file_mtime TEXT,
+        created_at TEXT NOT NULL DEFAULT (datetime('now'))
+      );
+      CREATE TABLE messages (
+        id INTEGER PRIMARY KEY, session_id INTEGER NOT NULL REFERENCES sessions(id) ON DELETE CASCADE,
+        source_id INTEGER NOT NULL REFERENCES sources(id),
+        msg_uuid TEXT, parent_uuid TEXT, role TEXT NOT NULL,
+        content_text TEXT NOT NULL DEFAULT '', timestamp TEXT NOT NULL,
+        is_sidechain INTEGER NOT NULL DEFAULT 0, tool_names TEXT NOT NULL DEFAULT '[]',
+        seq INTEGER NOT NULL
+      );
+      CREATE VIRTUAL TABLE messages_fts USING fts5(content_text, content='messages', content_rowid='id');
+      CREATE VIRTUAL TABLE messages_fts_trigram USING fts5(content_text, content='messages', content_rowid='id', tokenize='trigram');
+      CREATE TABLE sync_log (
+        id INTEGER PRIMARY KEY, source_id INTEGER NOT NULL REFERENCES sources(id),
+        file_path TEXT NOT NULL, status TEXT NOT NULL, message TEXT,
+        synced_at TEXT NOT NULL DEFAULT (datetime('now'))
+      );
+      CREATE TABLE session_search (
+        session_id INTEGER PRIMARY KEY REFERENCES sessions(id) ON DELETE CASCADE,
+        title TEXT NOT NULL DEFAULT '', user_text TEXT NOT NULL DEFAULT '',
+        assistant_text TEXT NOT NULL DEFAULT '',
+        updated_at TEXT NOT NULL DEFAULT (datetime('now'))
+      );
+      CREATE VIRTUAL TABLE session_search_fts USING fts5(title, user_text, assistant_text, content='session_search', content_rowid='session_id');
+      CREATE VIRTUAL TABLE session_search_fts_trigram USING fts5(title, user_text, assistant_text, content='session_search', content_rowid='session_id', tokenize='trigram');
+
+      -- The historical connector tables that v5 must drop:
+      CREATE TABLE captures (
+        id INTEGER PRIMARY KEY, source_id INTEGER NOT NULL REFERENCES sources(id),
+        capture_uuid TEXT NOT NULL UNIQUE, url TEXT NOT NULL,
+        title TEXT NOT NULL DEFAULT '', content_text TEXT NOT NULL DEFAULT '',
+        author TEXT, platform TEXT NOT NULL, platform_id TEXT,
+        content_type TEXT NOT NULL DEFAULT 'page', thumbnail_url TEXT,
+        metadata TEXT NOT NULL DEFAULT '{}', captured_at TEXT NOT NULL,
+        indexed_at TEXT NOT NULL DEFAULT (datetime('now')), raw_json TEXT
+      );
+      CREATE VIRTUAL TABLE captures_fts USING fts5(title, content_text, content='captures', content_rowid='id');
+      CREATE VIRTUAL TABLE captures_fts_trigram USING fts5(title, content_text, content='captures', content_rowid='id', tokenize='trigram');
+      CREATE TABLE capture_connectors (
+        capture_id INTEGER NOT NULL REFERENCES captures(id) ON DELETE CASCADE,
+        connector_id TEXT NOT NULL, PRIMARY KEY (capture_id, connector_id)
+      );
+      CREATE TABLE connector_sync_state (
+        connector_id TEXT PRIMARY KEY, head_cursor TEXT, head_item_id TEXT,
+        tail_cursor TEXT, tail_complete INTEGER NOT NULL DEFAULT 0,
+        last_forward_sync_at TEXT, last_backfill_sync_at TEXT,
+        total_synced INTEGER NOT NULL DEFAULT 0, consecutive_errors INTEGER NOT NULL DEFAULT 0,
+        enabled INTEGER NOT NULL DEFAULT 1, config_json TEXT NOT NULL DEFAULT '{}',
+        last_error_at TEXT, last_error_code TEXT, last_error_message TEXT
+      );
+
+      -- v4 wide-CHECK stars:
+      CREATE TABLE stars (
+        item_type TEXT NOT NULL CHECK (item_type IN ('session', 'capture')),
+        item_uuid TEXT NOT NULL,
+        starred_at TEXT NOT NULL DEFAULT (datetime('now')),
+        PRIMARY KEY (item_type, item_uuid)
+      );
+    `)
+
+    // Seed a session, a capture, and one star of each kind
+    seed.prepare("INSERT INTO projects (source_id, slug, display_path, display_name) VALUES (1, 'p', '/p', 'p')").run()
+    seed.prepare(`
+      INSERT INTO sessions (project_id, source_id, session_uuid, file_path, title, started_at, ended_at, message_count)
+      VALUES (1, 1, 'sess-uuid', '/fake/sess.jsonl', 'A session', '2026-01-01T00:00:00Z', '2026-01-01T00:01:00Z', 1)
+    `).run()
+    seed.prepare(`
+      INSERT INTO captures (source_id, capture_uuid, url, title, platform, captured_at)
+      VALUES (4, 'cap-uuid', 'https://x.com/1', 'A tweet', 'twitter', '2026-01-01T00:00:00Z')
+    `).run()
+    seed.prepare("INSERT INTO capture_connectors (capture_id, connector_id) VALUES (1, 'twitter-bookmarks')").run()
+    seed.prepare("INSERT INTO connector_sync_state (connector_id) VALUES ('twitter-bookmarks')").run()
+
+    seed.prepare("INSERT INTO stars (item_type, item_uuid) VALUES ('session', 'sess-uuid')").run()
+    seed.prepare("INSERT INTO stars (item_type, item_uuid) VALUES ('capture', 'cap-uuid')").run()
+
+    seed.pragma('user_version = 4')
+    seed.close()
+
+    // ── Run the post-v5 getDB() against this seeded DB ────────────────────
+    vi.stubEnv('SPOOL_DATA_DIR', spoolDir)
+    vi.resetModules()
+    const dbModule = await import('./db.js')
+    const db = dbModule.getDB()
+
+    // user_version bumped to 5
+    expect((db.pragma('user_version') as Array<{ user_version: number }>)[0]?.user_version).toBe(5)
+
+    // Connector tables and FTS gone
+    const tablesAfter = db.prepare("SELECT name FROM sqlite_master WHERE type='table' OR type='virtual'").all() as Array<{ name: string }>
+    const tableNames = new Set(tablesAfter.map(r => r.name))
+    expect(tableNames.has('captures')).toBe(false)
+    expect(tableNames.has('captures_fts')).toBe(false)
+    expect(tableNames.has('captures_fts_trigram')).toBe(false)
+    expect(tableNames.has('capture_connectors')).toBe(false)
+    expect(tableNames.has('connector_sync_state')).toBe(false)
+
+    // 'connector' source row also dropped
+    const sources = db.prepare('SELECT name FROM sources').all() as Array<{ name: string }>
+    expect(sources.map(s => s.name).sort()).toEqual(['claude', 'codex', 'gemini'])
+
+    // Session star preserved, capture star gone
+    const stars = db.prepare('SELECT item_type, item_uuid FROM stars').all()
+    expect(stars).toEqual([{ item_type: 'session', item_uuid: 'sess-uuid' }])
+
+    // Stars CHECK is now session-only — inserting 'capture' must throw
+    expect(() => db.prepare("INSERT INTO stars (item_type, item_uuid) VALUES ('capture', 'x')").run()).toThrow()
+
+    // Session itself still there
+    const sess = db.prepare("SELECT session_uuid FROM sessions WHERE session_uuid='sess-uuid'").get() as { session_uuid: string }
+    expect(sess.session_uuid).toBe('sess-uuid')
+
+    db.close()
+  })
+
+  it('is a no-op on a fresh install (no connector tables to drop)', async () => {
+    const spoolDir = makeTempDir('spool-v5-fresh-')
+    vi.stubEnv('SPOOL_DATA_DIR', spoolDir)
+    vi.resetModules()
+    const dbModule = await import('./db.js')
+    const db = dbModule.getDB()
+
+    expect((db.pragma('user_version') as Array<{ user_version: number }>)[0]?.user_version).toBe(5)
+
+    // Stars exists with narrow CHECK
+    expect(() => db.prepare("INSERT INTO stars (item_type, item_uuid) VALUES ('capture', 'x')").run()).toThrow()
+    db.prepare("INSERT INTO stars (item_type, item_uuid) VALUES ('session', 'x')").run()
+    expect(db.prepare('SELECT COUNT(*) AS c FROM stars').get()).toEqual({ c: 1 })
+
+    db.close()
+  })
+})
diff --git a/packages/core/src/db/queries.ts b/packages/core/src/db/queries.ts
index be5adc0..fbc76ca 100644
--- a/packages/core/src/db/queries.ts
+++ b/packages/core/src/db/queries.ts
@@ -1,7 +1,5 @@
-import { randomUUID } from 'node:crypto'
 import type Database from 'better-sqlite3'
-import type { Session, Message, FragmentResult, StatusInfo, CaptureResult, SearchResult, Source, SearchMatchType, SessionSource, StarKind, StarredItem, Capture } from '../types.js'
-import type { CapturedItem } from '../connectors/types.js'
+import type { Session, Message, FragmentResult, StatusInfo, SearchMatchType, SessionSource, StarKind, StarredItem } from '../types.js'
 import { DB_PATH, getDBSize } from './db.js'
 import { buildSearchPlan, canUseSessionSearchFts, getNaturalSearchPhrase, getNaturalSearchTerms, selectFtsTableKind, shouldUseSessionFallback } from './search-query.js'
@@ -874,86 +872,46 @@ export function isStarred(db: Database.Database, kind: StarKind, uuid: string):
 export function getStarredUuidsByType(
   db: Database.Database,
-): { session: string[]; capture: string[] } {
-  // Orphan-filter to stay consistent with listStarredItems — otherwise the
-  // badge count shows stars the list can't render (e.g. capture deleted by
-  // an ephemeral connector re-sync).
+): { session: string[] } {
   const rows = db.prepare(`
-    SELECT item_type AS kind, item_uuid AS uuid
+    SELECT item_uuid AS uuid
     FROM stars
-    WHERE (item_type = 'session' AND EXISTS (SELECT 1 FROM sessions WHERE session_uuid = stars.item_uuid))
-       OR (item_type = 'capture' AND EXISTS (SELECT 1 FROM captures WHERE capture_uuid = stars.item_uuid))
-  `).all() as Array<{ kind: StarKind; uuid: string }>
-  const session: string[] = []
-  const capture: string[] = []
-  for (const r of rows) {
-    if (r.kind === 'session') session.push(r.uuid)
-    else if (r.kind === 'capture') capture.push(r.uuid)
-  }
-  return { session, capture }
+    WHERE item_type = 'session'
+      AND EXISTS (SELECT 1 FROM sessions WHERE session_uuid = stars.item_uuid)
+  `).all() as Array<{ uuid: string }>
+  return { session: rows.map(r => r.uuid) }
 }

 export function listStarredItems(db: Database.Database, limit = 200): StarredItem[] {
   // Orphan-filter at the SQL level so LIMIT counts only live rows; otherwise
   // a user with 200+ orphaned stars could see an empty page.
   const rows = db.prepare(`
-    SELECT item_type AS kind, item_uuid AS uuid, starred_at AS starredAt
+    SELECT item_uuid AS uuid, starred_at AS starredAt
     FROM stars
-    WHERE (item_type = 'session' AND EXISTS (SELECT 1 FROM sessions WHERE session_uuid = stars.item_uuid))
-       OR (item_type = 'capture' AND EXISTS (SELECT 1 FROM captures WHERE capture_uuid = stars.item_uuid))
+    WHERE item_type = 'session'
+      AND EXISTS (SELECT 1 FROM sessions WHERE session_uuid = stars.item_uuid)
     ORDER BY starred_at DESC
     LIMIT ?
-  `).all(limit) as Array<{ kind: StarKind; uuid: string; starredAt: string }>
+  `).all(limit) as Array<{ uuid: string; starredAt: string }>
   if (rows.length === 0) return []

-  const sessionUuids = rows.filter(r => r.kind === 'session').map(r => r.uuid)
-  const captureUuids = rows.filter(r => r.kind === 'capture').map(r => r.uuid)
-
+  const sessionUuids = rows.map(r => r.uuid)
   const sessionMap = new Map<string, Session>()
-  if (sessionUuids.length > 0) {
-    const placeholders = sessionUuids.map(() => '?').join(', ')
-    const sessRows = db.prepare(`
-      ${SESSION_SELECT}
-      WHERE s.session_uuid IN (${placeholders})
-    `).all(...sessionUuids) as Array<Record<string, unknown>>
-    for (const row of sessRows) {
-      const session = rowToSession(row)
-      sessionMap.set(session.sessionUuid, session)
-    }
-  }
-
-  const captureMap = new Map<string, Capture>()
-  if (captureUuids.length > 0) {
-    const placeholders = captureUuids.map(() => '?').join(', ')
-    const capRows = db.prepare(`
-      SELECT
-        id AS captureId,
-        capture_uuid AS captureUuid,
-        url,
-        title,
-        author,
-        platform,
-        content_type AS contentType,
-        thumbnail_url AS thumbnailUrl,
-        captured_at AS capturedAt
-      FROM captures
-      WHERE capture_uuid IN (${placeholders})
-    `).all(...captureUuids) as Array<Capture>
-    for (const row of capRows) {
-      captureMap.set(row.captureUuid, row)
-    }
+  const placeholders = sessionUuids.map(() => '?').join(', ')
+  const sessRows = db.prepare(`
+    ${SESSION_SELECT}
+    WHERE s.session_uuid IN (${placeholders})
+  `).all(...sessionUuids) as Array<Record<string, unknown>>
+  for (const row of sessRows) {
+    const session = rowToSession(row)
+    sessionMap.set(session.sessionUuid, session)
   }

   const items: StarredItem[] = []
   for (const r of rows) {
-    if (r.kind === 'session') {
-      const session = sessionMap.get(r.uuid)
-      if (session) items.push({ kind: 'session', starredAt: r.starredAt, session })
-    } else if (r.kind === 'capture') {
-      const capture = captureMap.get(r.uuid)
-      if (capture) items.push({ kind: 'capture', starredAt: r.starredAt, capture })
-    }
+    const session = sessionMap.get(r.uuid)
+    if (session) items.push({ kind: 'session', starredAt: r.starredAt, session })
   }
   return items
 }
@@ -1010,205 +968,3 @@ function getProfileLabelFromFilePath(filePath: string): string | undefined {
   return match?.[1]
 }

-// ── Captures ────────────────────────────────────────────────────────────────
-
-export function insertCapture(
-  db: Database.Database,
-  sourceId: number,
-  item: CapturedItem,
-): number {
-  const captureUuid = randomUUID()
-
-  // Dedup by platform_id if provided
-  if (item.platformId) {
-    const existing = db
-      .prepare('SELECT id FROM captures WHERE platform = ? AND platform_id = ?')
-      .get(item.platform, item.platformId) as { id: number } | undefined
-    if (existing) {
-      db.prepare(`
-        UPDATE captures SET
-          title = ?, content_text = ?, author = ?, metadata = ?,
-          captured_at = ?, raw_json = ?, thumbnail_url = ?
-        WHERE id = ?
-      `).run(
-        item.title, item.contentText, item.author,
-        JSON.stringify(item.metadata), item.capturedAt, item.rawJson,
-        item.thumbnailUrl,
-        existing.id,
-      )
-      return existing.id
-    }
-  }
-
-  const result = db.prepare(`
-    INSERT INTO captures
-      (source_id, capture_uuid, url, title, content_text,
-       author, platform, platform_id, content_type, thumbnail_url,
-       metadata, captured_at, raw_json)
-    VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
-  `).run(
-    sourceId, captureUuid, item.url, item.title, item.contentText,
-    item.author, item.platform, item.platformId, item.contentType, item.thumbnailUrl,
-    JSON.stringify(item.metadata), item.capturedAt, item.rawJson,
-  )
-
-  return Number(result.lastInsertRowid)
-}
-
-export function searchCaptures(
-  db: Database.Database,
-  query: string,
-  opts: { limit?: number; platform?: string; since?: string; onlyStarred?: boolean } = {},
-): CaptureResult[] {
-  const { limit = 10, platform, since, onlyStarred } = opts
-
-  const ftsTable = selectFtsTableKind(query) === 'trigram' ? 'captures_fts_trigram' : 'captures_fts'
-  const rowLimit = Math.max(limit * 10, 50)
-  const groups = buildSearchPlan(query).map(step => {
-    const rows = searchCaptureRows(db, ftsTable, step.query, rowLimit, {
-      ...(platform ? { platform } : {}),
-      ...(since ? { since } : {}),
-      ...(onlyStarred ? { onlyStarred } : {}),
-    })
-    return mapCaptureRows(rows, step.matchType)
-  })
-
-  return mergeCaptureGroups(groups, limit)
-}
-
-function searchCaptureRows(
-  db: Database.Database,
-  ftsTable: 'captures_fts' | 'captures_fts_trigram',
-  ftsQuery: string,
-  limit: number,
-  opts: { platform?: string; since?: string; onlyStarred?: boolean } = {},
-): Array<Record<string, unknown>> {
-  const { platform, since, onlyStarred } = opts
-  const conditions: string[] = [`${ftsTable} MATCH ?`]
-  const params: (string | number)[] = [ftsQuery]
-
-  if (platform) {
-    conditions.push('c.platform = ?')
-    params.push(platform)
-  }
-  if (since) {
-    conditions.push('c.captured_at >= ?')
-    params.push(since)
-  }
-  if (onlyStarred) {
-    conditions.push("EXISTS (SELECT 1 FROM stars WHERE stars.item_type = 'capture' AND stars.item_uuid = c.capture_uuid)")
-  }
-  params.push(limit)
-
-  const sql = `
-    SELECT
-      rank,
-      c.id AS captureId,
-      c.capture_uuid AS captureUuid,
-      c.url,
-      c.title,
-      c.author,
-      c.platform,
-      c.content_type AS contentType,
-      c.captured_at AS capturedAt,
-      snippet(${ftsTable}, -1, '', '', '…', 20) AS snippet
-    FROM ${ftsTable}
-    JOIN captures c ON c.id = ${ftsTable}.rowid
-    WHERE ${conditions.join(' AND ')}
-    ORDER BY rank
-    LIMIT ?
-  `
-
-  return db.prepare(sql).all(...params) as Array<Record<string, unknown>>
-}
-
-function mapCaptureRows(rows: Array<Record<string, unknown>>, matchType: SearchMatchType): CaptureResult[] {
-  return rows.map((row, i) => ({
-    rank: i + 1,
-    captureId: row['captureId'] as number,
-    captureUuid: row['captureUuid'] as string,
-    matchType,
-    url: row['url'] as string,
-    title: (row['title'] as string) || '(no title)',
-    snippet: row['snippet'] as string,
-    platform: row['platform'] as string,
-    contentType: row['contentType'] as string,
-    author: (row['author'] as string | null) ?? null,
-    capturedAt: row['capturedAt'] as string,
-  }))
-}
-
-function mergeCaptureGroups(groups: CaptureResult[][], limit: number): CaptureResult[] {
-  const merged: CaptureResult[] = []
-  const seen = new Set<string>()
-
-  for (const group of groups) {
-    for (const capture of group) {
-      if (seen.has(capture.captureUuid)) continue
-      if (merged.length >= limit) continue
-
-      const next = {
-        ...capture,
-        rank: merged.length + 1,
-      }
-      merged.push(next)
-      seen.add(next.captureUuid)
-    }
-  }
-
-  return merged
-}
-
-export function searchAll(
-  db: Database.Database,
-  query: string,
-  opts: { limit?: number; source?: Source; since?: string; onlyStarred?: boolean } = {},
-): SearchResult[] {
-  const { limit = 20, source, since, onlyStarred } = opts
-
-  const fragOpts: { limit: number; source?: SessionSource; since?: string; onlyStarred?: boolean } = { limit }
-  if (source === 'claude' || source === 'codex' || source === 'gemini') fragOpts.source = source
-  if (since) fragOpts.since = since
-  if (onlyStarred) fragOpts.onlyStarred = true
-
-  const fragments = searchFragments(db, query, fragOpts)
-    .map(f => ({ ...f, kind: 'fragment' as const }))
-
-  const capOpts: { limit: number; since?: string; onlyStarred?: boolean } = { limit }
-  if (since) capOpts.since = since
-  if (onlyStarred) capOpts.onlyStarred = true
-
-  const captures = searchCaptures(db, query, capOpts)
-    .map(c => ({ ...c, kind: 'capture' as const }))
-
-  return [...fragments, ...captures]
-    .sort(compareSearchResultRelevance)
-    .slice(0, limit)
-    .map((result, index) => ({ ...result, rank: index + 1 }))
-}
-
-function compareSearchResultRelevance(a: SearchResult, b: SearchResult): number {
-  const typeDiff = getMatchTypePriority(a.matchType) - getMatchTypePriority(b.matchType)
-  if (typeDiff !== 0) return typeDiff
-  return a.rank - b.rank
-}
-
-function getMatchTypePriority(matchType: SearchMatchType): number {
-  switch (matchType) {
-    case 'phrase':
-      return 0
-    case 'all_terms':
-      return 1
-    default:
-      return 2
-  }
-}
-
-export function getCaptureCount(db: Database.Database, platform?: string): number {
-  if (platform) {
-    const row = db.prepare('SELECT COUNT(*) AS cnt FROM captures WHERE platform = ?').get(platform) as { cnt: number }
-    return row.cnt
-  }
-  const row = db.prepare('SELECT COUNT(*) AS cnt FROM captures').get() as { cnt: number }
-  return row.cnt
-}
diff --git a/packages/core/src/db/stars.test.ts b/packages/core/src/db/stars.test.ts
index 8a24a7f..b8836b5 100644
--- a/packages/core/src/db/stars.test.ts
+++ b/packages/core/src/db/stars.test.ts
@@ -16,8 +16,8 @@ afterEach(() => {
   }
 })

-describe('stars (unified)', () => {
-  it('star + isStarred + unstar roundtrip for sessions', async () => {
+describe('stars', () => {
+  it('star + isStarred + unstar roundtrip', async () => {
     const mod = await load()
     const { db, seedSession } = mod
     const uuid = 'sess-1'
@@ -31,30 +31,6 @@ describe('stars', () => {
     expect(mod.isStarred(db, 'session', uuid)).toBe(false)
   })

-  it('star + isStarred + unstar roundtrip for captures', async () => {
-    const mod = await load()
-    const { db, seedCapture } = mod
-    const uuid = 'cap-1'
-    seedCapture(uuid, 'https://x.com/1', 'Tweet', 'twitter')
-
-    expect(mod.isStarred(db, 'capture', uuid)).toBe(false)
-    mod.starItem(db, 'capture', uuid)
-    expect(mod.isStarred(db, 'capture', uuid)).toBe(true)
-
-    mod.unstarItem(db, 'capture', uuid)
-    expect(mod.isStarred(db, 'capture', uuid)).toBe(false)
-  })
-
-  it('session and capture with same uuid string are independent', async () => {
-    const mod = await load()
-    const { db } = mod
-    mod.starItem(db, 'session', 'same-uuid')
-    expect(mod.isStarred(db, 'session', 'same-uuid')).toBe(true)
-    expect(mod.isStarred(db, 'capture', 'same-uuid')).toBe(false)
-    mod.starItem(db, 'capture', 'same-uuid')
-    expect(mod.isStarred(db, 'capture', 'same-uuid')).toBe(true)
-  })
-
   it('starItem is idempotent and preserves original starred_at', async () => {
     const mod = await load()
     const { db, seedSession } = mod
@@ -66,99 +42,85 @@ describe('stars', () => {
     expect(second.starred_at).toBe(first.starred_at)
   })

-  it('CHECK constraint rejects unknown item_type', async () => {
+  it('CHECK constraint rejects non-session item_type', async () => {
     const mod = await load()
     const { db } = mod
+    expect(() =>
+      db.prepare('INSERT INTO stars (item_type, item_uuid) VALUES (?, ?)').run('capture', 'x'),
+    ).toThrow()
     expect(() =>
       db.prepare('INSERT INTO stars (item_type, item_uuid) VALUES (?, ?)').run('bogus', 'x'),
     ).toThrow()
   })

-  it('listStarredItems returns mixed sessions + captures by starred_at DESC', async () => {
+  it('listStarredItems returns sessions ordered by starred_at DESC', async () => {
     const mod = await load()
-    const { db, seedSession, seedCapture } = mod
+    const { db, seedSession } = mod
     seedSession('s1', 'P', 'Session one')
-    seedCapture('c1', 'https://u/1', 'Cap one', 'twitter')
     seedSession('s2', 'P', 'Session two')
+    seedSession('s3', 'P', 'Session three')

-    // Explicit timestamps so order is deterministic.
     db.prepare("INSERT INTO stars (item_type, item_uuid, starred_at) VALUES ('session', 's1', '2026-01-01 00:00:00')").run()
-    db.prepare("INSERT INTO stars (item_type, item_uuid, starred_at) VALUES ('capture', 'c1', '2026-02-01 00:00:00')").run()
-    db.prepare("INSERT INTO stars (item_type, item_uuid, starred_at) VALUES ('session', 's2', '2026-03-01 00:00:00')").run()
+    db.prepare("INSERT INTO stars (item_type, item_uuid, starred_at) VALUES ('session', 's2', '2026-02-01 00:00:00')").run()
+    db.prepare("INSERT INTO stars (item_type, item_uuid, starred_at) VALUES ('session', 's3', '2026-03-01 00:00:00')").run()

     const items = mod.listStarredItems(db)
-    expect(items.map(i => i.kind === 'session' ? i.session.sessionUuid : i.capture.captureUuid))
-      .toEqual(['s2', 'c1', 's1'])
-    expect(items[1]!.kind).toBe('capture')
+    expect(items.map(i => i.session.sessionUuid)).toEqual(['s3', 's2', 's1'])
   })

-  it('listStarredItems filters orphans (starred referent missing)', async () => {
+  it('listStarredItems filters orphans (session referent missing)', async () => {
     const mod = await load()
     const { db, seedSession } = mod
     seedSession('alive', 'P', 'A')

     mod.starItem(db, 'session', 'alive')
     mod.starItem(db, 'session', 'ghost-session')
-    mod.starItem(db, 'capture', 'ghost-capture')

     const items = mod.listStarredItems(db)
     expect(items).toHaveLength(1)
-    expect(items[0]!.kind).toBe('session')
-    if (items[0]!.kind === 'session') {
-      expect(items[0]!.session.sessionUuid).toBe('alive')
-    }
+    expect(items[0]!.session.sessionUuid).toBe('alive')
   })

-  it('getStarredUuidsByType splits session + capture uuids', async () => {
+  it('getStarredUuidsByType returns session uuids', async () => {
     const mod = await load()
-    const { db, seedSession, seedCapture } = mod
+    const { db, seedSession } = mod
     seedSession('s1', 'P', 'T')
     seedSession('s2', 'P', 'T')
-    seedCapture('c1', 'https://u/1', 'T', 'twitter')
     mod.starItem(db, 'session', 's1')
     mod.starItem(db, 'session', 's2')
-    mod.starItem(db, 'capture', 'c1')

-    const { session, capture } = mod.getStarredUuidsByType(db)
+    const { session } = mod.getStarredUuidsByType(db)
     expect(new Set(session)).toEqual(new Set(['s1', 's2']))
-    expect(new Set(capture)).toEqual(new Set(['c1']))
   })

-  it('getStarredUuidsByType filters orphans (matches listStarredItems semantics)', async () => {
+  it('getStarredUuidsByType filters orphans', async () => {
     const mod = await load()
     const { db, seedSession } = mod
     seedSession('alive', 'P', 'T')
     mod.starItem(db, 'session', 'alive')
     mod.starItem(db, 'session', 'ghost-session')
-    mod.starItem(db, 'capture', 'ghost-capture')

-    const { session, capture } = mod.getStarredUuidsByType(db)
+    const { session } = mod.getStarredUuidsByType(db)
     expect(session).toEqual(['alive'])
-    expect(capture).toEqual([])
   })

   it('unstarItem on non-starred uuid is a no-op', async () => {
     const mod = await load()
     const { db } = mod
     expect(() => mod.unstarItem(db, 'session', 'nobody')).not.toThrow()
-    expect(() => mod.unstarItem(db, 'capture', 'nobody')).not.toThrow()
   })

-  it('startup sweep prunes orphan capture stars but preserves session orphans', async () => {
+  it('preserves session orphans across re-open (transient-absence design)', async () => {
     const spoolDir = makeTempDir('spool-stars-sweep-')
     vi.stubEnv('SPOOL_DATA_DIR', spoolDir)
     const first = await loadInto(spoolDir)

-    // Inject two orphan stars directly (no referent rows).
-    first.db.prepare("INSERT INTO stars (item_type, item_uuid) VALUES ('capture', 'ghost-cap')").run()
     first.db.prepare("INSERT INTO stars (item_type, item_uuid) VALUES ('session', 'ghost-sess')").run()
     first.db.close()
     openDbs.length = 0

-    // Reopen → migrations + startup sweep run
     vi.resetModules()
     const second = await loadInto(spoolDir)

-    const rows = second.db.prepare('SELECT item_type, item_uuid FROM stars ORDER BY item_type').all()
-    // capture orphan gone; session orphan preserved (transient-absence design)
+    const rows = second.db.prepare('SELECT item_type, item_uuid FROM stars').all()
     expect(rows).toEqual([{ item_type: 'session', item_uuid: 'ghost-sess' }])
   })

@@ -222,15 +184,6 @@ async function loadInto(_spoolDir: string) {
     })
   }

-  function seedCapture(captureUuid: string, url: string, title: string, platform: string): void {
-    const connectorSource = db.prepare("SELECT id FROM sources WHERE name='connector'").get() as { id: number }
-    db.prepare(`
-      INSERT INTO captures
-        (source_id, capture_uuid, url, title, content_text, author, platform, platform_id, content_type, thumbnail_url, metadata, captured_at, raw_json)
-      VALUES (?, ?, ?, ?, '', NULL, ?, NULL, 'page', NULL, '{}', '2026-01-01T00:00:00Z', NULL)
-    `).run(connectorSource.id, captureUuid, url, title, platform)
-  }
-
   return {
     db,
     starItem: queryModule.starItem,
@@ -239,6 +192,5 @@ async function loadInto(_spoolDir: string) {
     listStarredItems: queryModule.listStarredItems,
     getStarredUuidsByType: queryModule.getStarredUuidsByType,
     seedSession,
-    seedCapture,
   }
 }
diff --git a/packages/core/src/index.ts b/packages/core/src/index.ts
index 256add2..8594ac4 100644
--- a/packages/core/src/index.ts
+++ b/packages/core/src/index.ts
@@ -6,63 +6,4 @@ export * from './parsers/codex.js'
 export * from './parsers/gemini.js'
 export * from './sync/syncer.js'
 export * from './sync/watcher.js'
-export { searchFragments, searchCaptures, searchAll } from './db/queries.js'
 export { resolveSystemBinary, cachedResolve, clearResolveCache } from './util/resolve-bin.js'
-
-// ── Connector framework ─────────────────────────────────────────────────────
-export { ConnectorRegistry } from './connectors/registry.js'
-export { SyncEngine, loadSyncState, saveSyncState, deleteConnectorItems } from './connectors/sync-engine.js'
-export { SyncScheduler } from './connectors/sync-scheduler.js'
-export type { SchedulerEvent, SchedulerEventHandler } from './connectors/sync-scheduler.js'
-export {
-  SyncError,
-  SyncErrorCode,
-  SYNC_ERROR_HINTS,
-  DEFAULT_SCHEDULE,
-} from './connectors/types.js'
-export type {
-  Connector,
-  AuthStatus,
-  FetchContext,
-  PageResult,
-  SyncState,
-  SyncOptions,
-  ConnectorSyncResult,
-  SyncProgress,
-  SyncJob,
-  ScheduleConfig,
-  ConnectorStatus,
-  SchedulerStatus,
-} from './connectors/types.js'
-
-export { downloadAndInstall, uninstallConnector, resolveNpmPackage, registryUrl, checkForUpdates } from './connectors/npm-install.js'
-export type { UpdateInfo } from './connectors/npm-install.js'
-export { fetchRegistry } from './connectors/registry-fetch.js'
-export type { RegistryConnector } from './connectors/registry-fetch.js'
-
-// ── Plugin loader ──────────────────────────────────────────────────────────
-export { loadConnectors } from './connectors/loader.js'
-export type { LoadDeps, LoadReport, LoadResult, CapabilityImpls } from './connectors/loader.js'
-export { TrustStore } from './connectors/trust-store.js'
-export {
-  makeFetchCapability,
-  makeChromeCookiesCapability,
-  makeLogCapabilityFor,
-  makeSqliteCapability,
-  makeExecCapability,
-} from './connectors/capabilities/index.js'
-export { PrerequisiteChecker } from './connectors/prerequisites.js'
-export type { ConnectorPackage } from './connectors/types.js'
-
-// ── SDK re-exports (so app doesn't need a direct connector-sdk dep) ─────────
-export type {
-  Prerequisite,
-  PrerequisiteKind,
-  Detect,
-  Install,
-  ManualInstall,
-  SetupStep,
-  SetupStatus,
-  PrerequisitesCapability,
-} from '@spool-lab/connector-sdk'
-export { checkAuthViaPrerequisites } from '@spool-lab/connector-sdk'
diff --git a/packages/core/src/types.ts b/packages/core/src/types.ts
index 6b66a75..a63b342 100644
--- a/packages/core/src/types.ts
+++ b/packages/core/src/types.ts
@@ -95,45 +95,12 @@ export interface SyncResult {
   errors: number
 }

-// ── Capture Types ────────────────────────────────────────────────────────────
+// ── Search ──────────────────────────────────────────────────────────────────

-export interface CaptureResult {
-  rank: number
-  captureId: number
-  captureUuid: string
-  matchType: SearchMatchType
-  url: string
-  title: string
-  snippet: string
-  platform: string
-  contentType: string
-  author: string | null
-  capturedAt: string
-}
-
-export type { CapturedItem } from './connectors/types.js'
-
-export type SearchResult =
-  | (FragmentResult & { kind: 'fragment' })
-  | (CaptureResult & { kind: 'capture' })
+export type SearchResult = FragmentResult & { kind: 'fragment' }

 // ── Stars ──────────────────────────────────────────────────────────────────

-export type StarKind = 'session' | 'capture'
-
-/** Display-shape of a capture row outside of search results. */
-export interface Capture {
-  captureId: number
-  captureUuid: string
-  url: string
-  title: string
-  author: string | null
-  platform: string
-  contentType: string
-  thumbnailUrl: string | null
-  capturedAt: string
-}
+export type StarKind = 'session'

-export type StarredItem =
-  | { kind: 'session'; starredAt: string; session: Session }
-  | { kind: 'capture'; starredAt: string; capture: Capture }
+export type StarredItem = { kind: 'session'; starredAt: string; session: Session }

From 3b63ced9298333842c0db852676db9a945f83e00 Mon Sep 17 00:00:00 2001
From: Chen <99816898+donteatfriedrice@users.noreply.github.com>
Date: Tue, 28 Apr 2026 17:31:26 +0800
Subject: [PATCH 2/2] chore(m5): sync pnpm-lock.yaml after dropping core deps

The previous commit removed @spool-lab/connector-sdk, effect, semver, tar
(and @effect/vitest, @types/semver dev deps) from packages/core/package.json
without re-running pnpm install. CI's --frozen-lockfile install rejected
the drift.
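For the record (a sketch; the exact invocation isn't captured in this diff):
a plain install at the workspace root regenerates the lockfile,

    pnpm install   # rewrites pnpm-lock.yaml from the edited package.json

after which CI's pnpm install --frozen-lockfile accepts it again.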
Co-Authored-By: Claude Opus 4.7 (1M context)
---
 pnpm-lock.yaml | 112 ++++++------------------------------------------
 1 file changed, 13 insertions(+), 99 deletions(-)

diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index d4b9e2d..2a3ab55 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -68,6 +68,13 @@ importers:
       zustand:
         specifier: ^5.0.3
         version: 5.0.12(@types/react@19.2.14)(react@19.2.5)
+    optionalDependencies:
+      acp-extension-codex-darwin-arm64:
+        specifier: ^0.10.0
+        version: 0.10.0
+      acp-extension-codex-linux-x64:
+        specifier: ^0.10.0
+        version: 0.10.0
     devDependencies:
       '@electron/rebuild':
         specifier: ^3.7.1
@@ -101,7 +108,7 @@ importers:
         version: 34.5.8
       electron-builder:
         specifier: ^26.0.12
-        version: 26.8.1(electron-builder-squirrel-windows@26.8.1)
+        version: 26.8.1(electron-builder-squirrel-windows@26.8.1(dmg-builder@26.8.1))
       electron-vite:
         specifier: ^3.1.0
         version: 3.1.0(vite@6.4.1(@types/node@22.19.17)(jiti@2.6.1)(lightningcss@1.32.0)(tsx@4.21.0))
@@ -114,13 +121,6 @@ importers:
       vitest:
         specifier: ^4.1.4
         version: 4.1.4(@opentelemetry/api@1.9.1)(@types/node@22.19.17)(vite@6.4.1(@types/node@22.19.17)(jiti@2.6.1)(lightningcss@1.32.0)(tsx@4.21.0))
-    optionalDependencies:
-      acp-extension-codex-darwin-arm64:
-        specifier: ^0.10.0
-        version: 0.10.0
-      acp-extension-codex-linux-x64:
-        specifier: ^0.10.0
-        version: 0.10.0

   packages/cli:
     dependencies:
@@ -251,34 +251,16 @@ importers:

   packages/core:
     dependencies:
-      '@spool-lab/connector-sdk':
-        specifier: workspace:^
-        version: link:../connector-sdk
       better-sqlite3:
         specifier: ^11.10.0
         version: 11.10.0
-      effect:
-        specifier: ^3.21.0
-        version: 3.21.0
-      semver:
-        specifier: ^7.7.4
-        version: 7.7.4
-      tar:
-        specifier: ^7.5.13
-        version: 7.5.13
     devDependencies:
-      '@effect/vitest':
-        specifier: ^0.29.0
-        version: 0.29.0(effect@3.21.0)(vitest@3.2.4(@types/debug@4.1.13)(@types/node@22.19.17)(jiti@2.6.1)(lightningcss@1.32.0)(tsx@4.21.0))
       '@types/better-sqlite3':
         specifier: ^7.6.13
         version: 7.6.13
       '@types/node':
         specifier: ^22.19.17
         version: 22.19.17
-      '@types/semver':
-        specifier: ^7.7.1
-        version: 7.7.1
       vitest:
         specifier: ^3.2.4
         version: 3.2.4(@types/debug@4.1.13)(@types/node@22.19.17)(jiti@2.6.1)(lightningcss@1.32.0)(tsx@4.21.0)
@@ -397,12 +379,6 @@ packages:
     resolution: {integrity: sha512-0cp4PsWQ/9avqTVMCtZ+GirikIA36ikvjtHweU4/j8yLtgObI0+JUPhYFScgwlteveGB1rt3Cm8UhN04XayDig==}
     engines: {node: '>= 8.9.0'}

-  '@effect/vitest@0.29.0':
-    resolution: {integrity: sha512-DvWr1aeEcaZ8mtu8hNVb4e3rEYvGEwQSr7wsNrW53t6nKYjkmjRICcvVEsXUhjoCblRHSxRsRV0TOt0+UmcvaQ==}
-    peerDependencies:
-      effect: ^3.21.0
-      vitest: ^3.2.0
-
   '@electron/asar@3.4.1':
     resolution: {integrity: sha512-i4/rNPRS84t0vSRa2HorerGRXWyF4vThfHesw0dmcWHp+cspK743UanA0suA5Q5y8kzY2y6YKrvbIUn69BCAiA==}
     engines: {node: '>=10.12.0'}
@@ -801,66 +777,56 @@ packages:
     resolution: {integrity: sha512-excjX8DfsIcJ10x1Kzr4RcWe1edC9PquDRRPx3YVCvQv+U5p7Yin2s32ftzikXojb1PIFc/9Mt28/y+iRklkrw==}
     cpu: [arm64]
     os: [linux]
-    libc: [glibc]

   '@img/sharp-libvips-linux-arm@1.2.4':
     resolution: {integrity: sha512-bFI7xcKFELdiNCVov8e44Ia4u2byA+l3XtsAj+Q8tfCwO6BQ8iDojYdvoPMqsKDkuoOo+X6HZA0s0q11ANMQ8A==}
     cpu: [arm]
     os: [linux]
-    libc: [glibc]

   '@img/sharp-libvips-linux-x64@1.2.4':
     resolution: {integrity: sha512-tJxiiLsmHc9Ax1bz3oaOYBURTXGIRDODBqhveVHonrHJ9/+k89qbLl0bcJns+e4t4rvaNBxaEZsFtSfAdquPrw==}
     cpu: [x64]
     os: [linux]
-    libc: [glibc]

   '@img/sharp-libvips-linuxmusl-arm64@1.2.4':
     resolution: {integrity: sha512-FVQHuwx1IIuNow9QAbYUzJ+En8KcVm9Lk5+uGUQJHaZmMECZmOlix9HnH7n1TRkXMS0pGxIJokIVB9SuqZGGXw==}
     cpu: [arm64]
     os: [linux]
-    libc: [musl]

   '@img/sharp-libvips-linuxmusl-x64@1.2.4':
     resolution: {integrity: sha512-+LpyBk7L44ZIXwz/VYfglaX/okxezESc6UxDSoyo2Ks6Jxc4Y7sGjpgU9s4PMgqgjj1gZCylTieNamqA1MF7Dg==}
     cpu: [x64]
     os: [linux]
-    libc: [musl]

   '@img/sharp-linux-arm64@0.34.5':
     resolution: {integrity: sha512-bKQzaJRY/bkPOXyKx5EVup7qkaojECG6NLYswgktOZjaXecSAeCWiZwwiFf3/Y+O1HrauiE3FVsGxFg8c24rZg==}
     engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
     cpu: [arm64]
     os: [linux]
-    libc: [glibc]

   '@img/sharp-linux-arm@0.34.5':
     resolution: {integrity: sha512-9dLqsvwtg1uuXBGZKsxem9595+ujv0sJ6Vi8wcTANSFpwV/GONat5eCkzQo/1O6zRIkh0m/8+5BjrRr7jDUSZw==}
     engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
     cpu: [arm]
     os: [linux]
-    libc: [glibc]

   '@img/sharp-linux-x64@0.34.5':
     resolution: {integrity: sha512-MEzd8HPKxVxVenwAa+JRPwEC7QFjoPWuS5NZnBt6B3pu7EG2Ge0id1oLHZpPJdn3OQK+BQDiw9zStiHBTJQQQQ==}
     engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
     cpu: [x64]
     os: [linux]
-    libc: [glibc]

   '@img/sharp-linuxmusl-arm64@0.34.5':
     resolution: {integrity: sha512-fprJR6GtRsMt6Kyfq44IsChVZeGN97gTD331weR1ex1c1rypDEABN6Tm2xa1wE6lYb5DdEnk03NZPqA7Id21yg==}
     engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
     cpu: [arm64]
     os: [linux]
-    libc: [musl]

   '@img/sharp-linuxmusl-x64@0.34.5':
     resolution: {integrity: sha512-Jg8wNT1MUzIvhBFxViqrEhWDGzqymo3sV7z7ZsaWbZNDLXRJZoRGrjulp60YYtV4wfY8VIKcWidjojlLcWrd8Q==}
     engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
     cpu: [x64]
     os: [linux]
-    libc: [musl]

   '@img/sharp-win32-arm64@0.34.5':
     resolution: {integrity: sha512-WQ3AgWCWYSb2yt+IG8mnC6Jdk9Whs7O0gxphblsLvdhSpSTtmu69ZG1Gkb6NuvxsNACwiPV6cNSZNzt0KPsw7g==}
@@ -973,79 +939,66 @@ packages:
     resolution: {integrity: sha512-RzeBwv0B3qtVBWtcuABtSuCzToo2IEAIQrcyB/b2zMvBWVbjo8bZDjACUpnaafaxhTw2W+imQbP2BD1usasK4g==}
     cpu: [arm]
     os: [linux]
-    libc: [glibc]

   '@rollup/rollup-linux-arm-musleabihf@4.60.0':
     resolution: {integrity: sha512-Sf7zusNI2CIU1HLzuu9Tc5YGAHEZs5Lu7N1ssJG4Tkw6e0MEsN7NdjUDDfGNHy2IU+ENyWT+L2obgWiguWibWQ==}
     cpu: [arm]
     os: [linux]
-    libc: [musl]

   '@rollup/rollup-linux-arm64-gnu@4.60.0':
     resolution: {integrity: sha512-DX2x7CMcrJzsE91q7/O02IJQ5/aLkVtYFryqCjduJhUfGKG6yJV8hxaw8pZa93lLEpPTP/ohdN4wFz7yp/ry9A==}
     cpu: [arm64]
     os: [linux]
-    libc: [glibc]

   '@rollup/rollup-linux-arm64-musl@4.60.0':
     resolution: {integrity: sha512-09EL+yFVbJZlhcQfShpswwRZ0Rg+z/CsSELFCnPt3iK+iqwGsI4zht3secj5vLEs957QvFFXnzAT0FFPIxSrkQ==}
     cpu: [arm64]
     os: [linux]
-    libc: [musl]

   '@rollup/rollup-linux-loong64-gnu@4.60.0':
     resolution: {integrity: sha512-i9IcCMPr3EXm8EQg5jnja0Zyc1iFxJjZWlb4wr7U2Wx/GrddOuEafxRdMPRYVaXjgbhvqalp6np07hN1w9kAKw==}
     cpu: [loong64]
     os: [linux]
-    libc: [glibc]

   '@rollup/rollup-linux-loong64-musl@4.60.0':
     resolution: {integrity: sha512-DGzdJK9kyJ+B78MCkWeGnpXJ91tK/iKA6HwHxF4TAlPIY7GXEvMe8hBFRgdrR9Ly4qebR/7gfUs9y2IoaVEyog==}
     cpu: [loong64]
     os: [linux]
-    libc: [musl]

   '@rollup/rollup-linux-ppc64-gnu@4.60.0':
     resolution: {integrity: sha512-RwpnLsqC8qbS8z1H1AxBA1H6qknR4YpPR9w2XX0vo2Sz10miu57PkNcnHVaZkbqyw/kUWfKMI73jhmfi9BRMUQ==}
     cpu: [ppc64]
     os: [linux]
-    libc: [glibc]

   '@rollup/rollup-linux-ppc64-musl@4.60.0':
     resolution: {integrity: sha512-Z8pPf54Ly3aqtdWC3G4rFigZgNvd+qJlOE52fmko3KST9SoGfAdSRCwyoyG05q1HrrAblLbk1/PSIV+80/pxLg==}
     cpu: [ppc64]
     os: [linux]
-    libc: [musl]

   '@rollup/rollup-linux-riscv64-gnu@4.60.0':
     resolution: {integrity: sha512-3a3qQustp3COCGvnP4SvrMHnPQ9d1vzCakQVRTliaz8cIp/wULGjiGpbcqrkv0WrHTEp8bQD/B3HBjzujVWLOA==}
     cpu: [riscv64]
     os: [linux]
-    libc: [glibc]

   '@rollup/rollup-linux-riscv64-musl@4.60.0':
     resolution: {integrity: sha512-pjZDsVH/1VsghMJ2/kAaxt6dL0psT6ZexQVrijczOf+PeP2BUqTHYejk3l6TlPRydggINOeNRhvpLa0AYpCWSQ==}
     cpu: [riscv64]
     os: [linux]
-    libc: [musl]

   '@rollup/rollup-linux-s390x-gnu@4.60.0':
     resolution: {integrity: sha512-3ObQs0BhvPgiUVZrN7gqCSvmFuMWvWvsjG5ayJ3Lraqv+2KhOsp+pUbigqbeWqueGIsnn+09HBw27rJ+gYK4VQ==}
     cpu: [s390x]
     os: [linux]
-    libc: [glibc]

   '@rollup/rollup-linux-x64-gnu@4.60.0':
     resolution: {integrity: sha512-EtylprDtQPdS5rXvAayrNDYoJhIz1/vzN2fEubo3yLE7tfAw+948dO0g4M0vkTVFhKojnF+n6C8bDNe+gDRdTg==}
     cpu: [x64]
     os: [linux]
-    libc: [glibc]

   '@rollup/rollup-linux-x64-musl@4.60.0':
     resolution: {integrity: sha512-k09oiRCi/bHU9UVFqD17r3eJR9bn03TyKraCrlz5ULFJGdJGi7VOmm9jl44vOJvRJ6P7WuBi/s2A97LxxHGIdw==}
     cpu: [x64]
     os: [linux]
-    libc: [musl]

   '@rollup/rollup-openbsd-x64@4.60.0':
     resolution: {integrity: sha512-1o/0/pIhozoSaDJoDcec+IVLbnRtQmHwPV730+AOD29lHEEo4F5BEUB24H0OBdhbBBDwIOSuf7vgg0Ywxdfiiw==}
@@ -1126,28 +1079,24 @@ packages:
     engines: {node: '>= 20'}
     cpu: [arm64]
     os: [linux]
-    libc: [glibc]

   '@tailwindcss/oxide-linux-arm64-musl@4.2.2':
     resolution: {integrity: sha512-oCfG/mS+/+XRlwNjnsNLVwnMWYH7tn/kYPsNPh+JSOMlnt93mYNCKHYzylRhI51X+TbR+ufNhhKKzm6QkqX8ag==}
     engines: {node: '>= 20'}
     cpu: [arm64]
     os: [linux]
-    libc: [musl]

   '@tailwindcss/oxide-linux-x64-gnu@4.2.2':
     resolution: {integrity: sha512-rTAGAkDgqbXHNp/xW0iugLVmX62wOp2PoE39BTCGKjv3Iocf6AFbRP/wZT/kuCxC9QBh9Pu8XPkv/zCZB2mcMg==}
     engines: {node: '>= 20'}
     cpu: [x64]
     os: [linux]
-    libc: [glibc]

   '@tailwindcss/oxide-linux-x64-musl@4.2.2':
     resolution: {integrity: sha512-XW3t3qwbIwiSyRCggeO2zxe3KWaEbM0/kW9e8+0XpBgyKU4ATYzcVSMKteZJ1iukJ3HgHBjbg9P5YPRCVUxlnQ==}
     engines: {node: '>= 20'}
     cpu: [x64]
     os: [linux]
-    libc: [musl]

   '@tailwindcss/oxide-wasm32-wasi@4.2.2':
     resolution: {integrity: sha512-eKSztKsmEsn1O5lJ4ZAfyn41NfG7vzCg496YiGtMDV86jz1q/irhms5O0VrY6ZwTUkFy/EKG3RfWgxSI3VbZ8Q==}
@@ -1287,9 +1236,6 @@ packages:
   '@types/responselike@1.0.3':
     resolution: {integrity: sha512-H/+L+UkTV33uf49PH5pCAUBVPNj2nDBXTN+qS1dOwyyg24l3CcicicCA7ca+HMvJBZcFgl5r8e+RR6elsb4Lyw==}

-  '@types/semver@7.7.1':
-    resolution: {integrity: sha512-FmgJfu+MOcQ370SD0ev7EI8TlCAfKYU+B4m5T3yXc1CiRN94g/SZPtsCkk506aUDtlMnFZvasDwHHUcZUEaYuA==}
-
   '@types/verror@1.10.11':
     resolution: {integrity: sha512-RlDm9K7+o5stv0Co8i8ZRGxDbrTxhJtgjqjFyVh/tXQyl/rYtTKlnTvZ88oSTeYREWurwx20Js4kTuKCsFkUtg==}
@@ -1779,9 +1725,6 @@ packages:
   eastasianwidth@0.2.0:
     resolution: {integrity: sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==}

-  effect@3.21.0:
-    resolution: {integrity: sha512-PPN80qRokCd1f015IANNhrwOnLO7GrrMQfk4/lnZRE/8j7UPWrNNjPV0uBrZutI/nHzernbW+J0hdqQysHiSnQ==}
-
   ejs@3.1.10:
     resolution: {integrity: sha512-UeJmFfOrAQS8OJWPZ4qtgHyWExa088/MtK5UEyoJGFH67cDEXkZSviOiKRCZ4Xij0zxI3JECgYs3oKx+AizQBA==}
     engines: {node: '>=0.10.0'}
@@ -1913,10 +1856,6 @@ packages:
     resolution: {integrity: sha512-Wrk35e8ydCKDj/ArClo1VrPVmN8zph5V4AtHwIuHhvMXsKf73UT3BOD+azBIW+3wOJ4FhEH7zyaJCFvChjYvMA==}
     engines: {'0': node >=0.6.0}

-  fast-check@3.23.2:
-    resolution: {integrity: sha512-h5+1OzzfCC3Ef7VbtKdcv7zsstUQwUDlYpUTvjeUsJAssPgLn7QzbboPtL5ro04Mq0rPOsMzl7q5hIbRs2wD1A==}
-    engines: {node: '>=8.0.0'}
-
   fast-deep-equal@3.1.3:
     resolution: {integrity: sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==}
@@ -2263,28 +2202,24 @@ packages:
     engines: {node: '>= 12.0.0'}
     cpu: [arm64]
     os: [linux]
-    libc: [glibc]

   lightningcss-linux-arm64-musl@1.32.0:
     resolution: {integrity: sha512-UpQkoenr4UJEzgVIYpI80lDFvRmPVg6oqboNHfoH4CQIfNA+HOrZ7Mo7KZP02dC6LjghPQJeBsvXhJod/wnIBg==}
     engines: {node: '>= 12.0.0'}
     cpu: [arm64]
     os: [linux]
-    libc: [musl]

   lightningcss-linux-x64-gnu@1.32.0:
     resolution: {integrity: sha512-V7Qr52IhZmdKPVr+Vtw8o+WLsQJYCTd8loIfpDaMRWGUZfBOYEJeyJIkqGIDMZPwPx24pUMfwSxxI8phr/MbOA==}
     engines: {node: '>= 12.0.0'}
     cpu: [x64]
     os: [linux]
-    libc: [glibc]

   lightningcss-linux-x64-musl@1.32.0:
     resolution: {integrity: sha512-bYcLp+Vb0awsiXg/80uCRezCYHNg1/l3mt0gzHnWV9XP1W5sKa5/TCdGWaR/zBM2PeF/HbsQv/j2URNOiVuxWg==}
     engines: {node: '>= 12.0.0'}
     cpu: [x64]
     os: [linux]
-    libc: [musl]

   lightningcss-win32-arm64-msvc@1.32.0:
     resolution: {integrity: sha512-8SbC8BR40pS6baCM8sbtYDSwEVQd4JlFTOlaD3gWGHfThTcABnNDBda6eTZeqbofalIJhFx0qKzgHJmcPTnGdw==}
@@ -2652,9 +2587,6 @@ packages:
     resolution: {integrity: sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==}
     engines: {node: '>=6'}

-  pure-rand@6.1.0:
-    resolution: {integrity: sha512-bVWawvoZoBYpp6yIoQtQXHZjmz35RSVHnUOTefl8Vcjr8snTPY1wnpSPMWekcFwbxI6gtmT7rSYPFvz71ldiOA==}
-
   quick-lru@5.1.1:
     resolution: {integrity: sha512-WuyALRjWPDGtt/wzJiadO5AXY+8hZ80hVpe6MyivgraREW751X3SbhRvG3eLKOYN+8VEvqLcf3wdnt44Z4S4SA==}
     engines: {node: '>=10'}
@@ -3385,11 +3317,6 @@ snapshots:
       ajv: 6.14.0
       ajv-keywords: 3.5.2(ajv@6.14.0)

-  '@effect/vitest@0.29.0(effect@3.21.0)(vitest@3.2.4(@types/debug@4.1.13)(@types/node@22.19.17)(jiti@2.6.1)(lightningcss@1.32.0)(tsx@4.21.0))':
-    dependencies:
-      effect: 3.21.0
-      vitest: 3.2.4(@types/debug@4.1.13)(@types/node@22.19.17)(jiti@2.6.1)(lightningcss@1.32.0)(tsx@4.21.0)
-
   '@electron/asar@3.4.1':
     dependencies:
       commander: 5.1.0
@@ -4093,8 +4020,6 @@ snapshots:
     dependencies:
      '@types/node': 22.19.17

-  '@types/semver@7.7.1': {}
-
   '@types/verror@1.10.11':
     optional: true
@@ -4284,7 +4209,7 @@ snapshots:

   app-builder-bin@5.0.0-alpha.12: {}

-  app-builder-lib@26.8.1(dmg-builder@26.8.1)(electron-builder-squirrel-windows@26.8.1):
+  app-builder-lib@26.8.1(dmg-builder@26.8.1(electron-builder-squirrel-windows@26.8.1))(electron-builder-squirrel-windows@26.8.1(dmg-builder@26.8.1)):
     dependencies:
       '@develar/schema-utils': 2.6.5
       '@electron/asar': 3.4.1
@@ -4636,7 +4561,7 @@ snapshots:

   dmg-builder@26.8.1(electron-builder-squirrel-windows@26.8.1):
     dependencies:
-      app-builder-lib: 26.8.1(dmg-builder@26.8.1)(electron-builder-squirrel-windows@26.8.1)
+      app-builder-lib: 26.8.1(dmg-builder@26.8.1(electron-builder-squirrel-windows@26.8.1))(electron-builder-squirrel-windows@26.8.1(dmg-builder@26.8.1))
       builder-util: 26.8.1
       fs-extra: 10.1.0
       iconv-lite: 0.6.3
@@ -4673,27 +4598,22 @@ snapshots:

   eastasianwidth@0.2.0: {}

-  effect@3.21.0:
-    dependencies:
-      '@standard-schema/spec': 1.1.0
-      fast-check: 3.23.2
-
   ejs@3.1.10:
     dependencies:
       jake: 10.9.4

   electron-builder-squirrel-windows@26.8.1(dmg-builder@26.8.1):
     dependencies:
-      app-builder-lib: 26.8.1(dmg-builder@26.8.1)(electron-builder-squirrel-windows@26.8.1)
+      app-builder-lib: 26.8.1(dmg-builder@26.8.1(electron-builder-squirrel-windows@26.8.1))(electron-builder-squirrel-windows@26.8.1(dmg-builder@26.8.1))
       builder-util: 26.8.1
       electron-winstaller: 5.4.0
     transitivePeerDependencies:
       - dmg-builder
       - supports-color

-  electron-builder@26.8.1(electron-builder-squirrel-windows@26.8.1):
+  electron-builder@26.8.1(electron-builder-squirrel-windows@26.8.1(dmg-builder@26.8.1)):
     dependencies:
-      app-builder-lib: 
26.8.1(dmg-builder@26.8.1(electron-builder-squirrel-windows@26.8.1))(electron-builder-squirrel-windows@26.8.1(dmg-builder@26.8.1)) builder-util: 26.8.1 builder-util-runtime: 9.5.1 chalk: 4.1.2 @@ -4898,10 +4818,6 @@ snapshots: extsprintf@1.4.1: optional: true - fast-check@3.23.2: - dependencies: - pure-rand: 6.1.0 - fast-deep-equal@3.1.3: {} fast-json-stable-stringify@2.1.0: {} @@ -5634,8 +5550,6 @@ snapshots: punycode@2.3.1: {} - pure-rand@6.1.0: {} - quick-lru@5.1.1: {} rc@1.2.8: