diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index ba48e743e..cac0afec5 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -242,6 +242,8 @@ jobs: - name: Prime tmux server (ensures socket dir exists) run: tmux new-session -d -s init && tmux kill-session -t init - run: npm ci + - name: Install web deps (active timeline refresh e2e wrapper invokes web vitest) + run: ./scripts/ci-npm-ci.sh web - name: Run pipe-pane e2e tests run: npx vitest run test/e2e/pipe-pane-stream.test.ts - name: Run other e2e tests diff --git a/server/src/db/migrations/043_session_text_tail_cache.sql b/server/src/db/migrations/043_session_text_tail_cache.sql new file mode 100644 index 000000000..13fceb7ac --- /dev/null +++ b/server/src/db/migrations/043_session_text_tail_cache.sql @@ -0,0 +1,11 @@ +CREATE TABLE IF NOT EXISTS session_text_tail_cache ( + server_id TEXT NOT NULL, + session_name TEXT NOT NULL, + events JSONB NOT NULL DEFAULT '[]'::jsonb, + latest_ts BIGINT, + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + PRIMARY KEY (server_id, session_name) +); + +CREATE INDEX IF NOT EXISTS idx_session_text_tail_cache_updated_at + ON session_text_tail_cache (updated_at DESC); diff --git a/server/src/db/queries.ts b/server/src/db/queries.ts index 483b25d19..986c8e3a1 100644 --- a/server/src/db/queries.ts +++ b/server/src/db/queries.ts @@ -96,6 +96,143 @@ export interface QuickData { phrases: string[]; } +export const SESSION_TEXT_TAIL_CACHE_LIMIT = 50; + +export interface SessionTextTailCacheItem { + eventId: string; + ts: number; + type: 'user.message' | 'assistant.text'; + text: string; + source?: string; + confidence?: string; +} + +interface DbSessionTextTailCacheRow { + server_id: string; + session_name: string; + events: SessionTextTailCacheItem[] | string | null; + latest_ts: number | null; + updated_at: Date; +} + +interface ClassifiedSessionTextTailEvent { + sessionName: string; + item: SessionTextTailCacheItem; +} + +function 
normalizeSessionTextTailText(text: unknown): string | null { + if (typeof text !== 'string') return null; + const trimmed = text.trim(); + return trimmed || null; +} + +function isSessionTextTailType(type: unknown): type is SessionTextTailCacheItem['type'] { + return type === 'user.message' || type === 'assistant.text'; +} + +function parseSessionTextTailCacheEvents( + raw: SessionTextTailCacheItem[] | string | null | undefined, +): SessionTextTailCacheItem[] { + let parsed: unknown = raw; + if (typeof parsed === 'string') { + try { + parsed = JSON.parse(parsed); + } catch { + return []; + } + } + if (!Array.isArray(parsed)) return []; + const items: SessionTextTailCacheItem[] = []; + for (const entry of parsed) { + if (!entry || typeof entry !== 'object') return []; + const row = entry as Record; + if ( + typeof row.eventId !== 'string' + || typeof row.ts !== 'number' + || !isSessionTextTailType(row.type) + || typeof row.text !== 'string' + ) { + return []; + } + const text = normalizeSessionTextTailText(row.text); + if (!text) return []; + const item: SessionTextTailCacheItem = { + eventId: row.eventId, + ts: row.ts, + type: row.type, + text, + }; + if (typeof row.source === 'string' && row.source.trim()) item.source = row.source.trim(); + if (typeof row.confidence === 'string' && row.confidence.trim()) item.confidence = row.confidence.trim(); + items.push(item); + } + return items; +} + +function mergeSessionTextTailCacheEvents( + existing: SessionTextTailCacheItem[], + incoming: SessionTextTailCacheItem, +): SessionTextTailCacheItem[] { + const deduped = new Map(); + for (const item of existing) deduped.set(item.eventId, item); + deduped.set(incoming.eventId, incoming); + const merged = [...deduped.values()].sort((a, b) => { + if (a.ts !== b.ts) return a.ts - b.ts; + return a.eventId.localeCompare(b.eventId); + }); + return merged.length > SESSION_TEXT_TAIL_CACHE_LIMIT + ? 
merged.slice(merged.length - SESSION_TEXT_TAIL_CACHE_LIMIT) + : merged; +} + +export function mergeSessionTextTailCacheItems( + existing: SessionTextTailCacheItem[], + incoming: SessionTextTailCacheItem[], +): SessionTextTailCacheItem[] { + const deduped = new Map(); + for (const item of existing) deduped.set(item.eventId, item); + for (const item of incoming) deduped.set(item.eventId, item); + const merged = [...deduped.values()].sort((a, b) => { + if (a.ts !== b.ts) return a.ts - b.ts; + return a.eventId.localeCompare(b.eventId); + }); + return merged.length > SESSION_TEXT_TAIL_CACHE_LIMIT + ? merged.slice(merged.length - SESSION_TEXT_TAIL_CACHE_LIMIT) + : merged; +} + +export function classifySessionTextTailEvent(rawEvent: Record): ClassifiedSessionTextTailEvent | null { + const sessionName = typeof rawEvent.sessionId === 'string' ? rawEvent.sessionId : null; + const eventId = typeof rawEvent.eventId === 'string' ? rawEvent.eventId : null; + const ts = typeof rawEvent.ts === 'number' ? rawEvent.ts : null; + const type = isSessionTextTailType(rawEvent.type) ? rawEvent.type : null; + const payload = rawEvent.payload && typeof rawEvent.payload === 'object' + ? 
rawEvent.payload as Record + : null; + if (!sessionName || !eventId || ts === null || !type || !payload) return null; + if (type === 'assistant.text' && payload.streaming === true) return null; + const text = normalizeSessionTextTailText(payload.text); + if (!text) return null; + const item: SessionTextTailCacheItem = { eventId, ts, type, text }; + if (typeof rawEvent.source === 'string' && rawEvent.source.trim()) item.source = rawEvent.source.trim(); + if (typeof rawEvent.confidence === 'string' && rawEvent.confidence.trim()) item.confidence = rawEvent.confidence.trim(); + return { sessionName, item }; +} + +export function collectSessionTextTailCacheItems( + sessionName: string, + rawEvents: unknown[], +): SessionTextTailCacheItem[] { + const items: SessionTextTailCacheItem[] = []; + for (const raw of rawEvents) { + if (!raw || typeof raw !== 'object') continue; + const classified = classifySessionTextTailEvent(raw as Record); + if (!classified || classified.sessionName !== sessionName) continue; + items.push(classified.item); + } + return mergeSessionTextTailCacheItems([], items); +} + // ── Users ───────────────────────────────────────────────────────────────── export async function createUser(db: Database, id: string): Promise { @@ -542,6 +679,68 @@ export async function updateSession( ); } +export async function upsertSessionTextTailCacheEvent( + db: Database, + serverId: string, + rawEvent: Record, +): Promise { + const classified = classifySessionTextTailEvent(rawEvent); + if (!classified) return; + await db.transaction(async (tx) => { + const row = await tx.queryOne>( + `SELECT events + FROM session_text_tail_cache + WHERE server_id = $1 AND session_name = $2 + FOR UPDATE`, + [serverId, classified.sessionName], + ); + const existing = parseSessionTextTailCacheEvents(row?.events ?? null); + const events = mergeSessionTextTailCacheEvents(existing, classified.item); + const latestTs = events.length > 0 ? 
events[events.length - 1]!.ts : null; + await tx.execute( + `INSERT INTO session_text_tail_cache (server_id, session_name, events, latest_ts, updated_at) + VALUES ($1, $2, $3::jsonb, $4, NOW()) + ON CONFLICT (server_id, session_name) + DO UPDATE SET events = EXCLUDED.events, latest_ts = EXCLUDED.latest_ts, updated_at = NOW()`, + [serverId, classified.sessionName, JSON.stringify(events), latestTs], + ); + }); +} + +export async function getSessionTextTailCache( + db: Database, + serverId: string, + sessionName: string, +): Promise { + const row = await db.queryOne>( + `SELECT events + FROM session_text_tail_cache + WHERE server_id = $1 AND session_name = $2`, + [serverId, sessionName], + ); + const events = parseSessionTextTailCacheEvents(row?.events ?? null); + return events.length > SESSION_TEXT_TAIL_CACHE_LIMIT + ? events.slice(events.length - SESSION_TEXT_TAIL_CACHE_LIMIT) + : events; +} + +export async function replaceSessionTextTailCache( + db: Database, + serverId: string, + sessionName: string, + events: SessionTextTailCacheItem[], +): Promise { + const bounded = mergeSessionTextTailCacheItems([], events); + const latestTs = bounded.length > 0 ? 
bounded[bounded.length - 1]!.ts : null; + await db.execute( + `INSERT INTO session_text_tail_cache (server_id, session_name, events, latest_ts, updated_at) + VALUES ($1, $2, $3::jsonb, $4, NOW()) + ON CONFLICT (server_id, session_name) + DO UPDATE SET events = EXCLUDED.events, latest_ts = EXCLUDED.latest_ts, updated_at = NOW()`, + [serverId, sessionName, JSON.stringify(bounded), latestTs], + ); +} + // ── Quick data ──────────────────────────────────────────────────────────── const EMPTY_QUICK_DATA: QuickData = { history: [], sessionHistory: {}, commands: [], phrases: [] }; diff --git a/server/src/routes/watch.ts b/server/src/routes/watch.ts index 8b3c92072..c03b44aa5 100644 --- a/server/src/routes/watch.ts +++ b/server/src/routes/watch.ts @@ -1,6 +1,16 @@ import { Hono } from 'hono'; import type { Env } from '../env.js'; -import { getServersByUserId, getDbSessionsByServer, getSubSessionsByServer, getUserPref } from '../db/queries.js'; +import { + getServersByUserId, + getDbSessionsByServer, + getSubSessionsByServer, + getUserPref, + getSessionTextTailCache, + collectSessionTextTailCacheItems, + mergeSessionTextTailCacheItems, + replaceSessionTextTailCache, + SESSION_TEXT_TAIL_CACHE_LIMIT, +} from '../db/queries.js'; import { requireAuth, resolveServerRole } from '../security/authorization.js'; import { WsBridge } from '../ws/bridge.js'; import { IMCODES_POD_HEADER } from '../../../shared/http-header-names.js'; @@ -8,6 +18,9 @@ import { getPodIdentity } from '../util/pod-identity.js'; import logger from '../util/logger.js'; export const watchRoutes = new Hono<{ Bindings: Env; Variables: { userId: string; role: string } }>(); +const TEXT_TAIL_HISTORY_PAGE_LIMIT = 500; +const TEXT_TAIL_HISTORY_MAX_PAGES = 6; +const TEXT_TAIL_HISTORY_TIMEOUT_MS = 1500; type WatchSessionState = 'working' | 'idle' | 'error' | 'stopped'; @@ -52,6 +65,61 @@ function titleForSubSession(sub: { id: string; label: string | null; type: strin return sub.type || sub.id; } +async function 
backfillSessionTextTailFromDaemon( + serverId: string, + sessionName: string, + cached: Awaited>, +): Promise>> { + let events = cached; + let beforeTs: number | undefined; + const seenPages = new Set(); + + for (let page = 0; page < TEXT_TAIL_HISTORY_MAX_PAGES; page++) { + if (events.length >= SESSION_TEXT_TAIL_CACHE_LIMIT) break; + + const response = await WsBridge.get(serverId).requestTimelineHistory({ + sessionName, + limit: TEXT_TAIL_HISTORY_PAGE_LIMIT, + timeoutMs: TEXT_TAIL_HISTORY_TIMEOUT_MS, + ...(beforeTs !== undefined ? { beforeTs } : {}), + }); + const rawEvents = Array.isArray(response.events) + ? response.events.filter((event): event is Record => !!event && typeof event === 'object') + : []; + if (rawEvents.length === 0) break; + + const fingerprint = JSON.stringify([ + rawEvents.length, + rawEvents[0]?.eventId, + rawEvents[0]?.ts, + rawEvents.at(-1)?.eventId, + rawEvents.at(-1)?.ts, + ]); + if (seenPages.has(fingerprint)) break; + seenPages.add(fingerprint); + + const live = collectSessionTextTailCacheItems(sessionName, rawEvents); + if (live.length > 0) { + events = mergeSessionTextTailCacheItems(events, live); + } + + if (rawEvents.length < TEXT_TAIL_HISTORY_PAGE_LIMIT) break; + + let oldestTs: number | undefined; + for (const event of rawEvents) { + if (typeof event.ts !== 'number' || !Number.isFinite(event.ts)) continue; + oldestTs = oldestTs === undefined ? event.ts : Math.min(oldestTs, event.ts); + } + if (oldestTs === undefined) break; + + // Keep a 1ms overlap on the page boundary so same-ts events are not + // skipped when the next page is requested. 
+ beforeTs = oldestTs + 1; + } + + return events; +} + function sanitizeWatchTimelineEvent(raw: unknown): { eventId: string; sessionId: string; @@ -356,3 +424,39 @@ watchRoutes.get('/server/:id/timeline/history/full', requireAuth(), async (c) => return c.json({ error: 'relay_failed' }, 502); } }); + +watchRoutes.get('/server/:id/timeline/text-tail', requireAuth(), async (c) => { + const userId = c.get('userId' as never) as string; + const serverId = c.req.param('id')!; + const role = await resolveServerRole(c.env.DB, serverId, userId); + if (role === 'none') return c.json({ error: 'forbidden' }, 403); + + const sessionName = c.req.query('sessionName')?.trim(); + if (!sessionName) return c.json({ error: 'session_name_required' }, 400); + + try { + const cached = await getSessionTextTailCache(c.env.DB, serverId, sessionName); + let events = cached; + try { + events = await backfillSessionTextTailFromDaemon(serverId, sessionName, cached); + if (JSON.stringify(events) !== JSON.stringify(cached)) { + await replaceSessionTextTailCache(c.env.DB, serverId, sessionName, events); + } + } catch (err) { + logger.info({ + serverId, + sessionName, + err: err instanceof Error ? err.message : String(err), + }, 'timeline.text-tail backfill skipped'); + } + c.header(IMCODES_POD_HEADER, getPodIdentity()); + return c.json({ sessionName, events }); + } catch (err) { + logger.warn({ + serverId, + sessionName, + err: err instanceof Error ? 
err.message : String(err), + }, 'timeline.text-tail failed'); + return c.json({ error: 'cache_read_failed' }, 500); + } +}); diff --git a/server/src/ws/bridge.ts b/server/src/ws/bridge.ts index 097ce4c91..4413f50c6 100644 --- a/server/src/ws/bridge.ts +++ b/server/src/ws/bridge.ts @@ -51,7 +51,7 @@ import { type PreviewWsOpenedMessage, } from '../../../shared/preview-types.js'; import { LocalWebPreviewRegistry } from '../preview/registry.js'; -import { updateServerHeartbeat, updateServerStatus, upsertDiscussion, insertDiscussionRound, createSubSession, updateSubSession, upsertOrchestrationRun, updateProviderStatus, clearProviderStatus, updateProviderRemoteSessions } from '../db/queries.js'; +import { updateServerHeartbeat, updateServerStatus, upsertDiscussion, insertDiscussionRound, createSubSession, getSubSessionById, updateSubSession, upsertOrchestrationRun, updateProviderStatus, clearProviderStatus, updateProviderRemoteSessions, upsertSessionTextTailCacheEvent } from '../db/queries.js'; import logger from '../util/logger.js'; import { pickReadableSessionDisplay } from '../../../shared/session-display.js'; import { isKnownTestSessionLike } from '../../../shared/test-session-guard.js'; @@ -968,12 +968,17 @@ export class WsBridge { // ── Timeline events: session-scoped ─────────────────────────────────────── if (type === 'timeline.event') { - const sessionId = (msg.event as Record | undefined)?.sessionId as string | undefined; - if (!sessionId) { + const rawEvent = msg.event as Record | undefined; + const sessionId = rawEvent?.sessionId as string | undefined; + if (!rawEvent || !sessionId) { logger.warn({ serverId: this.serverId }, 'timeline.event missing sessionId — discarded'); return; } - this.ingestRecentTextFromTimelineEvent(msg.event as Record); + this.ingestRecentTextFromTimelineEvent(rawEvent); + if (this.db) { + void upsertSessionTextTailCacheEvent(this.db, this.serverId, rawEvent) + .catch((err) => logger.warn({ err, serverId: this.serverId, sessionId }, 
'Failed to update session_text_tail_cache')); + } this.sendToSessionSubscribers(sessionId, JSON.stringify(msg)); return; } @@ -1061,6 +1066,7 @@ export class WsBridge { // ── Sub-session sync: daemon creates sub-sessions → persist to DB ──────── if (type === 'subsession.sync' && this.db) { + const db = this.db; if (isKnownTestSessionLike({ name: typeof msg.id === 'string' ? `deck_sub_${msg.id}` : undefined, cwd: typeof msg.cwd === 'string' ? msg.cwd : undefined, @@ -1075,33 +1081,43 @@ export class WsBridge { const agentType = typeof msg.sessionType === 'string' && msg.sessionType ? msg.sessionType : undefined; this.activeSubSessions.set(subSessionName, { name: subSessionName, label, parentSession, agentType }); } - void createSubSession( - this.db, - msg.id as string, - this.serverId, - msg.sessionType as string, - (msg.shellBin as string) || null, - (msg.cwd as string) || null, - (msg.label as string) || null, - (msg.ccSessionId as string) || null, - (msg.geminiSessionId as string) || null, - (msg.parentSession as string) || null, - (msg.runtimeType as string) || null, - (msg.providerId as string) || null, - (msg.providerSessionId as string) || null, - (msg.description as string) || null, - (msg.ccPresetId as string) || null, - (msg.requestedModel as string) || null, - ((msg.activeModel as string) || (msg.modelDisplay as string)) || null, - (msg.effort as string) || null, - (msg.transportConfig as Record) || null, - ).then(() => { + void (async () => { + const requestedType = typeof msg.sessionType === 'string' && msg.sessionType.trim() + ? msg.sessionType.trim() + : null; + const persisted = requestedType ? null : await getSubSessionById(db, msg.id as string, this.serverId).catch(() => null); + const sessionType = requestedType ?? persisted?.type ?? 
null; + if (!sessionType) { + logger.warn({ id: msg.id }, 'Skipping sub-session DB sync without sessionType'); + return; + } + await createSubSession( + db, + msg.id as string, + this.serverId, + sessionType, + (msg.shellBin as string) || null, + (msg.cwd as string) || null, + (msg.label as string) || null, + (msg.ccSessionId as string) || null, + (msg.geminiSessionId as string) || null, + (msg.parentSession as string) || null, + (msg.runtimeType as string) || null, + (msg.providerId as string) || null, + (msg.providerSessionId as string) || null, + (msg.description as string) || null, + (msg.ccPresetId as string) || null, + (msg.requestedModel as string) || null, + ((msg.activeModel as string) || (msg.modelDisplay as string)) || null, + (msg.effort as string) || null, + (msg.transportConfig as Record) || null, + ); // Notify browsers so sub-session appears immediately without page refresh this.broadcastToBrowsers(JSON.stringify({ type: 'subsession.created', id: msg.id, sessionName: `deck_sub_${msg.id}`, - sessionType: msg.sessionType, + sessionType, cwd: msg.cwd || null, label: msg.label || null, parentSession: msg.parentSession || null, @@ -1123,7 +1139,7 @@ export class WsBridge { quotaMeta: msg.quotaMeta || null, state: (msg.state as string) || 'idle', })); - }).catch((e) => logger.error({ err: e, id: msg.id }, 'Failed to sync sub-session to DB')); + })().catch((e) => logger.error({ err: e, id: msg.id }, 'Failed to sync sub-session to DB')); return; } if (type === 'subsession.update_gemini_id' && this.db) { @@ -2533,9 +2549,6 @@ export class WsBridge { } private async dispatchEventPush(db: Database, env: Env, msg: Record): Promise { - // Always send APNs push — iOS handles foreground display via UNUserNotificationCenterDelegate. - // Badge count must increment regardless of app state. - // Dedup: same session idle/error can fire from both hook and timeline paths const sessionKey = `${msg.type}:${msg.session ?? msg.sessionId ?? 
''}`; const now = Date.now(); @@ -2546,6 +2559,12 @@ export class WsBridge { const server = await db.queryOne<{ user_id: string; name: string }>('SELECT user_id, name FROM servers WHERE id = $1', [this.serverId]); if (!server) return; + for (const mobileWs of this.mobileSockets) { + if (mobileWs.readyState !== WebSocket.OPEN) continue; + if (this.browserUserIds.get(mobileWs) !== server.user_id) continue; + return; + } + const { dispatchPush } = await import('../routes/push.js').catch((err) => { logger.error({ err }, 'Failed to import push module — push notifications disabled'); return { dispatchPush: null }; diff --git a/server/test/bridge.test.ts b/server/test/bridge.test.ts index 6e202264c..11e0ef099 100644 --- a/server/test/bridge.test.ts +++ b/server/test/bridge.test.ts @@ -1,6 +1,7 @@ import { describe, it, expect, beforeEach, afterEach, vi } from 'vitest'; import { EventEmitter } from 'node:events'; import { WsBridge } from '../src/ws/bridge.js'; +import * as dbQueries from '../src/db/queries.js'; // ── Mock WebSocket ───────────────────────────────────────────────────────────── @@ -49,13 +50,15 @@ function packFrame(sessionName: string, payload: Buffer): Buffer { // ── Mock DB ──────────────────────────────────────────────────────────────────── function makeDb(tokenHash: string) { - return { + const db = { queryOne: async () => ({ token_hash: tokenHash }), query: async () => [], execute: async () => ({ changes: 1 }), exec: async () => {}, + transaction: async (fn: (tx: import('../src/db/client.js').Database) => Promise) => fn(db as unknown as import('../src/db/client.js').Database), close: () => {}, - } as unknown as import('../src/db/client.js').Database; + }; + return db as unknown as import('../src/db/client.js').Database; } // ── Mock crypto + push ───────────────────────────────────────────────────────── @@ -985,6 +988,28 @@ describe('WsBridge', () => { expect(browserA.sentStrings.length).toBeGreaterThan(0); 
expect(browserB.sentStrings.length).toBeGreaterThan(0); }); + + it('timeline.event still reaches subscribers when text-tail cache write fails', async () => { + const spy = vi.spyOn(dbQueries, 'upsertSessionTextTailCacheEvent').mockRejectedValueOnce(new Error('db down')); + const { daemonWs, browserA, browserB } = await setupTwoBrowsers(); + + daemonWs.emit('message', JSON.stringify({ + type: 'timeline.event', + event: { + sessionId: 'session-a', + eventId: 'tail-fail-1', + ts: 123, + type: 'assistant.text', + payload: { text: 'still delivered' }, + }, + })); + await flushAsync(); + + expect(browserA.sentStrings.some((msg) => msg.includes('tail-fail-1'))).toBe(true); + expect(browserB.sentStrings.length).toBe(0); + expect(spy).toHaveBeenCalled(); + spy.mockRestore(); + }); }); // ── P0: default-deny — missing session identifier → discard, NOT broadcast ─ @@ -1774,7 +1799,7 @@ describe('WsBridge', () => { expect(payload.body).toContain('ready for input'); }); - it('sends push even when mobile client is connected (badge must increment)', async () => { + it('suppresses push when a mobile client is connected', async () => { const { dispatchPush } = await import('../src/routes/push.js'); const { bridge, daemonWs } = await setupPushBridge(); @@ -1787,7 +1812,7 @@ describe('WsBridge', () => { })); await flushAsync(); - expect(dispatchPush).toHaveBeenCalled(); + expect(dispatchPush).not.toHaveBeenCalled(); }); it('sends push when only desktop browser is connected', async () => { @@ -2637,5 +2662,38 @@ describe('WsBridge', () => { { eventId: 'e3', type: 'user.message', text: 'second', ts: 3 }, ]); }); + + it('fails open when session_text_tail_cache update throws', async () => { + const bridge = WsBridge.get(serverId); + const daemonWs = new MockWs(); + const db = makeDb('valid-hash') as import('../src/db/client.js').Database & { transaction: ReturnType }; + db.transaction = vi.fn(async () => { throw new Error('write failed'); }) as never; + const browserWs = new MockWs(); + 
const errorSpy = vi.spyOn(console, 'error').mockImplementation(() => {}); + + bridge.handleDaemonConnection(daemonWs as never, db, {} as never); + daemonWs.emit('message', JSON.stringify({ type: 'auth', serverId, token: 't' })); + await flushAsync(); + + bridge.handleBrowserConnection(browserWs as never, 'user-1', db); + browserWs.emit('message', JSON.stringify({ type: 'terminal.subscribe', session: 'deck_proj_brain' })); + await flushAsync(); + browserWs.sent.length = 0; + + daemonWs.emit('message', JSON.stringify({ + type: 'timeline.event', + event: { + eventId: 'e1', + sessionId: 'deck_proj_brain', + ts: 1, + type: 'assistant.text', + payload: { text: 'still delivered' }, + }, + })); + await flushAsync(); + + expect(browserWs.sentStrings.some((msg) => msg.includes('"type":"timeline.event"'))).toBe(true); + expect(errorSpy).toHaveBeenCalled(); + }); }); }); diff --git a/server/test/db.integration.test.ts b/server/test/db.integration.test.ts index b67d555b1..7abb89170 100644 --- a/server/test/db.integration.test.ts +++ b/server/test/db.integration.test.ts @@ -52,6 +52,10 @@ import { getDiscussionsByServer, insertDiscussionRound, getDiscussionRounds, + classifySessionTextTailEvent, + getSessionTextTailCache, + upsertSessionTextTailCacheEvent, + SESSION_TEXT_TAIL_CACHE_LIMIT, upsertOrchestrationRun, getOrchestrationRunById, getActiveOrchestrationRuns, @@ -82,7 +86,7 @@ describe('runMigrations', () => { const tables = [ 'users', 'platform_identities', 'servers', 'channel_bindings', 'platform_bots', 'api_keys', 'refresh_tokens', 'idempotency_records', - 'auth_nonces', 'audit_log', 'pending_binds', 'sessions', 'cron_jobs', 'cron_executions', + 'auth_nonces', 'audit_log', 'pending_binds', 'sessions', 'session_text_tail_cache', 'cron_jobs', 'cron_executions', 'teams', 'team_members', 'push_subscriptions', ]; @@ -243,6 +247,16 @@ describe('runMigrations', () => { ); expect(idx?.indexname).toBe('idx_shared_context_projections_status'); }); + + it('session_text_tail_cache 
has updated_at index (migration 043)', async () => { + const idx = await db.queryOne<{ indexname: string }>( + `SELECT indexname FROM pg_indexes + WHERE tablename = 'session_text_tail_cache' + AND indexname = 'idx_session_text_tail_cache_updated_at'`, + [], + ); + expect(idx?.indexname).toBe('idx_session_text_tail_cache_updated_at'); + }); }); // ── 2. Database wrapper ───────────────────────────────────────────────────── @@ -274,6 +288,126 @@ describe('Database wrapper', () => { }); }); +describe('session_text_tail_cache', () => { + const serverId = `tail-srv-${Math.random().toString(36).slice(2)}`; + const userId = `tail-user-${Math.random().toString(36).slice(2)}`; + const sessionName = `deck_tail_${Math.random().toString(36).slice(2)}`; + + beforeAll(async () => { + await createUser(db, userId); + await createServer(db, serverId, userId, 'tail-server', 'hash-tail'); + }); + + it('classifies only completed non-empty text events', () => { + expect(classifySessionTextTailEvent({ + eventId: 'e-user', + sessionId: sessionName, + ts: 1, + type: 'user.message', + payload: { text: 'hello' }, + source: 'daemon', + confidence: 'high', + })).toEqual({ + sessionName, + item: { + eventId: 'e-user', + ts: 1, + type: 'user.message', + text: 'hello', + source: 'daemon', + confidence: 'high', + }, + }); + + expect(classifySessionTextTailEvent({ + eventId: 'e-stream', + sessionId: sessionName, + ts: 2, + type: 'assistant.text', + payload: { text: 'partial', streaming: true }, + })).toBeNull(); + + expect(classifySessionTextTailEvent({ + eventId: 'e-empty', + sessionId: sessionName, + ts: 3, + type: 'assistant.text', + payload: { text: ' ' }, + })).toBeNull(); + + expect(classifySessionTextTailEvent({ + eventId: 'e-tool', + sessionId: sessionName, + ts: 4, + type: 'tool.call', + payload: { text: 'nope' }, + })).toBeNull(); + }); + + it('overwrites by eventId and retains only the newest 50 cached entries', async () => { + await db.execute('DELETE FROM session_text_tail_cache WHERE 
server_id = $1 AND session_name = $2', [serverId, sessionName]); + + for (let i = 1; i <= SESSION_TEXT_TAIL_CACHE_LIMIT + 5; i++) { + await upsertSessionTextTailCacheEvent(db, serverId, { + eventId: `e-${i}`, + sessionId: sessionName, + ts: i, + type: i % 2 === 0 ? 'assistant.text' : 'user.message', + payload: { text: `message ${i}` }, + }); + } + + await upsertSessionTextTailCacheEvent(db, serverId, { + eventId: `e-${SESSION_TEXT_TAIL_CACHE_LIMIT + 5}`, + sessionId: sessionName, + ts: SESSION_TEXT_TAIL_CACHE_LIMIT + 5, + type: 'assistant.text', + payload: { text: 'updated latest message' }, + source: 'daemon', + confidence: 'high', + }); + + const events = await getSessionTextTailCache(db, serverId, sessionName); + expect(events).toHaveLength(SESSION_TEXT_TAIL_CACHE_LIMIT); + expect(events[0]?.eventId).toBe('e-6'); + expect(events.at(-1)).toEqual({ + eventId: `e-${SESSION_TEXT_TAIL_CACHE_LIMIT + 5}`, + ts: SESSION_TEXT_TAIL_CACHE_LIMIT + 5, + type: 'assistant.text', + text: 'updated latest message', + source: 'daemon', + confidence: 'high', + }); + }); + + it('treats malformed rows as empty and rebuilds from the current event', async () => { + const malformedSession = `${sessionName}-malformed`; + await db.execute('DELETE FROM session_text_tail_cache WHERE server_id = $1 AND session_name = $2', [serverId, malformedSession]); + await db.execute( + `INSERT INTO session_text_tail_cache (server_id, session_name, events, latest_ts, updated_at) + VALUES ($1, $2, $3::jsonb, $4, NOW())`, + [serverId, malformedSession, JSON.stringify({ bad: true }), 123], + ); + + await upsertSessionTextTailCacheEvent(db, serverId, { + eventId: 'e-rebuild', + sessionId: malformedSession, + ts: 999, + type: 'assistant.text', + payload: { text: 'rebuilt' }, + }); + + await expect(getSessionTextTailCache(db, serverId, malformedSession)).resolves.toEqual([ + { + eventId: 'e-rebuild', + ts: 999, + type: 'assistant.text', + text: 'rebuilt', + }, + ]); + }); +}); + // ── 3. 
ON CONFLICT ──────────────────────────────────────────────────────────── describe('ON CONFLICT', () => { @@ -593,6 +727,129 @@ describe('sessions', () => { }); }); +describe('session_text_tail_cache', () => { + let userId: string; + let serverId: string; + + beforeAll(async () => { + userId = 'tail-user-' + Math.random().toString(36).slice(2); + serverId = 'tail-srv-' + Math.random().toString(36).slice(2); + await createUser(db, userId); + await createServer(db, serverId, userId, 'tail-server', 'hash-tail'); + }); + + it('classifySessionTextTailEvent accepts non-empty user and completed assistant text only', () => { + expect(classifySessionTextTailEvent({ + sessionId: 'deck_proj_brain', + eventId: 'u1', + ts: 10, + type: 'user.message', + payload: { text: ' hello ' }, + source: 'daemon', + })).toEqual({ + sessionName: 'deck_proj_brain', + item: { eventId: 'u1', ts: 10, type: 'user.message', text: 'hello', source: 'daemon' }, + }); + expect(classifySessionTextTailEvent({ + sessionId: 'deck_proj_brain', + eventId: 'a1', + ts: 11, + type: 'assistant.text', + payload: { text: ' done ', streaming: false }, + confidence: 'high', + })).toEqual({ + sessionName: 'deck_proj_brain', + item: { eventId: 'a1', ts: 11, type: 'assistant.text', text: 'done', confidence: 'high' }, + }); + expect(classifySessionTextTailEvent({ + sessionId: 'deck_proj_brain', + eventId: 'a2', + ts: 12, + type: 'assistant.text', + payload: { text: 'stream', streaming: true }, + })).toBeNull(); + expect(classifySessionTextTailEvent({ + sessionId: 'deck_proj_brain', + eventId: 'x1', + ts: 13, + type: 'tool.call', + payload: { text: 'nope' }, + })).toBeNull(); + }); + + it('upsertSessionTextTailCacheEvent overwrites by eventId and returns ascending cached rows', async () => { + const sessionName = `deck_proj_tail_${Math.random().toString(36).slice(2)}`; + await upsertSessionTextTailCacheEvent(db, serverId, { + sessionId: sessionName, + eventId: 'dup-1', + ts: 100, + type: 'assistant.text', + payload: { 
text: 'first value' }, + }); + await upsertSessionTextTailCacheEvent(db, serverId, { + sessionId: sessionName, + eventId: 'dup-1', + ts: 110, + type: 'assistant.text', + payload: { text: 'final value' }, + confidence: 'high', + }); + await upsertSessionTextTailCacheEvent(db, serverId, { + sessionId: sessionName, + eventId: 'u-1', + ts: 90, + type: 'user.message', + payload: { text: 'older user' }, + }); + + const cached = await getSessionTextTailCache(db, serverId, sessionName); + expect(cached).toEqual([ + { eventId: 'u-1', ts: 90, type: 'user.message', text: 'older user' }, + { eventId: 'dup-1', ts: 110, type: 'assistant.text', text: 'final value', confidence: 'high' }, + ]); + }); + + it('treats malformed existing cache payloads as empty and rebuilds safely', async () => { + const sessionName = `deck_proj_malformed_${Math.random().toString(36).slice(2)}`; + await db.execute( + `INSERT INTO session_text_tail_cache (server_id, session_name, events, latest_ts, updated_at) + VALUES ($1, $2, $3::jsonb, $4, NOW())`, + [serverId, sessionName, JSON.stringify({ nope: true }), 1], + ); + + await upsertSessionTextTailCacheEvent(db, serverId, { + sessionId: sessionName, + eventId: 'rebuild-1', + ts: 200, + type: 'assistant.text', + payload: { text: 'rebuilt' }, + }); + + const cached = await getSessionTextTailCache(db, serverId, sessionName); + expect(cached).toEqual([ + { eventId: 'rebuild-1', ts: 200, type: 'assistant.text', text: 'rebuilt' }, + ]); + }); + + it('hard-retains only the newest 50 cached entries per session', async () => { + const sessionName = `deck_proj_limit_${Math.random().toString(36).slice(2)}`; + for (let i = 0; i < SESSION_TEXT_TAIL_CACHE_LIMIT + 5; i++) { + await upsertSessionTextTailCacheEvent(db, serverId, { + sessionId: sessionName, + eventId: `ev-${i}`, + ts: i, + type: i % 2 === 0 ? 
'user.message' : 'assistant.text', + payload: { text: `msg-${i}` }, + }); + } + + const cached = await getSessionTextTailCache(db, serverId, sessionName); + expect(cached).toHaveLength(SESSION_TEXT_TAIL_CACHE_LIMIT); + expect(cached[0]?.eventId).toBe('ev-5'); + expect(cached.at(-1)?.eventId).toBe(`ev-${SESSION_TEXT_TAIL_CACHE_LIMIT + 4}`); + }); +}); + // ── 7. Quick data ──────────────────────────────────────────────────────────── describe('quick data', () => { diff --git a/server/test/push-notification.integration.test.ts b/server/test/push-notification.integration.test.ts index 3bfff1922..be05d7e66 100644 --- a/server/test/push-notification.integration.test.ts +++ b/server/test/push-notification.integration.test.ts @@ -152,7 +152,7 @@ describe('push notification content', () => { }); describe('push with mobile connected', () => { - it('sends push even when mobile client is connected (badge must increment)', async () => { + it('suppresses push when mobile client is connected', async () => { const { dispatchPush } = await import('../src/routes/push.js'); const { bridge, daemonWs } = await setupAuthenticatedDaemon(); @@ -165,7 +165,7 @@ describe('push with mobile connected', () => { })); await flushAsync(); - expect(dispatchPush).toHaveBeenCalled(); + expect(dispatchPush).not.toHaveBeenCalled(); }); it('sends push when only desktop browser is connected', async () => { @@ -185,16 +185,4 @@ describe('push with mobile connected', () => { expect(dispatchPush).toHaveBeenCalled(); }); - it('sends push even when mobile client is connected (badge must always increment)', async () => { - const { dispatchPush } = await import('../src/routes/push.js'); - const { bridge, daemonWs } = await setupAuthenticatedDaemon(); - - const mobileWs = new MockWs(); - bridge.handleBrowserConnection(mobileWs as never, userId, db, true); - - // Push should fire even with mobile connected - daemonWs.emit('message', JSON.stringify({ type: 'session.idle', session: 'deck_cd_brain' })); - await 
flushAsync(); - expect(dispatchPush).toHaveBeenCalled(); - }); }); diff --git a/server/test/watch-routes.test.ts b/server/test/watch-routes.test.ts index ef0fc9024..dc4c22a9c 100644 --- a/server/test/watch-routes.test.ts +++ b/server/test/watch-routes.test.ts @@ -8,6 +8,8 @@ const mockGetServersByUserId = vi.fn(); const mockGetDbSessionsByServer = vi.fn(); const mockGetSubSessionsByServer = vi.fn(); const mockGetUserPref = vi.fn(); +const mockGetSessionTextTailCache = vi.fn(); +const mockReplaceSessionTextTailCache = vi.fn(); const mockRequestTimelineHistory = vi.fn(); const mockGetRecentText = vi.fn(); const mockGetRecentTextForWatch = vi.fn(); @@ -25,13 +27,19 @@ vi.mock('../src/security/authorization.js', () => ({ resolveServerRole: (...args: unknown[]) => mockResolveServerRole(...args as []), })); -vi.mock('../src/db/queries.js', () => ({ - getServersByUserId: (...args: unknown[]) => mockGetServersByUserId(...args), - getDbSessionsByServer: (...args: unknown[]) => mockGetDbSessionsByServer(...args), - getSubSessionsByServer: (...args: unknown[]) => mockGetSubSessionsByServer(...args), - getUserPref: (...args: unknown[]) => mockGetUserPref(...args), - getServerById: vi.fn(async () => ({ id: 'srv-1' })), -})); +vi.mock('../src/db/queries.js', async (importOriginal) => { + const actual = await importOriginal(); + return { + ...actual, + getServersByUserId: (...args: unknown[]) => mockGetServersByUserId(...args), + getDbSessionsByServer: (...args: unknown[]) => mockGetDbSessionsByServer(...args), + getSubSessionsByServer: (...args: unknown[]) => mockGetSubSessionsByServer(...args), + getUserPref: (...args: unknown[]) => mockGetUserPref(...args), + getSessionTextTailCache: (...args: unknown[]) => mockGetSessionTextTailCache(...args), + replaceSessionTextTailCache: (...args: unknown[]) => mockReplaceSessionTextTailCache(...args), + getServerById: vi.fn(async () => ({ id: 'srv-1' })), + }; +}); vi.mock('../src/ws/bridge.js', () => ({ WsBridge: { @@ -91,6 +99,8 @@ 
describe('Watch routes', () => { mockGetDbSessionsByServer.mockResolvedValue([]); mockGetSubSessionsByServer.mockResolvedValue([]); mockGetUserPref.mockResolvedValue(null); + mockGetSessionTextTailCache.mockResolvedValue([]); + mockReplaceSessionTextTailCache.mockResolvedValue(undefined); mockGetRecentText.mockReturnValue([]); mockGetRecentTextForWatch.mockResolvedValue([]); mockGetActiveMainSessions.mockReturnValue([]); @@ -335,12 +345,186 @@ describe('Watch routes', () => { await expect(res.json()).resolves.toEqual({ error: 'daemon_offline' }); }); + it('GET /api/server/:id/timeline/text-tail returns cached entries', async () => { + mockGetSessionTextTailCache.mockResolvedValue([ + { eventId: 'e1', ts: 100, type: 'user.message', text: 'hi' }, + { eventId: 'e2', ts: 200, type: 'assistant.text', text: 'hello', source: 'daemon', confidence: 'high' }, + ]); + + const app = await buildTestApp(); + const res = await app.request('/api/server/srv-1/timeline/text-tail?sessionName=deck_proj_brain'); + + expect(res.status).toBe(200); + expect(res.headers.get(IMCODES_POD_HEADER)).toBe('pod-a'); + await expect(res.json()).resolves.toEqual({ + sessionName: 'deck_proj_brain', + events: [ + { eventId: 'e1', ts: 100, type: 'user.message', text: 'hi' }, + { eventId: 'e2', ts: 200, type: 'assistant.text', text: 'hello', source: 'daemon', confidence: 'high' }, + ], + }); + }); + + it('GET /api/server/:id/timeline/text-tail backfills missing recent text from daemon history and rewrites cache', async () => { + mockGetSessionTextTailCache.mockResolvedValue([ + { eventId: 'e-old', ts: 100, type: 'user.message', text: 'old cached' }, + ]); + mockRequestTimelineHistory.mockResolvedValue({ + epoch: 1, + events: [ + { eventId: 'e-old', sessionId: 'deck_proj_brain', ts: 100, type: 'user.message', payload: { text: 'old cached' } }, + { eventId: 'e-new', sessionId: 'deck_proj_brain', ts: 200, type: 'assistant.text', payload: { text: 'new live text' } }, + { eventId: 'e-stream', sessionId: 
'deck_proj_brain', ts: 210, type: 'assistant.text', payload: { text: 'ignore me', streaming: true } }, + ], + }); + + const app = await buildTestApp(); + const res = await app.request('/api/server/srv-1/timeline/text-tail?sessionName=deck_proj_brain'); + + expect(res.status).toBe(200); + await expect(res.json()).resolves.toEqual({ + sessionName: 'deck_proj_brain', + events: [ + { eventId: 'e-old', ts: 100, type: 'user.message', text: 'old cached' }, + { eventId: 'e-new', ts: 200, type: 'assistant.text', text: 'new live text' }, + ], + }); + expect(mockReplaceSessionTextTailCache).toHaveBeenCalledWith( + expect.anything(), + 'srv-1', + 'deck_proj_brain', + [ + { eventId: 'e-old', ts: 100, type: 'user.message', text: 'old cached' }, + { eventId: 'e-new', ts: 200, type: 'assistant.text', text: 'new live text' }, + ], + ); + }); + + it('GET /api/server/:id/timeline/text-tail paginates daemon history until it collects 50 recent text events', async () => { + mockGetSessionTextTailCache.mockResolvedValue([]); + + const pageOne = Array.from({ length: 500 }, (_, index) => { + const ts = 1000 + index; + if (index >= 475) { + return { + eventId: `text-${index - 475}`, + sessionId: 'deck_proj_brain', + ts, + type: index % 2 === 0 ? 'user.message' : 'assistant.text', + payload: { text: `page-one-${index - 475}` }, + }; + } + return { + eventId: `tool-${index}`, + sessionId: 'deck_proj_brain', + ts, + type: 'tool.result', + payload: { output: `tool-${index}` }, + }; + }); + const pageTwo = Array.from({ length: 500 }, (_, index) => { + const ts = 500 + index; + if (index >= 470) { + return { + eventId: `older-${index - 470}`, + sessionId: 'deck_proj_brain', + ts, + type: index % 2 === 0 ? 
'assistant.text' : 'user.message', + payload: { text: `page-two-${index - 470}` }, + }; + } + return { + eventId: `state-${index}`, + sessionId: 'deck_proj_brain', + ts, + type: 'session.state', + payload: { state: 'idle' }, + }; + }); + + mockRequestTimelineHistory + .mockResolvedValueOnce({ epoch: 1, events: pageOne }) + .mockResolvedValueOnce({ epoch: 1, events: pageTwo }); + + const app = await buildTestApp(); + const res = await app.request('/api/server/srv-1/timeline/text-tail?sessionName=deck_proj_brain'); + + expect(res.status).toBe(200); + const body = await res.json(); + expect(body.sessionName).toBe('deck_proj_brain'); + expect(body.events).toHaveLength(50); + expect(body.events[0]).toEqual({ eventId: 'older-5', ts: 975, type: 'user.message', text: 'page-two-5' }); + expect(body.events.at(-1)).toEqual({ eventId: 'text-24', ts: 1499, type: 'assistant.text', text: 'page-one-24' }); + expect(mockRequestTimelineHistory).toHaveBeenCalledTimes(2); + expect(mockRequestTimelineHistory).toHaveBeenNthCalledWith(1, { + sessionName: 'deck_proj_brain', + limit: 500, + timeoutMs: 1500, + }); + expect(mockRequestTimelineHistory).toHaveBeenNthCalledWith(2, { + sessionName: 'deck_proj_brain', + limit: 500, + timeoutMs: 1500, + beforeTs: 1001, + }); + expect(mockReplaceSessionTextTailCache).toHaveBeenCalledWith( + expect.anything(), + 'srv-1', + 'deck_proj_brain', + expect.arrayContaining([ + { eventId: 'older-25', ts: 995, type: 'user.message', text: 'page-two-25' }, + { eventId: 'text-24', ts: 1499, type: 'assistant.text', text: 'page-one-24' }, + ]), + ); + }); + + it('GET /api/server/:id/timeline/text-tail returns empty list when no cache exists', async () => { + mockGetSessionTextTailCache.mockResolvedValue([]); + + const app = await buildTestApp(); + const res = await app.request('/api/server/srv-1/timeline/text-tail?sessionName=deck_proj_brain'); + + expect(res.status).toBe(200); + await expect(res.json()).resolves.toEqual({ + sessionName: 'deck_proj_brain', + 
events: [], + }); + }); + + it('GET /api/server/:id/timeline/text-tail isolates cache read failures', async () => { + mockGetSessionTextTailCache.mockRejectedValue(new Error('db down')); + + const app = await buildTestApp(); + const res = await app.request('/api/server/srv-1/timeline/text-tail?sessionName=deck_proj_brain'); + + expect(res.status).toBe(500); + await expect(res.json()).resolves.toEqual({ error: 'cache_read_failed' }); + }); + + it('GET /api/server/:id/timeline/text-tail falls back to cached entries when daemon history backfill fails', async () => { + mockGetSessionTextTailCache.mockResolvedValue([ + { eventId: 'e1', ts: 100, type: 'user.message', text: 'cached only' }, + ]); + mockRequestTimelineHistory.mockRejectedValue(new Error('daemon_offline')); + + const app = await buildTestApp(); + const res = await app.request('/api/server/srv-1/timeline/text-tail?sessionName=deck_proj_brain'); + + expect(res.status).toBe(200); + await expect(res.json()).resolves.toEqual({ + sessionName: 'deck_proj_brain', + events: [{ eventId: 'e1', ts: 100, type: 'user.message', text: 'cached only' }], + }); + expect(mockReplaceSessionTextTailCache).not.toHaveBeenCalled(); + }); + it('watch routes return 403 when the user has no access to the server', async () => { mockResolveServerRole.mockResolvedValue('none'); const app = await buildTestApp(); const sessionsRes = await app.request('/api/watch/sessions?serverId=srv-1'); const historyRes = await app.request('/api/server/srv-1/timeline/history?sessionName=deck_proj_brain'); + const tailRes = await app.request('/api/server/srv-1/timeline/text-tail?sessionName=deck_proj_brain'); const sendRes = await app.request('/api/server/srv-1/session/send', { method: 'POST', headers: { 'Content-Type': 'application/json' }, @@ -353,6 +537,9 @@ describe('Watch routes', () => { expect(historyRes.status).toBe(403); await expect(historyRes.json()).resolves.toEqual({ error: 'forbidden' }); + expect(tailRes.status).toBe(403); + await 
expect(tailRes.json()).resolves.toEqual({ error: 'forbidden' }); + expect(sendRes.status).toBe(403); await expect(sendRes.json()).resolves.toEqual({ error: 'forbidden', diff --git a/shared/cc-presets.ts b/shared/cc-presets.ts new file mode 100644 index 000000000..43eaefe32 --- /dev/null +++ b/shared/cc-presets.ts @@ -0,0 +1,32 @@ +export const CC_PRESET_MSG = { + LIST: 'cc.presets.list', + LIST_RESPONSE: 'cc.presets.list_response', + SAVE: 'cc.presets.save', + SAVE_RESPONSE: 'cc.presets.save_response', + DISCOVER_MODELS: 'cc.presets.discover_models', + DISCOVER_MODELS_RESPONSE: 'cc.presets.discover_models_response', +} as const; + +export type CcPresetTransportMode = + | 'qwen-compatible-api' + | 'claude-cli-preset'; + +export type CcPresetAuthType = 'anthropic'; + +export interface CcPresetModelInfo { + id: string; + name?: string; +} + +export interface CcPreset { + name: string; + env: Record; + contextWindow?: number; + initMessage?: string; + transportMode?: CcPresetTransportMode; + authType?: CcPresetAuthType; + availableModels?: CcPresetModelInfo[]; + defaultModel?: string; + lastDiscoveredAt?: number; + modelDiscoveryError?: string; +} diff --git a/shared/transport/file-transfer.ts b/shared/transport/file-transfer.ts index 069f14aca..ad24383d5 100644 --- a/shared/transport/file-transfer.ts +++ b/shared/transport/file-transfer.ts @@ -33,8 +33,8 @@ export interface PreviewMeta { // ── Phase 1 limits ──────────────────────────────────────────────────────────── export const FILE_TRANSFER_LIMITS = { - /** Maximum single file size in bytes (100 MB). */ - MAX_FILE_SIZE: 100 * 1024 * 1024, + /** Maximum single file size in bytes (2 GB). */ + MAX_FILE_SIZE: 2 * 1024 * 1024 * 1024, /** Server waits this long for daemon upload ack (ms). */ UPLOAD_TIMEOUT_MS: 300_000, /** Server waits this long for daemon download response (ms). 
*/ diff --git a/src/agent/providers/qwen.ts b/src/agent/providers/qwen.ts index 309497262..3e03a18ac 100644 --- a/src/agent/providers/qwen.ts +++ b/src/agent/providers/qwen.ts @@ -32,6 +32,8 @@ import { normalizeTransportCwd, resolveExecutableForSpawn } from '../transport-p const execFileAsync = promisify(execFile); const QWEN_BIN = 'qwen'; +const TRANSIENT_RETRY_DELAY_MS = 250; +const TRANSIENT_RETRY_MAX_ATTEMPTS = 1; /** * Auth types accepted by the qwen CLI's `--auth-type` flag. @@ -353,6 +355,7 @@ export class QwenProvider implements TransportProvider { _attachments?: TransportAttachment[], extraSystemPrompt?: string, allowResumeFallback = true, + transientRetryBudget = TRANSIENT_RETRY_MAX_ATTEMPTS, ): Promise { if (!this.config) { throw this.makeError(PROVIDER_ERROR_CODES.CONNECTION_LOST, 'Qwen provider not connected', false); @@ -444,6 +447,26 @@ export class QwenProvider implements TransportProvider { let completed = false; let sawError = false; let stderrBuf = ''; + let retryScheduled = false; + + const sawVisibleTurnProgress = (): boolean => { + return state.currentText.length > 0 + || !!state.pendingFinalText + || state.toolUseById.size > 0 + || state.emittedToolSignatures.size > 0; + }; + + const maybeRetryTransientError = async (messageText: string, details?: unknown): Promise => { + if (retryScheduled || transientRetryBudget <= 0) return false; + if (sawVisibleTurnProgress()) return false; + if (!this.isRetryableTransientError(messageText)) return false; + retryScheduled = true; + state.child = null; + logger.info({ provider: this.id, sessionId, message: messageText }, 'Qwen transient provider error; retrying turn once'); + await new Promise((resolve) => setTimeout(resolve, TRANSIENT_RETRY_DELAY_MS)); + await this.send(sessionId, payload, _attachments, extraSystemPrompt, allowResumeFallback, transientRetryBudget - 1); + return true; + }; const emitError = (messageText: string, details?: unknown): void => { if (sawError || completed) return; @@ -667,7 
+690,10 @@ export class QwenProvider implements TransportProvider { if (payload.type === 'result') { this.clearStatus(sessionId, state); if (payload.is_error) { - emitError(payload.error?.message || stderrBuf || 'Qwen execution failed', payload); + const errorText = payload.error?.message || stderrBuf || 'Qwen execution failed'; + void maybeRetryTransientError(errorText, payload).then((retried) => { + if (!retried) emitError(errorText, payload); + }); return; } const resultText = typeof payload.result === 'string' && payload.result.trim() @@ -718,7 +744,10 @@ export class QwenProvider implements TransportProvider { }); return; } - emitError(stderrBuf.trim() || `Qwen exited with code ${code ?? 'null'}${signal ? ` (${signal})` : ''}`); + const errorText = stderrBuf.trim() || `Qwen exited with code ${code ?? 'null'}${signal ? ` (${signal})` : ''}`; + void maybeRetryTransientError(errorText, { code, signal, stderr: stderrBuf }).then((retried) => { + if (!retried) emitError(errorText); + }); } }); @@ -730,7 +759,9 @@ export class QwenProvider implements TransportProvider { // uncaughtException and crash the daemon. 
child.on('error', (err) => { logger.error({ provider: this.id, err }, 'Qwen child process error'); - emitError(err.message, err); + void maybeRetryTransientError(err.message, err).then((retried) => { + if (!retried) emitError(err.message, err); + }); }); } @@ -758,6 +789,10 @@ export class QwenProvider implements TransportProvider { return { code, message, recoverable, details }; } + private isRetryableTransientError(message: string): boolean { + return /premature close|fetch failed|connection error|socket hang up|econnreset|etimedout|network error/i.test(message); + } + private emitStatus(sessionId: string, state: QwenSessionState, status: ProviderStatusUpdate): void { const signature = JSON.stringify({ status: status.status, diff --git a/src/agent/session-manager.ts b/src/agent/session-manager.ts index 66125ad74..62a54ddcb 100644 --- a/src/agent/session-manager.ts +++ b/src/agent/session-manager.ts @@ -9,7 +9,7 @@ import type { AgentDriver } from './drivers/base.js'; import type { AgentType } from './detect.js'; import { isTransportAgent } from './detect.js'; import { RUNTIME_TYPES } from './session-runtime.js'; -import { TransportSessionRuntime } from './transport-session-runtime.js'; +import { TransportSessionRuntime, type PendingTransportMessage } from './transport-session-runtime.js'; import { ensureProviderConnected, getProvider } from './provider-registry.js'; import type { SessionInfoUpdate } from './transport-provider.js'; import { setupCCStopHook } from './signal.js'; @@ -45,7 +45,7 @@ import { getAgentVersion } from './agent-version.js'; import { repoCache } from '../repo/cache.js'; import { closeSingleSession, collectProjectCloseTargets, type CloseFailure, type CloseTreeResult } from './session-close.js'; import { cleanupKnownTestTerminalSessions } from './startup-test-session-cleanup.js'; -import { clearResend, drainResend, getResendCount } from '../daemon/transport-resend-queue.js'; +import { clearResend, drainResend, enqueueResend, getResendCount, 
getResendEntries } from '../daemon/transport-resend-queue.js'; /** Start JSONL watcher for a CC session — uses specific file if ccSessionId known, else directory scan. */ function startCCWatcher(sessionName: string, projectDir: string, ccSessionId?: string): void { @@ -927,6 +927,111 @@ export async function relaunchSessionWithSettings( /** In-memory map of active transport session runtimes */ const transportRuntimes = new Map(); +const transportErrorRecoveryInFlight = new Map>(); +const transportErrorRecoveryTimestamps = new Map(); + +function queueTransportErrorResendEntries(sessionName: string, entries: PendingTransportMessage[]): number { + if (entries.length === 0) return getResendCount(sessionName); + const existingCommandIds = new Set(getResendEntries(sessionName).map((entry) => entry.commandId)); + for (const entry of entries) { + if (existingCommandIds.has(entry.clientMessageId)) continue; + enqueueResend(sessionName, { + text: entry.text, + commandId: entry.clientMessageId, + ...(entry.attachments?.length ? { attachments: entry.attachments } : {}), + queuedAt: Date.now(), + }); + existingCommandIds.add(entry.clientMessageId); + } + return getResendCount(sessionName); +} + +async function recoverTransportRuntimeAfterError( + sessionName: string, + runtime: TransportSessionRuntime, +): Promise { + const existingRecovery = transportErrorRecoveryInFlight.get(sessionName); + if (existingRecovery) return existingRecovery; + + const recovery = (async () => { + const record = getSession(sessionName); + if (!record || record.runtimeType !== RUNTIME_TYPES.TRANSPORT || !isTransportAgent(record.agentType as AgentType)) { + return false; + } + + const now = Date.now(); + const windowStart = now - RESTART_WINDOW_MS; + const recentRecoveries = (transportErrorRecoveryTimestamps.get(sessionName) ?? 
[]).filter((ts) => ts > windowStart); + if (recentRecoveries.length >= MAX_RESTARTS) { + logger.error({ sessionName }, 'Transport error recovery loop detected — refusing auto-restart'); + timelineEmitter.emit(sessionName, 'assistant.text', { + text: `⚠️ Transport recovery stopped after ${MAX_RESTARTS} automatic restart attempts in 5 minutes.`, + streaming: false, + memoryExcluded: true, + }, { source: 'daemon', confidence: 'high' }); + return false; + } + transportErrorRecoveryTimestamps.set(sessionName, [...recentRecoveries, now]); + + const failedEntries = runtime.activeDispatchEntries; + const pendingCount = queueTransportErrorResendEntries(sessionName, failedEntries); + if (pendingCount > 0) { + const queued = getResendEntries(sessionName); + timelineEmitter.emit(sessionName, 'assistant.text', { + text: `⏳ Provider error detected — restarting and auto-resending ${pendingCount} queued message${pendingCount === 1 ? '' : 's'}.`, + streaming: false, + memoryExcluded: true, + }, { source: 'daemon', confidence: 'high' }); + timelineEmitter.emit(sessionName, 'session.state', { + state: 'queued', + pendingCount, + pendingMessages: queued.map((entry) => entry.text), + pendingMessageEntries: queued.map((entry) => ({ clientMessageId: entry.commandId, text: entry.text })), + }, { source: 'daemon', confidence: 'high' }); + } + + await stopTransportRuntimeSession(sessionName).catch((err) => { + logger.warn({ err, sessionName }, 'Failed to stop errored transport runtime before auto-restart'); + }); + + await launchTransportSession({ + name: record.name, + projectName: record.projectName, + role: record.role, + agentType: record.agentType as AgentType, + projectDir: record.projectDir, + label: record.label, + description: record.description, + requestedModel: record.requestedModel, + effort: record.effort, + transportConfig: record.transportConfig, + ccPreset: (record.agentType === 'claude-code-sdk' || record.agentType === 'qwen') ? 
record.ccPreset : undefined, + ...(record.agentType === 'claude-code-sdk' && record.ccSessionId ? { ccSessionId: record.ccSessionId } : {}), + ...(record.agentType === 'codex-sdk' && record.codexSessionId ? { codexSessionId: record.codexSessionId } : {}), + ...((record.agentType === 'cursor-headless' || record.agentType === 'copilot-sdk') && record.providerResumeId + ? { providerResumeId: record.providerResumeId } + : {}), + ...(record.agentType === 'openclaw' && record.providerSessionId ? { bindExistingKey: record.providerSessionId } : {}), + ...(record.agentType === 'qwen' && record.providerSessionId ? { bindExistingKey: record.providerSessionId } : {}), + ...(record.parentSession ? { parentSession: record.parentSession } : {}), + ...(record.userCreated ? { userCreated: true } : {}), + }); + return true; + })().catch((err) => { + logger.error({ err, sessionName }, 'Transport auto-restart after error failed'); + timelineEmitter.emit(sessionName, 'assistant.text', { + text: `⚠️ Auto-restart failed: ${err instanceof Error ? err.message : String(err)}`, + streaming: false, + memoryExcluded: true, + }, { source: 'daemon', confidence: 'high' }); + return false; + }).finally(() => { + transportErrorRecoveryInFlight.delete(sessionName); + }); + + transportErrorRecoveryInFlight.set(sessionName, recovery); + return recovery; +} /** Wire up onStatusChange and onDrain callbacks for a transport runtime. 
*/ function wireTransportCallbacks(runtime: TransportSessionRuntime, sessionName: string): void { @@ -947,6 +1052,9 @@ function wireTransportCallbacks(runtime: TransportSessionRuntime, sessionName: s payload.pendingMessageEntries = runtime.pendingEntries; } timelineEmitter.emit(sessionName, 'session.state', payload, { source: 'daemon', confidence: 'high' }); + if (status === 'error') { + void recoverTransportRuntimeAfterError(sessionName, runtime); + } }; runtime.onDrain = (messages, merged, count) => { for (const entry of messages) { @@ -1227,14 +1335,9 @@ export async function restoreTransportSessions(providerId: string): Promise 0 && !availableQwenModels.includes(effectiveRequestedModel))) { + effectiveRequestedModel = presetConfig.model ?? availableQwenModels[0] ?? effectiveRequestedModel; } transportSettings = presetConfig.settings; // Override the qwen CLI's built-in "I am Qwen Code" identity with the @@ -1440,27 +1543,22 @@ export async function launchTransportSession(opts: LaunchOpts): Promise { }); const contextBootstrap = await resolveRuntimeContextBootstrap(); runtime.setContextBootstrapResolver(resolveRuntimeContextBootstrap); - if (agentType === 'qwen') { - const qwenRuntime = await getQwenRuntimeConfig().catch(() => null); - qwenAuthType = qwenRuntime?.authType; - qwenAuthLimit = qwenRuntime?.authLimit; - availableQwenModels = qwenRuntime?.availableModels ?? []; - if (effectiveCcPreset) { - const { getQwenPresetTransportConfig } = await import('../daemon/cc-presets.js'); - const presetConfig = await getQwenPresetTransportConfig(effectiveCcPreset); - transportEnv = { ...(transportEnv ?? {}), ...presetConfig.env }; - // Preset is authoritative — its model overrides any stored/requested - // model, and we restrict the available list so the fallback below can't - // revert to the OAuth placeholder (`coder-model`). 
We're spawning qwen - // with `--auth-type anthropic` against a BYO API key, so the OAuth tier - // labels ("Free", "No longer available") don't apply — clear them. - if (presetConfig.model) { - requestedTransportModel = presetConfig.model; - availableQwenModels = [presetConfig.model]; - } - presetContextWindow = presetConfig.contextWindow; - if (presetConfig.settings) transportSettings = presetConfig.settings; - if (presetConfig.systemPrompt) transportSystemPrompt = presetConfig.systemPrompt; + if (agentType === 'qwen') { + const qwenRuntime = await getQwenRuntimeConfig().catch(() => null); + qwenAuthType = qwenRuntime?.authType; + qwenAuthLimit = qwenRuntime?.authLimit; + availableQwenModels = qwenRuntime?.availableModels ?? []; + if (effectiveCcPreset) { + const { getQwenPresetTransportConfig } = await import('../daemon/cc-presets.js'); + const presetConfig = await getQwenPresetTransportConfig(effectiveCcPreset); + transportEnv = { ...(transportEnv ?? {}), ...presetConfig.env }; + if (presetConfig.availableModels?.length) availableQwenModels = presetConfig.availableModels; + if (!requestedTransportModel || (availableQwenModels.length > 0 && !availableQwenModels.includes(requestedTransportModel))) { + requestedTransportModel = presetConfig.model ?? availableQwenModels[0] ?? requestedTransportModel; + } + presetContextWindow = presetConfig.contextWindow; + if (presetConfig.settings) transportSettings = presetConfig.settings; + if (presetConfig.systemPrompt) transportSystemPrompt = presetConfig.systemPrompt; qwenAuthType = QWEN_AUTH_TYPES.API_KEY; qwenAuthLimit = undefined; } diff --git a/src/agent/transport-session-runtime.ts b/src/agent/transport-session-runtime.ts index 7e9bc276f..21398e822 100644 --- a/src/agent/transport-session-runtime.ts +++ b/src/agent/transport-session-runtime.ts @@ -95,6 +95,8 @@ export class TransportSessionRuntime implements SessionRuntime { /** Messages queued while a turn is in flight. Drained and merged on turn completion. 
*/ private _pendingMessages: PendingTransportMessage[] = []; + /** Original message entries for the currently in-flight dispatch. */ + private _activeDispatchEntries: PendingTransportMessage[] = []; /** Callback fired when pending messages are drained into a new turn. */ private _onDrain?: (messages: PendingTransportMessage[], mergedMessage: string, count: number) => void; @@ -120,6 +122,7 @@ export class TransportSessionRuntime implements SessionRuntime { this._history.push(message); this._activeTurn?.resolve(); this._activeTurn = null; + this._activeDispatchEntries = []; // Drain pending messages before transitioning to idle. // If there are queued messages, merge and send — status stays running. if (!this._drainPending()) { @@ -134,7 +137,10 @@ export class TransportSessionRuntime implements SessionRuntime { // Only drain pending on recoverable/cancel errors — unrecoverable errors // (auth failure, provider down) would just fail again and consume queued messages. const canDrain = error.code === 'CANCELLED' || error.recoverable; - if (canDrain && this._drainPending()) return; + if (canDrain) { + this._activeDispatchEntries = []; + if (this._drainPending()) return; + } this.setStatus(error.code === 'CANCELLED' ? 'idle' : 'error'); }), ...(this.provider.onSessionInfo ? [this.provider.onSessionInfo((sid: string, info: SessionInfoUpdate) => { @@ -188,6 +194,8 @@ export class TransportSessionRuntime implements SessionRuntime { get pendingMessages(): string[] { return this._pendingMessages.map((entry) => entry.text); } /** Snapshot of queued messages waiting to be drained (stable entity ids for UI/edit/undo). */ get pendingEntries(): PendingTransportMessage[] { return this._pendingMessages.map((entry) => ({ ...entry })); } + /** Snapshot of the message entries currently being dispatched. 
*/ + get activeDispatchEntries(): PendingTransportMessage[] { return this._activeDispatchEntries.map((entry) => ({ ...entry })); } setContextBootstrapResolver( resolver: (() => Promise) | undefined, @@ -253,16 +261,18 @@ export class TransportSessionRuntime implements SessionRuntime { throw new Error('TransportSessionRuntime not initialized — call initialize() first'); } + const entry: PendingTransportMessage = { + clientMessageId: clientMessageId ?? randomUUID(), + text: message, + ...(attachments?.length ? { attachments } : {}), + }; + if (this._sending) { - this._pendingMessages.push({ - clientMessageId: clientMessageId ?? randomUUID(), - text: message, - ...(attachments?.length ? { attachments } : {}), - }); + this._pendingMessages.push(entry); return 'queued'; } - this._dispatchTurn(message, clientMessageId, attachments); + this._dispatchTurn(message, entry.clientMessageId, attachments, [entry]); return 'sent'; } @@ -307,6 +317,7 @@ export class TransportSessionRuntime implements SessionRuntime { this.setStatus('idle'); this._sending = false; this._activeTurn = null; + this._activeDispatchEntries = []; this._pendingMessages = []; // Per-session memory injection history is daemon-scoped to this session; // a kill ends that scope. clear() is called on session.clear separately. @@ -324,7 +335,12 @@ export class TransportSessionRuntime implements SessionRuntime { } /** Dispatch a single turn to the provider. Assumes _sending is false. 
*/ - private _dispatchTurn(message: string, clientMessageId?: string, attachments?: TransportAttachment[]): void { + private _dispatchTurn( + message: string, + clientMessageId?: string, + attachments?: TransportAttachment[], + dispatchedEntries?: PendingTransportMessage[], + ): void { this._history.push({ id: randomUUID(), sessionId: this._providerSessionId!, @@ -337,6 +353,11 @@ export class TransportSessionRuntime implements SessionRuntime { this.setStatus('thinking'); this._sending = true; + this._activeDispatchEntries = (dispatchedEntries ?? [{ + clientMessageId: clientMessageId ?? randomUUID(), + text: message, + ...(attachments?.length ? { attachments } : {}), + }]).map((entry) => ({ ...entry })); let resolve!: () => void; let reject!: (err: ProviderError) => void; @@ -429,6 +450,8 @@ export class TransportSessionRuntime implements SessionRuntime { : { code: 'PROVIDER_ERROR', message: String(err), recoverable: false }), ); this._activeTurn = null; + // Preserve the in-flight payload so session-manager can replay it + // after automatically rebuilding the transport runtime. // Don't drain on async send failure — the provider is likely broken. }); } @@ -454,6 +477,7 @@ export class TransportSessionRuntime implements SessionRuntime { merged, messages.length === 1 ? messages[0]?.clientMessageId : undefined, attachments.length > 0 ? attachments : undefined, + messages, ); return true; } diff --git a/src/daemon/cc-presets.ts b/src/daemon/cc-presets.ts index c88dd856b..0e1964c03 100644 --- a/src/daemon/cc-presets.ts +++ b/src/daemon/cc-presets.ts @@ -10,19 +10,11 @@ import { promises as fs } from 'node:fs'; import { join } from 'node:path'; import { homedir } from 'node:os'; +import type { CcPreset, CcPresetModelInfo } from '../../shared/cc-presets.js'; import logger from '../util/logger.js'; const PRESETS_PATH = join(homedir(), '.imcodes', 'cc-presets.json'); -export interface CcPreset { - name: string; - env: Record; - /** Context window size for this model (e.g. 
200000, 1000000). Used for UI progress bar accuracy. */ - contextWindow?: number; - /** Message injected into the session after launch (e.g. search instructions for non-Anthropic providers). */ - initMessage?: string; -} - let cachedPresets: CcPreset[] | null = null; /** ccSessionId → contextWindow (set when preset env is resolved for a session). */ @@ -36,11 +28,66 @@ const MODEL_ALIASES = [ 'ANTHROPIC_DEFAULT_HAIKU_MODEL', ]; +function normalizePresetModel(raw: unknown): CcPresetModelInfo | null { + if (typeof raw === 'string') { + const id = raw.trim(); + return id ? { id } : null; + } + if (!raw || typeof raw !== 'object') return null; + const record = raw as Record; + const id = typeof record.id === 'string' ? record.id.trim() : ''; + if (!id) return null; + const name = typeof record.name === 'string' ? record.name.trim() : ''; + return name ? { id, name } : { id }; +} + +function normalizePreset(raw: unknown): CcPreset | null { + if (!raw || typeof raw !== 'object') return null; + const record = raw as Record; + const name = typeof record.name === 'string' ? record.name.trim() : ''; + if (!name) return null; + const envRecord = record.env && typeof record.env === 'object' + ? Object.entries(record.env as Record).reduce>((acc, [key, value]) => { + if (typeof value === 'string') acc[key] = value; + return acc; + }, {}) + : {}; + const availableModels = Array.isArray(record.availableModels) + ? record.availableModels + .map((item) => normalizePresetModel(item)) + .filter((item): item is CcPresetModelInfo => item !== null) + : undefined; + const defaultModel = typeof record.defaultModel === 'string' + ? record.defaultModel.trim() + : ''; + return { + name, + env: envRecord, + ...(typeof record.contextWindow === 'number' ? { contextWindow: record.contextWindow } : {}), + ...(typeof record.initMessage === 'string' ? { initMessage: record.initMessage } : {}), + ...(record.transportMode === 'qwen-compatible-api' || record.transportMode === 'claude-cli-preset' + ? 
{ transportMode: record.transportMode } + : {}), + ...(record.authType === 'anthropic' ? { authType: record.authType } : {}), + ...(availableModels?.length ? { availableModels } : {}), + ...(defaultModel ? { defaultModel } : {}), + ...(typeof record.lastDiscoveredAt === 'number' ? { lastDiscoveredAt: record.lastDiscoveredAt } : {}), + ...(typeof record.modelDiscoveryError === 'string' ? { modelDiscoveryError: record.modelDiscoveryError } : {}), + }; +} + +function normalizePresets(raw: unknown): CcPreset[] { + if (!Array.isArray(raw)) return []; + return raw + .map((item) => normalizePreset(item)) + .filter((item): item is CcPreset => item !== null); +} + export async function loadPresets(): Promise { if (cachedPresets) return cachedPresets; try { const raw = await fs.readFile(PRESETS_PATH, 'utf8'); - cachedPresets = JSON.parse(raw) as CcPreset[]; + cachedPresets = normalizePresets(JSON.parse(raw)); return cachedPresets; } catch { cachedPresets = []; @@ -49,8 +96,8 @@ export async function loadPresets(): Promise { } export async function savePresets(presets: CcPreset[]): Promise { - cachedPresets = presets; - await fs.writeFile(PRESETS_PATH, JSON.stringify(presets, null, 2), 'utf8'); + cachedPresets = normalizePresets(presets); + await fs.writeFile(PRESETS_PATH, JSON.stringify(cachedPresets, null, 2), 'utf8'); } function normalizePresetName(name: string): string { @@ -63,6 +110,20 @@ export async function getPreset(name: string): Promise { return presets.find((p) => normalizePresetName(p.name) === normalized); } +export function getPresetEffectiveModel(preset: Pick): string | undefined { + const model = preset.defaultModel?.trim() || preset.env['ANTHROPIC_MODEL']?.trim() || ''; + return model || undefined; +} + +export function getPresetAvailableModelIds(preset: Pick): string[] { + const discovered = preset.availableModels + ?.map((item) => item.id.trim()) + .filter(Boolean) ?? 
[]; + if (discovered.length > 0) return [...new Set(discovered)]; + const fallback = getPresetEffectiveModel(preset); + return fallback ? [fallback] : []; +} + /** * Resolve a preset name to env vars ready for session launch. * Auto-fills MODEL_ALIASES from ANTHROPIC_MODEL if set. @@ -76,6 +137,8 @@ export async function resolvePresetEnv(presetName: string, ccSessionId?: string) if (env['ANTHROPIC_AUTH_TOKEN'] && !env['ANTHROPIC_API_KEY']) { env['ANTHROPIC_API_KEY'] = env['ANTHROPIC_AUTH_TOKEN']; } + const effectiveModel = getPresetEffectiveModel(preset); + if (effectiveModel) env['ANTHROPIC_MODEL'] = effectiveModel; // Auto-fill model aliases from ANTHROPIC_MODEL if (env['ANTHROPIC_MODEL']) { for (const alias of MODEL_ALIASES) { @@ -100,7 +163,7 @@ export async function getPresetTransportOverrides(presetName: string): Promise<{ const preset = await getPreset(presetName); if (!preset) return {}; const env = await resolvePresetEnv(presetName); - const configuredModel = env['ANTHROPIC_MODEL']?.trim() || undefined; + const configuredModel = getPresetEffectiveModel(preset); const configuredBaseUrl = env['ANTHROPIC_BASE_URL']?.trim() || undefined; const runtimeFacts = [ `Authoritative runtime fact: this session is using the Claude Code preset "${preset.name}".`, @@ -122,6 +185,7 @@ export async function getQwenPresetTransportConfig(presetName: string): Promise< env: Record; settings?: Record; model?: string; + availableModels?: string[]; systemPrompt?: string; contextWindow?: number; }> { @@ -129,7 +193,8 @@ export async function getQwenPresetTransportConfig(presetName: string): Promise< if (!preset) return { env: {} }; const resolvedEnv = await resolvePresetEnv(presetName); - const model = resolvedEnv['ANTHROPIC_MODEL']?.trim() || undefined; + const availableModels = getPresetAvailableModelIds(preset); + const model = getPresetEffectiveModel(preset) ?? 
availableModels[0]; const baseUrl = resolvedEnv['ANTHROPIC_BASE_URL']?.trim() || undefined; const apiKey = resolvedEnv['ANTHROPIC_API_KEY']?.trim() || resolvedEnv['ANTHROPIC_AUTH_TOKEN']?.trim() @@ -150,7 +215,8 @@ export async function getQwenPresetTransportConfig(presetName: string): Promise< } if (model) env['ANTHROPIC_MODEL'] = model; - const settings: Record | undefined = (baseUrl && apiKey && model) + const providerModels = availableModels.length > 0 ? availableModels : (model ? [model] : []); + const settings: Record | undefined = (baseUrl && apiKey && providerModels.length > 0) ? { security: { auth: { @@ -158,24 +224,22 @@ export async function getQwenPresetTransportConfig(presetName: string): Promise< }, }, model: { - name: model, + name: model ?? providerModels[0], }, modelProviders: { - anthropic: [ - { - id: model, - name: preset.name, - envKey: 'ANTHROPIC_API_KEY', - baseUrl, - ...(preset.contextWindow - ? { - generationConfig: { - contextWindowSize: preset.contextWindow, - }, - } - : {}), - }, - ], + anthropic: providerModels.map((providerModelId) => ({ + id: providerModelId, + name: preset.availableModels?.find((item) => item.id === providerModelId)?.name?.trim() || providerModelId, + envKey: 'ANTHROPIC_API_KEY', + baseUrl, + ...(preset.contextWindow + ? { + generationConfig: { + contextWindowSize: preset.contextWindow, + }, + } + : {}), + })), }, } : undefined; @@ -201,11 +265,91 @@ export async function getQwenPresetTransportConfig(presetName: string): Promise< env, ...(settings ? { settings } : {}), ...(model ? { model } : {}), + ...(availableModels.length ? { availableModels } : {}), ...(runtimeFacts ? { systemPrompt: runtimeFacts } : {}), ...(preset.contextWindow ? 
{ contextWindow: preset.contextWindow } : {}), }; } +function getDiscoveryCandidates(baseUrl: string): string[] { + const trimmed = baseUrl.trim().replace(/\/+$/, ''); + if (!trimmed) return []; + const candidates = new Set(); + if (trimmed.endsWith('/models')) { + candidates.add(trimmed); + } else { + candidates.add(`${trimmed}/models`); + if (!/\/v\d+(?:$|\/)/.test(trimmed)) candidates.add(`${trimmed}/v1/models`); + } + return [...candidates]; +} + +function parseDiscoveredModels(payload: unknown): CcPresetModelInfo[] { + const record = payload && typeof payload === 'object' ? payload as Record : {}; + const rawModels = Array.isArray(record.data) + ? record.data + : Array.isArray(record.models) + ? record.models + : []; + const seen = new Set(); + const models: CcPresetModelInfo[] = []; + for (const item of rawModels) { + if (!item || typeof item !== 'object') continue; + const model = item as Record; + const id = typeof model.id === 'string' ? model.id.trim() : ''; + if (!id || seen.has(id)) continue; + const displayName = typeof model.display_name === 'string' + ? model.display_name.trim() + : typeof model.name === 'string' + ? model.name.trim() + : ''; + seen.add(id); + models.push(displayName ? 
{ id, name: displayName } : { id }); + } + return models; +} + +export async function discoverPresetModels(preset: CcPreset): Promise<{ + availableModels: CcPresetModelInfo[]; + defaultModel?: string; + endpoint: string; +}> { + const env = { ...preset.env }; + const baseUrl = env['ANTHROPIC_BASE_URL']?.trim() || ''; + const apiKey = env['ANTHROPIC_API_KEY']?.trim() || env['ANTHROPIC_AUTH_TOKEN']?.trim() || ''; + if (!baseUrl) throw new Error('Preset is missing ANTHROPIC_BASE_URL'); + if (!apiKey) throw new Error('Preset is missing ANTHROPIC_API_KEY / ANTHROPIC_AUTH_TOKEN'); + + let lastError: Error | null = null; + for (const endpoint of getDiscoveryCandidates(baseUrl)) { + try { + const response = await fetch(endpoint, { + headers: { + 'x-api-key': apiKey, + 'anthropic-version': '2023-06-01', + accept: 'application/json', + }, + }); + if (!response.ok) { + throw new Error(`HTTP ${response.status} ${response.statusText}`.trim()); + } + const payload = await response.json() as unknown; + const availableModels = parseDiscoveredModels(payload); + if (availableModels.length === 0) { + throw new Error('No models returned by compatible API'); + } + const existingModel = getPresetEffectiveModel(preset); + const defaultModel = availableModels.some((item) => item.id === existingModel) + ? existingModel + : (availableModels[0]?.id ?? undefined); + return { availableModels, defaultModel, endpoint }; + } catch (error) { + lastError = error instanceof Error ? error : new Error(String(error)); + } + } + throw lastError ?? new Error('Failed to discover models'); +} + /** Default init message for non-Anthropic providers (no native web search). */ const DEFAULT_INIT_MESSAGE = 'For web searches, use: curl -s "https://html.duckduckgo.com/html/?q=QUERY" | head -200. 
Replace QUERY with URL-encoded search terms.'; diff --git a/src/daemon/command-handler.ts b/src/daemon/command-handler.ts index d277280ee..979c3f9e7 100644 --- a/src/daemon/command-handler.ts +++ b/src/daemon/command-handler.ts @@ -60,6 +60,7 @@ import { getClaudeSdkRuntimeConfig, normalizeClaudeSdkModelForProvider } from '. import { getCodexRuntimeConfig } from '../agent/codex-runtime-config.js'; import { P2P_TERMINAL_RUN_STATUSES } from '../../shared/p2p-status.js'; import { DAEMON_MSG } from '../../shared/daemon-events.js'; +import { CC_PRESET_MSG, type CcPreset } from '../../shared/cc-presets.js'; import { MEMORY_WS } from '../../shared/memory-ws.js'; import { P2P_CONFIG_ERROR, P2P_CONFIG_MSG } from '../../shared/p2p-config-events.js'; import { DAEMON_COMMAND_TYPES } from '../../shared/daemon-command-types.js'; @@ -168,10 +169,14 @@ function emitCommandAckReliable( * is computed FRESH (same as buildSessionList for main sessions) rather than * reading stale values from the session store. */ -async function buildSubSessionSync(id: string, overrides?: Partial): Promise> { +async function buildSubSessionSync(id: string, overrides?: Partial): Promise | null> { const sessionName = subSessionName(id); const record = getSession(sessionName); const r = { ...record, ...overrides }; + if (!r?.agentType) { + logger.warn({ id, sessionName }, 'Skipping subsession.sync without agentType'); + return null; + } // Compute transport display metadata fresh — matches session-list.ts hydration logic. // The session store may have stale or missing metadata during early launch/update windows. @@ -199,7 +204,7 @@ async function buildSubSessionSync(id: string, overrides?: Partial, +): Promise { + const payload = await buildSubSessionSync(id, overrides); + if (!payload) return; + serverLink.send(payload); +} + function normalizeTransportConfigUpdate(value: unknown): Record | undefined { return value && typeof value === 'object' && !Array.isArray(value) ? 
value as Record @@ -300,7 +315,7 @@ async function handleSubSessionTransportConfigUpdate(cmd: Record { if (!sessionName.startsWith('deck_sub_')) return; const subId = sessionName.slice('deck_sub_'.length); - try { serverLink.send(await buildSubSessionSync(subId)); } catch { /* ignore */ } + try { await sendSubSessionSync(serverLink, subId); } catch { /* ignore */ } } /** @@ -543,9 +558,7 @@ function refreshQwenQuotaUsageLabels(serverLink?: ServerLink): void { // Re-sync sub-sessions so their quota usage labels update in the browser if (session.name.startsWith('deck_sub_')) { const subId = session.name.replace(/^deck_sub_/, ''); - if (serverLink) void buildSubSessionSync(subId).then((payload) => { - serverLink.send(payload); - }).catch(() => { /* not connected */ }); + if (serverLink) void sendSubSessionSync(serverLink, subId).catch(() => { /* not connected */ }); } } if (serverLink) void handleGetSessions(serverLink); @@ -567,7 +580,7 @@ export async function refreshCodexQuotaMetadata(serverLink?: ServerLink): Promis if (!session.name.startsWith('deck_sub_')) continue; const subId = session.name.replace(/^deck_sub_/, ''); try { - serverLink.send(await buildSubSessionSync(subId)); + await sendSubSessionSync(serverLink, subId); } catch { // not connected } @@ -974,12 +987,8 @@ export function handleWebCommand(msg: unknown, serverLink: ServerLink): void { upsertSession({ ...record, label: nextLabel, updatedAt: Date.now() }); logger.info({ sessionName: sName, label }, 'subsession.rename: label updated'); const id = sName.replace(/^deck_sub_/, ''); - void buildSubSessionSync(id, { label: nextLabel }).then((payload) => { - try { - serverLink.send(payload); - } catch { - // not connected - } + void sendSubSessionSync(serverLink, id, { label: nextLabel }).catch(() => { + // not connected }); } } @@ -1094,12 +1103,15 @@ export function handleWebCommand(msg: unknown, serverLink: ServerLink): void { case 'p2p.status': void handleP2pStatus(cmd, serverLink); break; - case 
'cc.presets.list': + case CC_PRESET_MSG.LIST: void handleCcPresetsList(serverLink); break; - case 'cc.presets.save': + case CC_PRESET_MSG.SAVE: void handleCcPresetsSave(cmd, serverLink); break; + case CC_PRESET_MSG.DISCOVER_MODELS: + void handleCcPresetsDiscoverModels(cmd, serverLink); + break; case SHARED_CONTEXT_RUNTIME_CONFIG_MSG.APPLY: void handleSharedContextRuntimeConfigApply(cmd); break; @@ -2889,7 +2901,7 @@ async function handleTimelineHistory(cmd: Record, serverLink: S // Do NOT filter by epoch — history should include events across daemon restarts. const readLimit = Math.min(limit * 6, 10000); const tRead0 = Date.now(); - const events = timelineStore.read(sessionName, { limit: readLimit, afterTs, beforeTs }); + const events = await timelineStore.readPreferred(sessionName, { limit: readLimit, afterTs, beforeTs }); readMs = Date.now() - tRead0; // Content-aware limit: session.state events don't count toward the budget. @@ -3038,7 +3050,7 @@ async function handleSubSessionStart(cmd: Record, serverLink: S }); // Sync to server DB try { - serverLink.send(await buildSubSessionSync(id)); + await sendSubSessionSync(serverLink, id); } catch { /* not connected */ } } catch (e: unknown) { logger.error({ err: e, id, type }, 'subsession.start failed (transport)'); @@ -3070,7 +3082,7 @@ async function handleSubSessionStart(cmd: Record, serverLink: S }); // Sync to server DB so frontend can see the sub-session try { - serverLink.send(await buildSubSessionSync(id)); + await sendSubSessionSync(serverLink, id); } catch { /* not connected */ } } catch (e: unknown) { logger.error({ err: e, id }, 'subsession.start failed'); @@ -3117,7 +3129,7 @@ async function handleSubSessionRestart(cmd: Record, serverLink: transportConfig: ('transportConfig' in cmd ? 
(cmd.transportConfig as Record | null) : undefined), }); try { - serverLink.send(await buildSubSessionSync(id)); + await sendSubSessionSync(serverLink, id); } catch { /* not connected */ } } catch (e: unknown) { logger.error({ err: e, sessionName: sName }, 'subsession.restart failed'); @@ -3135,7 +3147,7 @@ async function handleSubSessionRebuildAll(cmd: Record, serverLi await rebuildSubSessions(subSessions).catch((e: unknown) => logger.error({ err: e }, 'subsession.rebuild_all failed')); for (const sub of subSessions) { try { - serverLink.send(await buildSubSessionSync(sub.id)); + await sendSubSessionSync(serverLink, sub.id); } catch (e) { logger.warn({ err: e, id: sub.id }, 'Failed to sync rebuilt sub-session'); } @@ -3173,7 +3185,7 @@ async function handleSubSessionSetModel(cmd: Record, serverLink await startSubSession({ id, type: 'codex', cwd: cwd ?? null, codexModel: model }); // Sync restarted sub-session to server DB try { - serverLink.send(await buildSubSessionSync(id)); + await sendSubSessionSync(serverLink, id); } catch { /* not connected */ } } catch (e: unknown) { logger.error({ err: e, sessionName, model }, 'subsession.set_model restart failed'); @@ -3692,6 +3704,100 @@ async function handleFileSearch(cmd: Record, serverLink: Server } const FS_LIST_DEADLINE_MS = 10_000; +const FS_LIST_CACHE_TTL_MS = 5_000; + +interface FsLsSnapshot { + resolvedPath: string; + dirSignature: string; + entries: Array>; +} + +const fsListCache = new Map(); +const fsListInflight = new Map>(); +const fsListGenerations = new Map(); + +function getFsListCacheKey(realPath: string, includeFiles: boolean, includeMetadata: boolean): string { + return `${realPath}::${includeFiles ? 'files' : 'dirs'}::${includeMetadata ? 
'meta' : 'plain'}`; +} + +async function loadFsListSnapshot(real: string, includeFiles: boolean, includeMetadata: boolean): Promise { + const dirents = await fsReaddir(real, { withFileTypes: true }); + const filtered = dirents.filter((d) => d.isDirectory() || (includeFiles && d.isFile())); + + const entries = await Promise.all(filtered.map(async (d) => { + const entry: Record = { name: d.name, path: nodePath.join(real, d.name), isDir: d.isDirectory(), hidden: d.name.startsWith('.') }; + if (includeMetadata && !d.isDirectory()) { + try { + const filePath = nodePath.join(real, d.name); + const fileStat = await fsStat(filePath); + entry.size = fileStat.size; + const ext = nodePath.extname(d.name).toLowerCase().slice(1); + entry.mime = MIME_MAP[ext] || undefined; + const handle = createProjectFileHandle(filePath, d.name, entry.mime as string | undefined, fileStat.size); + entry.downloadId = handle.id; + } catch { /* stat failed, skip metadata */ } + } + return entry; + })); + + entries.sort((a, b) => { + if (a.isDir !== b.isDir) return a.isDir ? -1 : 1; + if (a.hidden !== b.hidden) return (a.hidden ? 1 : 0) - (b.hidden ? 
1 : 0); + return (a.name as string).localeCompare(b.name as string); + }); + + return { + resolvedPath: real, + dirSignature: await safeStatSignature(real), + entries, + }; +} + +async function getFsListSnapshot(real: string, includeFiles: boolean, includeMetadata: boolean): Promise { + const dirSignature = await safeStatSignature(real); + const cacheKey = getFsListCacheKey(real, includeFiles, includeMetadata); + const cached = fsListCache.get(cacheKey); + if (cached && cached.expiresAt > Date.now() && cached.value.dirSignature === dirSignature) { + return cached.value; + } + + const generation = getResourceGeneration(fsListGenerations, real); + const inflightKey = `${cacheKey}::${generation}`; + const inflight = fsListInflight.get(inflightKey); + if (inflight) return await inflight; + + const promise = loadFsListSnapshot(real, includeFiles, includeMetadata) + .then(async (value) => { + const currentSignature = await safeStatSignature(real); + if (getResourceGeneration(fsListGenerations, real) === generation && currentSignature === value.dirSignature) { + fsListCache.set(cacheKey, { value, expiresAt: Date.now() + FS_LIST_CACHE_TTL_MS }); + } + return value; + }) + .finally(() => { + fsListInflight.delete(inflightKey); + }); + fsListInflight.set(inflightKey, promise); + return await promise; +} + +function invalidateFsListCachesForPath(targetPath: string): void { + const realTarget = normalizeFsPath(targetPath); + bumpResourceGeneration(fsListGenerations, realTarget); + fsListCache.delete(getFsListCacheKey(realTarget, false, false)); + fsListCache.delete(getFsListCacheKey(realTarget, true, false)); + fsListCache.delete(getFsListCacheKey(realTarget, false, true)); + fsListCache.delete(getFsListCacheKey(realTarget, true, true)); + + const parent = nodePath.dirname(realTarget); + if (parent !== realTarget) { + bumpResourceGeneration(fsListGenerations, parent); + fsListCache.delete(getFsListCacheKey(parent, false, false)); + fsListCache.delete(getFsListCacheKey(parent, 
true, false)); + fsListCache.delete(getFsListCacheKey(parent, false, true)); + fsListCache.delete(getFsListCacheKey(parent, true, true)); + } +} async function handleFsList(cmd: Record, serverLink: ServerLink): Promise { const rawPath = cmd.path as string | undefined; @@ -3768,36 +3874,12 @@ async function handleFsListInner(resolved: string, rawPath: string, requestId: s return; } - const dirents = await fsReaddir(real, { withFileTypes: true }); - const filtered = dirents.filter((d) => d.isDirectory() || (includeFiles && d.isFile())); - - const entries = await Promise.all(filtered.map(async (d) => { - const entry: Record = { name: d.name, path: nodePath.join(real, d.name), isDir: d.isDirectory(), hidden: d.name.startsWith('.') }; - if (includeMetadata && !d.isDirectory()) { - try { - const filePath = nodePath.join(real, d.name); - const fileStat = await fsStat(filePath); - entry.size = fileStat.size; - const ext = nodePath.extname(d.name).toLowerCase().slice(1); - entry.mime = MIME_MAP[ext] || undefined; - // Generate a short-lived download handle - const handle = createProjectFileHandle(filePath, d.name, entry.mime as string | undefined, fileStat.size); - entry.downloadId = handle.id; - } catch { /* stat failed, skip metadata */ } - } - return entry; - })); - - entries.sort((a, b) => { - if (a.isDir !== b.isDir) return a.isDir ? -1 : 1; - if (a.hidden !== b.hidden) return (a.hidden ? 1 : 0) - (b.hidden ? 
1 : 0); - return (a.name as string).localeCompare(b.name as string); - }); + const snapshot = await getFsListSnapshot(real, includeFiles, includeMetadata); - try { serverLink.send({ type: 'fs.ls_response', requestId, path: rawPath, resolvedPath: real, status: 'ok', entries }); } catch { /* ignore */ } + try { serverLink.send({ type: 'fs.ls_response', requestId, path: rawPath, resolvedPath: snapshot.resolvedPath, status: 'ok', entries: snapshot.entries }); } catch { /* ignore */ } } -const FS_READ_SIZE_LIMIT = 512 * 1024; // 512 KB +const FS_READ_SIZE_LIMIT = 5 * 1024 * 1024; // 5 MB interface FsReadSnapshot { path: string; @@ -4485,6 +4567,7 @@ async function handleFsMkdir(cmd: Record, serverLink: ServerLin const { mkdir } = await import('fs/promises'); await mkdir(resolved, { recursive: true }); const real = await fsRealpath(resolved); + invalidateFsListCachesForPath(real); try { serverLink.send({ type: 'fs.mkdir_response', requestId, path: rawPath, resolvedPath: real, status: 'ok' }); } catch { /* ignore */ } } catch (err) { try { serverLink.send({ type: 'fs.mkdir_response', requestId, path: rawPath, status: 'error', error: err instanceof Error ? 
err.message : String(err) }); } catch { /* ignore */ } @@ -4553,6 +4636,7 @@ async function handleFsWrite(cmd: Record, serverLink: ServerLin // Write the file await fsWriteFile(real, content, 'utf-8'); const newStats = await fsStat(real); + invalidateFsListCachesForPath(real); invalidateGitCachesForPath(real); try { serverLink.send({ type: 'fs.write_response', requestId, path: rawPath, resolvedPath: real, status: 'ok', mtime: newStats.mtimeMs }); } catch { /* ignore */ } } catch (err) { @@ -4572,6 +4656,7 @@ async function handleFsWrite(cmd: Record, serverLink: ServerLin await fsWriteFile(resolved, content, 'utf-8'); const newStats = await fsStat(resolved); const real = await fsRealpath(resolved); + invalidateFsListCachesForPath(real); invalidateGitCachesForPath(real); try { serverLink.send({ type: 'fs.write_response', requestId, path: rawPath, resolvedPath: real, status: 'ok', mtime: newStats.mtimeMs }); } catch { /* ignore */ } } catch (err) { @@ -4769,16 +4854,88 @@ export async function listProviderSessions(providerId: string): Promise { const { loadPresets } = await import('./cc-presets.js'); const presets = await loadPresets(); - serverLink.send({ type: 'cc.presets.list_response', presets }); + serverLink.send({ type: CC_PRESET_MSG.LIST_RESPONSE, presets }); } async function handleCcPresetsSave(cmd: Record, serverLink: ServerLink): Promise { - const presets = cmd.presets as Array<{ name: string; env: Record }> | undefined; + const presets = cmd.presets as CcPreset[] | undefined; if (!presets) return; const { savePresets, invalidateCache } = await import('./cc-presets.js'); invalidateCache(); await savePresets(presets); - serverLink.send({ type: 'cc.presets.save_response', ok: true }); + serverLink.send({ type: CC_PRESET_MSG.SAVE_RESPONSE, ok: true }); +} + +async function handleCcPresetsDiscoverModels(cmd: Record, serverLink: ServerLink): Promise { + const requestId = typeof cmd.requestId === 'string' ? 
cmd.requestId : undefined; + const presetName = typeof cmd.presetName === 'string' ? cmd.presetName.trim() : ''; + if (!presetName) { + serverLink.send({ + type: CC_PRESET_MSG.DISCOVER_MODELS_RESPONSE, + ...(requestId ? { requestId } : {}), + presetName, + ok: false, + error: 'presetName is required', + }); + return; + } + + const { discoverPresetModels, loadPresets, savePresets, getPreset } = await import('./cc-presets.js'); + const presets = await loadPresets(); + const preset = await getPreset(presetName); + if (!preset) { + serverLink.send({ + type: CC_PRESET_MSG.DISCOVER_MODELS_RESPONSE, + ...(requestId ? { requestId } : {}), + presetName, + ok: false, + error: `Preset "${presetName}" not found`, + }); + return; + } + + const normalizedName = preset.name.trim().toLowerCase(); + try { + const discovered = await discoverPresetModels(preset); + const updatedPreset: CcPreset = { + ...preset, + transportMode: preset.transportMode ?? 'qwen-compatible-api', + authType: preset.authType ?? 'anthropic', + availableModels: discovered.availableModels, + ...(discovered.defaultModel ? { defaultModel: discovered.defaultModel } : {}), + lastDiscoveredAt: Date.now(), + modelDiscoveryError: undefined, + }; + await savePresets(presets.map((item) => ( + item.name.trim().toLowerCase() === normalizedName ? updatedPreset : item + ))); + serverLink.send({ + type: CC_PRESET_MSG.DISCOVER_MODELS_RESPONSE, + ...(requestId ? { requestId } : {}), + presetName: updatedPreset.name, + ok: true, + preset: updatedPreset, + models: discovered.availableModels, + endpoint: discovered.endpoint, + }); + } catch (error) { + const message = error instanceof Error ? error.message : String(error); + const updatedPreset: CcPreset = { + ...preset, + modelDiscoveryError: message, + }; + await savePresets(presets.map((item) => ( + item.name.trim().toLowerCase() === normalizedName ? updatedPreset : item + ))); + serverLink.send({ + type: CC_PRESET_MSG.DISCOVER_MODELS_RESPONSE, + ...(requestId ? 
{ requestId } : {}), + presetName: updatedPreset.name, + ok: false, + error: message, + preset: updatedPreset, + }); + } } async function handleSharedContextRuntimeConfigApply(cmd: Record): Promise { diff --git a/src/daemon/file-change-normalizer.ts b/src/daemon/file-change-normalizer.ts index d913ac5c1..3195b89d0 100644 --- a/src/daemon/file-change-normalizer.ts +++ b/src/daemon/file-change-normalizer.ts @@ -35,6 +35,18 @@ function asNumber(value: unknown): number | undefined { return undefined; } +function looksLikeUnifiedDiff(value: string | undefined): boolean { + if (!value) return false; + const normalized = value.replace(/\r\n/g, '\n'); + return normalized.startsWith('@@ ') + || normalized.startsWith('diff ') + || normalized.startsWith('--- ') + || normalized.startsWith('+++ ') + || /\n@@ -\d/.test(normalized) + || /\n--- /.test(normalized) + || /\n\+\+\+ /.test(normalized); +} + function detectOperation(value: unknown, fallback: FileChangeOperation = 'unknown'): FileChangeOperation { const normalized = String(value ?? '').toLowerCase(); if (!normalized) return fallback; @@ -162,12 +174,13 @@ function normalizeGenericToolPatch( rawRecord?.content, rawRecord?.text, ); - const unifiedDiff = asStringAny( + const diffText = asStringAny( inputRecord?.diff, inputRecord?.patch, rawRecord?.diff, rawRecord?.patch, ); + const unifiedDiff = looksLikeUnifiedDiff(diffText) ? diffText : undefined; const hunks = firstDefinedHunks( inputRecord?.hunks, inputRecord?.ranges, @@ -179,14 +192,25 @@ function normalizeGenericToolPatch( asRecord(rawRecord?.toolUseResult)?.ranges, ); const operation = detectOperation( - inputRecord?.operation ?? inputRecord?.op ?? inputRecord?.type ?? rawRecord?.operation ?? rawRecord?.op ?? rawRecord?.type ?? toolName, - beforeText && afterText ? 'update' : afterText ? 'update' : 'unknown', + inputRecord?.operation + ?? inputRecord?.op + ?? inputRecord?.type + ?? asRecord(inputRecord?.kind)?.type + ?? rawRecord?.operation + ?? rawRecord?.op + ?? 
rawRecord?.type + ?? asRecord(rawRecord?.kind)?.type + ?? toolName, + beforeText && afterText ? 'update' : afterText || diffText ? 'update' : 'unknown', ); - const confidence: FileChangeConfidence = beforeText && afterText + const inlineText = unifiedDiff ? undefined : diffText; + const normalizedBeforeText = beforeText ?? (inlineText && operation === 'delete' ? inlineText : undefined); + const normalizedAfterText = afterText ?? (inlineText && operation !== 'delete' ? inlineText : undefined); + const confidence: FileChangeConfidence = normalizedBeforeText && normalizedAfterText ? 'exact' : unifiedDiff ? 'exact' - : afterText + : normalizedBeforeText || normalizedAfterText ? 'derived' : 'coarse'; @@ -195,8 +219,8 @@ function normalizeGenericToolPatch( operation, confidence, ...(oldPath ? { oldPath } : {}), - ...(beforeText ? { beforeText } : {}), - ...(afterText ? { afterText } : {}), + ...(normalizedBeforeText ? { beforeText: normalizedBeforeText } : {}), + ...(normalizedAfterText ? { afterText: normalizedAfterText } : {}), ...(unifiedDiff ? { unifiedDiff } : {}), ...(hunks ? { hunks } : {}), ...(toolCallId ? { toolCallId } : {}), @@ -217,16 +241,20 @@ function normalizeCodexFileChangePatch(change: unknown, toolCallId?: string): Fi if (!filePath) return null; const beforeText = asStringAny(record.beforeText, record.before, record.oldText, record.oldContent); const afterText = asStringAny(record.afterText, record.after, record.newText, record.newContent, record.content); - const unifiedDiff = asStringAny(record.unifiedDiff, record.patch, record.diff); + const diffText = asStringAny(record.unifiedDiff, record.patch, record.diff); + const unifiedDiff = looksLikeUnifiedDiff(diffText) ? diffText : undefined; const hunks = normalizeHunks(record.hunks ?? record.ranges); const oldPath = asStringAny(record.oldPath, record.previousPath, record.fromPath); - const operation = detectOperation(record.operation ?? record.op ?? record.kind ?? record.type, - oldPath ? 
'rename' : beforeText || afterText || unifiedDiff ? 'update' : 'unknown'); - const confidence: FileChangeConfidence = beforeText && afterText + const operation = detectOperation(record.operation ?? record.op ?? asRecord(record.kind)?.type ?? record.kind ?? record.type, + oldPath ? 'rename' : beforeText || afterText || diffText ? 'update' : 'unknown'); + const inlineText = unifiedDiff ? undefined : diffText; + const normalizedBeforeText = beforeText ?? (inlineText && operation === 'delete' ? inlineText : undefined); + const normalizedAfterText = afterText ?? (inlineText && operation !== 'delete' ? inlineText : undefined); + const confidence: FileChangeConfidence = normalizedBeforeText && normalizedAfterText ? 'exact' : unifiedDiff ? 'exact' - : afterText + : normalizedBeforeText || normalizedAfterText ? 'derived' : 'coarse'; @@ -235,8 +263,8 @@ function normalizeCodexFileChangePatch(change: unknown, toolCallId?: string): Fi operation, confidence, ...(oldPath ? { oldPath } : {}), - ...(beforeText ? { beforeText } : {}), - ...(afterText ? { afterText } : {}), + ...(normalizedBeforeText ? { beforeText: normalizedBeforeText } : {}), + ...(normalizedAfterText ? { afterText: normalizedAfterText } : {}), ...(unifiedDiff ? { unifiedDiff } : {}), ...(hunks ? { hunks } : {}), ...(toolCallId ? { toolCallId } : {}), diff --git a/src/daemon/lifecycle.ts b/src/daemon/lifecycle.ts index de463191e..8ceefdaa5 100644 --- a/src/daemon/lifecycle.ts +++ b/src/daemon/lifecycle.ts @@ -39,9 +39,9 @@ import { isKnownTestSessionLike } from '../../shared/test-session-guard.js'; import { isTransportAgent } from '../agent/detect.js'; /** Get the last assistant.text from a session's timeline (for push notification context). 
*/ -function getLastAssistantText(sessionName: string): string | undefined { +async function getLastAssistantText(sessionName: string): Promise { try { - const events = timelineStore.read(sessionName, { limit: 100 }); + const events = await timelineStore.readByTypesPreferred(sessionName, ['assistant.text'], { limit: 100 }); for (let i = events.length - 1; i >= 0; i--) { if (events[i].type === 'assistant.text') { const text = (events[i].payload as Record)?.text; @@ -476,6 +476,11 @@ export async function startup(): Promise { for (const session of listSessions()) { if (!session.name.startsWith('deck_sub_')) continue; if (session.state === 'stopped') continue; + const sessionType = typeof session.agentType === 'string' && session.agentType ? session.agentType : null; + if (!sessionType) { + logger.warn({ sessionName: session.name }, 'Skipping subsession.sync during lifecycle restore without agentType'); + continue; + } const id = session.name.slice('deck_sub_'.length); try { serverLink.send({ @@ -485,7 +490,7 @@ export async function startup(): Promise { // gray after reconnect" — see buildSubSessionSync for the // equivalent fix on the regular sync path. state: session.state ?? null, - sessionType: session.agentType, + sessionType, cwd: session.projectDir || null, label: session.label ?? null, ccSessionId: session.ccSessionId ?? 
null, @@ -559,7 +564,7 @@ export async function startup(): Promise { }); for (const session of listSessions()) { - const history = timelineStore.read(session.name, { limit: 100 }); + const history = await timelineStore.readPreferred(session.name, { limit: 100 }); if (history.length === 0) continue; void liveContextIngestion.backfillSessionFromEvents(session.name, history).catch((err) => { logger.warn({ err, session: session.name }, 'Shared-context timeline backfill failed'); @@ -675,8 +680,11 @@ export async function startup(): Promise { if (event.type === 'session.state' && (event.payload as Record).state === 'idle') { const rec = listSessions().find((s) => s.name === event.sessionId); if (rec?.agentType === 'shell' || rec?.agentType === 'script') return; - const lastText = getLastAssistantText(event.sessionId); - serverLink!.send({ type: 'timeline.event', event, ...(lastText ? { lastText } : {}) }); + void getLastAssistantText(event.sessionId).then((lastText) => { + serverLink!.send({ type: 'timeline.event', event, ...(lastText ? { lastText } : {}) }); + }).catch(() => { + serverLink!.send({ type: 'timeline.event', event }); + }); } else { serverLink!.sendTimelineEvent(event); } @@ -727,15 +735,25 @@ export async function startup(): Promise { if (record?.agentType === 'shell' || record?.agentType === 'script') return; // notifySessionIdle is handled by the unified timeline listener below // Include last assistant text for push notification context - const lastText = getLastAssistantText(payload.session); - serverLink.send({ - type: 'session.idle', - session: payload.session, - project: display.project, - agentType: payload.agentType, - ...(lastText ? { lastText } : {}), - ...(display.label ? { label: display.label } : {}), - ...(display.parentLabel ? 
{ parentLabel: display.parentLabel } : {}), + void getLastAssistantText(payload.session).then((lastText) => { + serverLink.send({ + type: 'session.idle', + session: payload.session, + project: display.project, + agentType: payload.agentType, + ...(lastText ? { lastText } : {}), + ...(display.label ? { label: display.label } : {}), + ...(display.parentLabel ? { parentLabel: display.parentLabel } : {}), + }); + }).catch(() => { + serverLink.send({ + type: 'session.idle', + session: payload.session, + project: display.project, + agentType: payload.agentType, + ...(display.label ? { label: display.label } : {}), + ...(display.parentLabel ? { parentLabel: display.parentLabel } : {}), + }); }); } else if (payload.event === 'notification') { serverLink.send({ diff --git a/src/daemon/opencode-watcher.ts b/src/daemon/opencode-watcher.ts index fa83a620b..e87820b2c 100644 --- a/src/daemon/opencode-watcher.ts +++ b/src/daemon/opencode-watcher.ts @@ -112,7 +112,7 @@ async function pollTick(sessionName: string, state: WatcherState): Promise let sessionId = record?.opencodeSessionId; if (!record?.projectDir || !sessionId) return; - const recentTimeline = timelineStore.read(sessionName, { limit: 200 }); + const recentTimeline = await timelineStore.readPreferred(sessionName, { limit: 200 }); const hasAssistantHistory = hasAssistantLikeTimeline(recentTimeline); if (!hasAssistantHistory) { const latestUserTs = getLatestUserMessageTs(recentTimeline); diff --git a/src/daemon/session-list.ts b/src/daemon/session-list.ts index 1b8b29959..ac27b67c3 100644 --- a/src/daemon/session-list.ts +++ b/src/daemon/session-list.ts @@ -149,13 +149,16 @@ export async function buildSessionList(): Promise { // a synchronous .map() callback. The preset model takes priority over // qwenRuntime available models for display so preset sessions (e.g. MiniMax) // show the correct model even when qwenRuntime hasn't loaded yet. 
- const presetModelBySession = new Map(); + const presetModelBySession = new Map(); if (needsQwenHydration) { - const { getPreset } = await import('./cc-presets.js'); + const { getPreset, getPresetAvailableModelIds, getPresetEffectiveModel } = await import('./cc-presets.js'); for (const s of sessions) { if (s.agentType === 'qwen' && s.ccPreset) { const preset = await getPreset(s.ccPreset); - presetModelBySession.set(s.name, preset?.env?.['ANTHROPIC_MODEL']?.trim() || undefined); + presetModelBySession.set(s.name, { + defaultModel: preset ? getPresetEffectiveModel(preset) : undefined, + availableModels: preset ? getPresetAvailableModelIds(preset) : [], + }); } } } @@ -224,7 +227,9 @@ export async function buildSessionList(): Promise { // No longer available". Non-preset qwen sessions keep the OAuth-derived // tier labels so users see the real state of their CLI auth. const presetActive = !!s.ccPreset; - const presetModel = presetModelBySession.get(s.name); + const presetConfig = presetModelBySession.get(s.name); + const presetModel = presetConfig?.defaultModel; + const presetModels = presetConfig?.availableModels ?? []; const qwenAuthType = presetActive ? QWEN_AUTH_TYPES.API_KEY @@ -232,18 +237,25 @@ export async function buildSessionList(): Promise { const qwenAuthLimit = presetActive ? undefined : (s.qwenAuthLimit ?? qwenRuntime?.authLimit); - const qwenAvailableModels = presetActive && presetModel - ? [presetModel] + const qwenAvailableModels = presetActive + ? (presetModels.length + ? presetModels + : (s.qwenAvailableModels?.length + ? s.qwenAvailableModels + : (qwenRuntime?.availableModels?.length ? qwenRuntime.availableModels : undefined))) : (s.qwenAvailableModels?.length ? s.qwenAvailableModels : (qwenRuntime?.availableModels?.length ? qwenRuntime.availableModels : undefined)); - const qwenModel = presetModel ?? s.qwenModel ?? 
qwenAvailableModels?.[0]; - // modelDisplay: prefer preset's pinned model, then session's existing - // modelDisplay, then the effective qwenModel. This ensures the preset - // model (MiniMax-M2.7) displays correctly even when qwenRuntime's - // availableModels hasn't loaded yet or the session was restored from - // persisted state without the preset context. - const displayModel = presetModel ?? s.modelDisplay ?? qwenModel; + const qwenModel = presetActive + ? ((s.qwenModel && qwenAvailableModels?.includes(s.qwenModel)) + ? s.qwenModel + : (presetModel ?? qwenAvailableModels?.[0] ?? s.qwenModel)) + : (s.qwenModel ?? qwenAvailableModels?.[0]); + // For preset-backed sessions, keep a valid user-selected model visible. + // Fall back to the preset default only when the stored selection is stale. + const displayModel = presetActive + ? (qwenModel ?? presetModel ?? s.modelDisplay) + : (s.modelDisplay ?? qwenModel); const displayMetadata = getQwenDisplayMetadata({ model: displayModel, authType: qwenAuthType, diff --git a/src/daemon/subsession-manager.ts b/src/daemon/subsession-manager.ts index 2787dfbfc..edc9e4d0b 100644 --- a/src/daemon/subsession-manager.ts +++ b/src/daemon/subsession-manager.ts @@ -471,7 +471,7 @@ export async function readSubSessionResponse(sessionName: string): Promise<{ sta ? (record.state === 'idle' ? 'idle' : 'thinking') : detectStatus(lines, agentType); if (status !== 'idle') return { status: 'working' }; - const events = timelineStore.read(sessionName); + const events = await timelineStore.readPreferred(sessionName); const lastUserMsgIdx = events.map((e) => e.type).lastIndexOf('user.message'); const responseEvents = lastUserMsgIdx >= 0 ? events.slice(lastUserMsgIdx + 1) : events; const textParts = responseEvents.filter((e) => e.type === 'assistant.text').map((e) => String(e.payload.text ?? 
'')); diff --git a/src/daemon/timeline-projection-types.ts b/src/daemon/timeline-projection-types.ts new file mode 100644 index 000000000..210fab373 --- /dev/null +++ b/src/daemon/timeline-projection-types.ts @@ -0,0 +1,82 @@ +import type { TimelineEvent, TimelineEventType } from './timeline-event.js'; + +export type ProjectionSessionStatus = 'missing' | 'building' | 'ready' | 'stale' | 'corrupt'; + +export interface ProjectionSessionMeta { + sessionId: string; + lastProjectedAppendOrdinal: number; + sourceFileSizeBytes: number; + sourceFileMtimeMs: number; + projectionVersion: number; + status: ProjectionSessionStatus; + lastRebuiltAt: number | null; +} + +export interface TimelineProjectionQuery { + sessionId: string; + limit?: number; + afterTs?: number; + beforeTs?: number; + types?: TimelineEventType[]; +} + +export interface TimelineProjectionQueryResult { + source: 'sqlite'; + events: TimelineEvent[]; +} + +export interface TimelineProjectionCompletedText { + source: 'sqlite'; + events: TimelineEvent[]; +} + +export interface ProjectionWorkerRequestMap { + recordAppendedEvent: { event: TimelineEvent }; + queryHistory: TimelineProjectionQuery; + queryByTypes: Required> & Omit; + queryCompletedTextTail: { sessionId: string; limit?: number }; + queryLatest: { sessionId: string }; + rebuildSession: { sessionId: string }; + pruneSessionToAuthoritative: { sessionId: string; keepLast: number }; + deleteSession: { sessionId: string }; + checkpointIfNeeded: Record; + shutdown: Record; +} + +export interface ProjectionWorkerResponseMap { + recordAppendedEvent: boolean; + queryHistory: TimelineProjectionQueryResult; + queryByTypes: TimelineProjectionQueryResult; + queryCompletedTextTail: TimelineProjectionCompletedText; + queryLatest: { epoch: number; seq: number } | null; + rebuildSession: boolean; + pruneSessionToAuthoritative: boolean; + deleteSession: boolean; + checkpointIfNeeded: boolean; + shutdown: true; +} + +export type ProjectionWorkerRequestType = keyof 
ProjectionWorkerRequestMap; + +export interface ProjectionWorkerEnvelope { + id: number; + type: T; + payload: ProjectionWorkerRequestMap[T]; +} + +export interface ProjectionWorkerSuccess { + id: number; + ok: true; + type: T; + result: ProjectionWorkerResponseMap[T]; +} + +export interface ProjectionWorkerFailure { + id: number; + ok: false; + type: T; + error: string; + code?: string; +} + +export type ProjectionWorkerResponse = ProjectionWorkerSuccess | ProjectionWorkerFailure; diff --git a/src/daemon/timeline-projection-worker.ts b/src/daemon/timeline-projection-worker.ts new file mode 100644 index 000000000..4151590d5 --- /dev/null +++ b/src/daemon/timeline-projection-worker.ts @@ -0,0 +1,516 @@ +import { parentPort, workerData } from 'node:worker_threads'; +import { createRequire } from 'node:module'; +import { mkdirSync, statSync, existsSync, readFileSync } from 'node:fs'; +import { dirname, join } from 'node:path'; +import { homedir } from 'node:os'; +import type { TimelineEvent, TimelineEventType } from './timeline-event.js'; +import type { + ProjectionSessionMeta, + ProjectionWorkerEnvelope, + ProjectionWorkerRequestType, + ProjectionWorkerRequestMap, + ProjectionWorkerResponse, +} from './timeline-projection-types.js'; + +const require = createRequire(import.meta.url); +const { DatabaseSync } = require('node:sqlite') as typeof import('node:sqlite'); +type DatabaseSyncInstance = InstanceType; + +type ProjectionStatus = 'missing' | 'building' | 'ready' | 'stale' | 'corrupt'; +type WorkerRequest = { + [K in ProjectionWorkerRequestType]: ProjectionWorkerEnvelope; +}[ProjectionWorkerRequestType]; + +const PROJECTION_VERSION = 1; +const TIMELINE_DIR = join(homedir(), '.imcodes', 'timeline'); +const dbPath = typeof workerData?.dbPath === 'string' && workerData.dbPath + ? 
workerData.dbPath + : join(homedir(), '.imcodes', 'timeline.sqlite'); + +let db: DatabaseSyncInstance | null = null; +const rebuildPromises = new Map>(); +let writesSinceCheckpoint = 0; + +function sessionFilePath(sessionId: string): string { + const safe = sessionId.replace(/[^a-zA-Z0-9_-]/g, '_'); + return join(TIMELINE_DIR, `${safe}.jsonl`); +} + +function ensureDb(): DatabaseSyncInstance { + if (db) return db; + mkdirSync(dirname(dbPath), { recursive: true }); + const instance = new DatabaseSync(dbPath); + instance.exec(` + PRAGMA journal_mode = WAL; + PRAGMA synchronous = NORMAL; + CREATE TABLE IF NOT EXISTS timeline_projection_events ( + session_id TEXT NOT NULL, + append_ordinal INTEGER NOT NULL, + event_id TEXT NOT NULL, + ts INTEGER NOT NULL, + seq INTEGER NOT NULL, + epoch INTEGER NOT NULL, + type TEXT NOT NULL, + source TEXT NOT NULL, + confidence TEXT NOT NULL, + streaming INTEGER NOT NULL DEFAULT 0, + hidden INTEGER NOT NULL DEFAULT 0, + text TEXT, + payload_json TEXT NOT NULL, + created_at INTEGER NOT NULL, + updated_at INTEGER NOT NULL, + PRIMARY KEY(session_id, append_ordinal) + ); + CREATE INDEX IF NOT EXISTS idx_timeline_projection_events_session_ts + ON timeline_projection_events(session_id, ts DESC, append_ordinal DESC); + CREATE INDEX IF NOT EXISTS idx_timeline_projection_events_session_type_ts + ON timeline_projection_events(session_id, type, ts DESC, append_ordinal DESC); + CREATE INDEX IF NOT EXISTS idx_timeline_projection_events_session_streaming_ts + ON timeline_projection_events(session_id, streaming, ts DESC, append_ordinal DESC); + + CREATE TABLE IF NOT EXISTS timeline_projection_sessions ( + session_id TEXT PRIMARY KEY, + last_projected_append_ordinal INTEGER NOT NULL, + source_file_size_bytes INTEGER NOT NULL, + source_file_mtime_ms INTEGER NOT NULL, + projection_version INTEGER NOT NULL, + status TEXT NOT NULL, + last_rebuilt_at INTEGER + ); + `); + db = instance; + return instance; +} + +function runInTransaction(work: () => void): 
void { + const database = ensureDb(); + database.exec('BEGIN IMMEDIATE'); + try { + work(); + database.exec('COMMIT'); + } catch (err) { + try { + database.exec('ROLLBACK'); + } catch { + // ignore rollback failures + } + throw err; + } +} + +function readSessionMeta(sessionId: string): ProjectionSessionMeta | null { + const row = ensureDb().prepare(` + SELECT session_id, last_projected_append_ordinal, source_file_size_bytes, source_file_mtime_ms, projection_version, status, last_rebuilt_at + FROM timeline_projection_sessions + WHERE session_id = ? + `).get(sessionId) as Record | undefined; + if (!row) return null; + return { + sessionId: String(row.session_id), + lastProjectedAppendOrdinal: Number(row.last_projected_append_ordinal), + sourceFileSizeBytes: Number(row.source_file_size_bytes), + sourceFileMtimeMs: Number(row.source_file_mtime_ms), + projectionVersion: Number(row.projection_version), + status: String(row.status) as ProjectionStatus, + lastRebuiltAt: typeof row.last_rebuilt_at === 'number' ? row.last_rebuilt_at : null, + }; +} + +function upsertSessionMeta(sessionId: string, meta: { + lastProjectedAppendOrdinal: number; + sourceFileSizeBytes: number; + sourceFileMtimeMs: number; + status: ProjectionStatus; + lastRebuiltAt?: number | null; +}): void { + ensureDb().prepare(` + INSERT INTO timeline_projection_sessions ( + session_id, last_projected_append_ordinal, source_file_size_bytes, source_file_mtime_ms, projection_version, status, last_rebuilt_at + ) VALUES (?, ?, ?, ?, ?, ?, ?) 
+ ON CONFLICT(session_id) DO UPDATE SET + last_projected_append_ordinal = excluded.last_projected_append_ordinal, + source_file_size_bytes = excluded.source_file_size_bytes, + source_file_mtime_ms = excluded.source_file_mtime_ms, + projection_version = excluded.projection_version, + status = excluded.status, + last_rebuilt_at = excluded.last_rebuilt_at + `).run( + sessionId, + meta.lastProjectedAppendOrdinal, + meta.sourceFileSizeBytes, + meta.sourceFileMtimeMs, + PROJECTION_VERSION, + meta.status, + meta.lastRebuiltAt ?? null, + ); +} + +function deleteSessionRows(sessionId: string): void { + const database = ensureDb(); + database.prepare('DELETE FROM timeline_projection_events WHERE session_id = ?').run(sessionId); + database.prepare('DELETE FROM timeline_projection_sessions WHERE session_id = ?').run(sessionId); +} + +function currentFileMeta(sessionId: string): { exists: boolean; size: number; mtimeMs: number } { + const filePath = sessionFilePath(sessionId); + if (!existsSync(filePath)) return { exists: false, size: 0, mtimeMs: 0 }; + const stat = statSync(filePath); + return { exists: true, size: stat.size, mtimeMs: Math.trunc(stat.mtimeMs) }; +} + +function parseLinesAscending(sessionId: string): TimelineEvent[] { + const filePath = sessionFilePath(sessionId); + if (!existsSync(filePath)) return []; + const raw = readFileSync(filePath, 'utf8'); + if (!raw.trim()) return []; + const events: TimelineEvent[] = []; + for (const line of raw.split('\n')) { + if (!line) continue; + try { + const event = JSON.parse(line) as TimelineEvent; + if (event.sessionId === sessionId) events.push(event); + } catch { + // preserve JSONL tolerance: corrupt lines are skipped + } + } + return events; +} + +function extractTextAndStreaming(event: TimelineEvent): { text: string | null; streaming: number } { + const text = typeof event.payload?.text === 'string' ? event.payload.text : null; + const streaming = event.payload?.streaming === true ? 
1 : 0; + return { text, streaming }; +} + +function insertProjectedEvent(database: DatabaseSyncInstance, sessionId: string, appendOrdinal: number, event: TimelineEvent): void { + const { text, streaming } = extractTextAndStreaming(event); + database.prepare(` + INSERT INTO timeline_projection_events ( + session_id, append_ordinal, event_id, ts, seq, epoch, type, source, confidence, streaming, hidden, text, payload_json, created_at, updated_at + ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) + `).run( + sessionId, + appendOrdinal, + event.eventId, + event.ts, + event.seq, + event.epoch, + event.type, + event.source, + event.confidence, + streaming, + event.hidden === true ? 1 : 0, + text, + JSON.stringify(event.payload), + event.ts, + Date.now(), + ); +} + +async function rebuildSessionInternal(sessionId: string): Promise { + const existing = rebuildPromises.get(sessionId); + if (existing) return existing; + const promise = Promise.resolve().then(() => { + const database = ensureDb(); + const fileMeta = currentFileMeta(sessionId); + if (!fileMeta.exists) { + deleteSessionRows(sessionId); + return true; + } + upsertSessionMeta(sessionId, { + lastProjectedAppendOrdinal: 0, + sourceFileSizeBytes: fileMeta.size, + sourceFileMtimeMs: fileMeta.mtimeMs, + status: 'building', + lastRebuiltAt: Date.now(), + }); + const events = parseLinesAscending(sessionId); + runInTransaction(() => { + database.prepare('DELETE FROM timeline_projection_events WHERE session_id = ?').run(sessionId); + let appendOrdinal = 0; + for (const event of events) { + appendOrdinal += 1; + insertProjectedEvent(database, sessionId, appendOrdinal, event); + } + upsertSessionMeta(sessionId, { + lastProjectedAppendOrdinal: appendOrdinal, + sourceFileSizeBytes: fileMeta.size, + sourceFileMtimeMs: fileMeta.mtimeMs, + status: 'ready', + lastRebuiltAt: Date.now(), + }); + }); + writesSinceCheckpoint += Math.max(events.length, 1); + maybeCheckpoint(); + return true; + }).finally(() => { + 
rebuildPromises.delete(sessionId); + }); + rebuildPromises.set(sessionId, promise); + return promise; +} + +async function ensureFreshSession(sessionId: string): Promise { + const meta = readSessionMeta(sessionId); + const fileMeta = currentFileMeta(sessionId); + if (!fileMeta.exists) { + deleteSessionRows(sessionId); + return false; + } + if (!meta) { + await rebuildSessionInternal(sessionId); + return true; + } + if (meta.status !== 'ready' + || meta.projectionVersion !== PROJECTION_VERSION + || meta.sourceFileSizeBytes !== fileMeta.size + || meta.sourceFileMtimeMs !== fileMeta.mtimeMs) { + upsertSessionMeta(sessionId, { + lastProjectedAppendOrdinal: meta.lastProjectedAppendOrdinal, + sourceFileSizeBytes: fileMeta.size, + sourceFileMtimeMs: fileMeta.mtimeMs, + status: 'stale', + lastRebuiltAt: meta.lastRebuiltAt, + }); + await rebuildSessionInternal(sessionId); + } + return true; +} + +function rowToEvent(row: Record): TimelineEvent { + const payload = JSON.parse(String(row.payload_json)) as Record; + return { + eventId: String(row.event_id), + sessionId: String(row.session_id), + ts: Number(row.ts), + seq: Number(row.seq), + epoch: Number(row.epoch), + source: String(row.source) as TimelineEvent['source'], + confidence: String(row.confidence) as TimelineEvent['confidence'], + type: String(row.type) as TimelineEvent['type'], + payload, + ...(Number(row.hidden) === 1 ? 
{ hidden: true } : {}), + }; +} + +function maybeCheckpoint(): void { + if (writesSinceCheckpoint < 256) return; + writesSinceCheckpoint = 0; + ensureDb().exec('PRAGMA wal_checkpoint(TRUNCATE);'); +} + +async function handleRecordAppendedEvent(event: TimelineEvent): Promise { + const fileMeta = currentFileMeta(event.sessionId); + if (!fileMeta.exists) return false; + const meta = readSessionMeta(event.sessionId); + const serializedEvent = JSON.stringify(event) + '\n'; + const appendedBytes = Buffer.byteLength(serializedEvent); + if ( + !meta + || meta.status !== 'ready' + || meta.projectionVersion !== PROJECTION_VERSION + || fileMeta.size !== meta.sourceFileSizeBytes + appendedBytes + || fileMeta.mtimeMs < meta.sourceFileMtimeMs + ) { + await rebuildSessionInternal(event.sessionId); + return true; + } + const database = ensureDb(); + const nextOrdinal = (meta?.lastProjectedAppendOrdinal ?? 0) + 1; + runInTransaction(() => { + insertProjectedEvent(database, event.sessionId, nextOrdinal, event); + upsertSessionMeta(event.sessionId, { + lastProjectedAppendOrdinal: nextOrdinal, + sourceFileSizeBytes: fileMeta.size, + sourceFileMtimeMs: fileMeta.mtimeMs, + status: 'ready', + lastRebuiltAt: meta?.lastRebuiltAt ?? 
null, + }); + }); + writesSinceCheckpoint += 1; + maybeCheckpoint(); + return true; +} + +function buildRangeSql(base: string, afterTs?: number, beforeTs?: number): { sql: string; params: unknown[] } { + const clauses = [base]; + const params: unknown[] = []; + if (afterTs !== undefined) { + clauses.push('AND ts > ?'); + params.push(afterTs); + } + if (beforeTs !== undefined) { + clauses.push('AND ts < ?'); + params.push(beforeTs); + } + return { sql: clauses.join(' '), params }; +} + +async function handleQueryHistory(sessionId: string, afterTs?: number, beforeTs?: number, limit = 500): Promise<{ source: 'sqlite'; events: TimelineEvent[] }> { + const fresh = await ensureFreshSession(sessionId); + if (!fresh) return { source: 'sqlite', events: [] }; + const boundedLimit = Math.max(1, Math.min(limit, 10_000)); + const { sql, params } = buildRangeSql( + 'SELECT * FROM timeline_projection_events WHERE session_id = ?', + afterTs, + beforeTs, + ); + const rows = ensureDb().prepare(`${sql} ORDER BY ts DESC, append_ordinal DESC LIMIT ?`).all(...([sessionId, ...params, boundedLimit] as any[])) as Array>; + return { source: 'sqlite', events: rows.reverse().map(rowToEvent) }; +} + +async function handleQueryLatest(sessionId: string): Promise<{ epoch: number; seq: number } | null> { + const fresh = await ensureFreshSession(sessionId); + if (!fresh) return null; + const row = ensureDb().prepare(` + SELECT epoch, seq + FROM timeline_projection_events + WHERE session_id = ? 
+ ORDER BY append_ordinal DESC + LIMIT 1 + `).get(sessionId) as Record | undefined; + if (!row) return null; + return { epoch: Number(row.epoch), seq: Number(row.seq) }; +} + +async function handleQueryCompletedTextTail(sessionId: string, limit = 50): Promise<{ source: 'sqlite'; events: TimelineEvent[] }> { + const fresh = await ensureFreshSession(sessionId); + if (!fresh) return { source: 'sqlite', events: [] }; + const boundedLimit = Math.max(1, Math.min(limit, 500)); + const rows = ensureDb().prepare(` + SELECT * + FROM timeline_projection_events + WHERE session_id = ? + AND ( + (type = 'user.message' AND text IS NOT NULL AND trim(text) <> '') + OR + (type = 'assistant.text' AND streaming = 0 AND text IS NOT NULL AND trim(text) <> '') + ) + ORDER BY ts DESC, append_ordinal DESC + LIMIT ? + `).all(sessionId, boundedLimit) as Array>; + return { source: 'sqlite', events: rows.reverse().map(rowToEvent) }; +} + +async function handleQueryByTypes(sessionId: string, types: TimelineEventType[], afterTs?: number, beforeTs?: number, limit = 500): Promise<{ source: 'sqlite'; events: TimelineEvent[] }> { + const fresh = await ensureFreshSession(sessionId); + if (!fresh) return { source: 'sqlite', events: [] }; + if (types.length === 0) return { source: 'sqlite', events: [] }; + const boundedLimit = Math.max(1, Math.min(limit, 10_000)); + const placeholders = types.map(() => '?').join(', '); + const { sql, params } = buildRangeSql( + `SELECT * FROM timeline_projection_events WHERE session_id = ? 
AND type IN (${placeholders})`, + afterTs, + beforeTs, + ); + const rows = ensureDb().prepare(`${sql} ORDER BY ts DESC, append_ordinal DESC LIMIT ?`) + .all(...([sessionId, ...types, ...params, boundedLimit] as any[])) as Array>; + return { source: 'sqlite', events: rows.reverse().map(rowToEvent) }; +} + +async function handlePruneSessionToAuthoritative(sessionId: string, keepLast: number): Promise { + const database = ensureDb(); + const boundedKeep = Math.max(1, keepLast); + database.prepare(` + DELETE FROM timeline_projection_events + WHERE session_id = ? + AND append_ordinal NOT IN ( + SELECT append_ordinal + FROM timeline_projection_events + WHERE session_id = ? + ORDER BY append_ordinal DESC + LIMIT ? + ) + `).run(sessionId, sessionId, boundedKeep); + const meta = readSessionMeta(sessionId); + if (meta) { + const fileMeta = currentFileMeta(sessionId); + const latestRow = database.prepare(` + SELECT append_ordinal + FROM timeline_projection_events + WHERE session_id = ? + ORDER BY append_ordinal DESC + LIMIT 1 + `).get(sessionId) as Record | undefined; + upsertSessionMeta(sessionId, { + lastProjectedAppendOrdinal: latestRow ? Number(latestRow.append_ordinal) : 0, + sourceFileSizeBytes: fileMeta.size, + sourceFileMtimeMs: fileMeta.mtimeMs, + status: fileMeta.exists ? 
'ready' : 'missing', + lastRebuiltAt: meta.lastRebuiltAt, + }); + } + writesSinceCheckpoint += 1; + maybeCheckpoint(); + return true; +} + +async function handleDeleteSession(sessionId: string): Promise { + deleteSessionRows(sessionId); + writesSinceCheckpoint += 1; + maybeCheckpoint(); + return true; +} + +async function handleCheckpointIfNeeded(): Promise { + maybeCheckpoint(); + return true; +} + +async function handleShutdown(): Promise { + parentPort?.close(); + return true; +} + +async function handleRequest(message: WorkerRequest): Promise { + switch (message.type) { + case 'recordAppendedEvent': + return handleRecordAppendedEvent(message.payload.event); + case 'queryHistory': + return handleQueryHistory(message.payload.sessionId, message.payload.afterTs, message.payload.beforeTs, message.payload.limit); + case 'queryLatest': + return handleQueryLatest(message.payload.sessionId); + case 'queryCompletedTextTail': + return handleQueryCompletedTextTail(message.payload.sessionId, message.payload.limit); + case 'queryByTypes': + return handleQueryByTypes(message.payload.sessionId, message.payload.types, message.payload.afterTs, message.payload.beforeTs, message.payload.limit); + case 'rebuildSession': { + const existing = rebuildPromises.get(message.payload.sessionId); + if (existing) return existing; + try { + return rebuildSessionInternal(message.payload.sessionId); + } catch (result) { + if (result instanceof Promise) return result; + throw result; + } + } + case 'pruneSessionToAuthoritative': + return handlePruneSessionToAuthoritative(message.payload.sessionId, message.payload.keepLast); + case 'deleteSession': + return handleDeleteSession(message.payload.sessionId); + case 'checkpointIfNeeded': + return handleCheckpointIfNeeded(); + case 'shutdown': + return handleShutdown(); + } +} + +if (!parentPort) { + throw new Error('timeline-projection-worker requires parentPort'); +} + +parentPort.on('message', async (message: WorkerRequest) => { + try { + const 
result = await handleRequest(message); + const response: ProjectionWorkerResponse = { id: message.id, type: message.type, ok: true, result } as ProjectionWorkerResponse; + parentPort?.postMessage(response); + } catch (err) { + const response: ProjectionWorkerResponse = { + id: message.id, + type: message.type, + ok: false, + error: err instanceof Error ? err.message : String(err), + }; + parentPort?.postMessage(response); + } +}); diff --git a/src/daemon/timeline-projection.ts b/src/daemon/timeline-projection.ts new file mode 100644 index 000000000..929c2f545 --- /dev/null +++ b/src/daemon/timeline-projection.ts @@ -0,0 +1,213 @@ +import { Worker } from 'node:worker_threads'; +import { fileURLToPath } from 'node:url'; +import { extname, join } from 'node:path'; +import { homedir } from 'node:os'; +import type { TimelineEvent } from './timeline-event.js'; +import logger from '../util/logger.js'; +import type { + ProjectionSessionMeta, + ProjectionWorkerEnvelope, + ProjectionWorkerRequestMap, + ProjectionWorkerRequestType, + ProjectionWorkerResponse, + TimelineProjectionQuery, +} from './timeline-projection-types.js'; + +export type TimelineProjectionQueryOpts = Omit; + +export interface TimelineProjectionLatest { + epoch: number; + seq: number; +} + +export type TimelineProjectionStatus = 'missing' | 'building' | 'ready' | 'stale' | 'corrupt'; + +const DEFAULT_QUERY_TIMEOUT_MS = 75; +const DEFAULT_WRITE_TIMEOUT_MS = 2_000; + +function getProjectionDbPath(): string { + return process.env.IMCODES_TIMELINE_PROJECTION_DB_PATH?.trim() + || join(homedir(), '.imcodes', 'timeline.sqlite'); +} + +function getWorkerModuleUrl(): URL { + const selfPath = fileURLToPath(import.meta.url); + const ext = extname(selfPath); + return new URL(ext === '.ts' ? 
'./timeline-projection-worker.ts' : './timeline-projection-worker.js', import.meta.url); +} + +class TimelineProjectionClient { + private worker: Worker | null = null; + private nextId = 1; + private readonly pending = new Map void; + reject: (error: Error) => void; + timer?: NodeJS.Timeout; + }>(); + private permanentlyDisabled = false; + + private ensureWorker(): Worker | null { + if (this.permanentlyDisabled) return null; + if (this.worker) return this.worker; + try { + const worker = new Worker(getWorkerModuleUrl(), { + workerData: { dbPath: getProjectionDbPath() }, + }); + worker.unref(); + worker.on('message', (message: ProjectionWorkerResponse) => this.handleWorkerMessage(message)); + worker.on('error', (err) => { + logger.warn({ err }, 'TimelineProjection: worker failed'); + this.failAllPending(err instanceof Error ? err : new Error(String(err))); + this.worker = null; + this.permanentlyDisabled = true; + }); + worker.on('exit', (code) => { + if (code !== 0) { + logger.warn({ code }, 'TimelineProjection: worker exited unexpectedly'); + } + this.failAllPending(new Error(`timeline_projection_worker_exit:${code}`)); + this.worker = null; + if (code !== 0) this.permanentlyDisabled = true; + }); + this.worker = worker; + return worker; + } catch (err) { + logger.warn({ err }, 'TimelineProjection: failed to start worker'); + this.permanentlyDisabled = true; + return null; + } + } + + private handleWorkerMessage(message: ProjectionWorkerResponse): void { + const pending = this.pending.get(message.id); + if (!pending) return; + this.pending.delete(message.id); + if (pending.timer) clearTimeout(pending.timer); + if (message.ok) pending.resolve(message.result); + else pending.reject(new Error(message.error)); + } + + private failAllPending(err: Error): void { + for (const [id, pending] of this.pending) { + this.pending.delete(id); + if (pending.timer) clearTimeout(pending.timer); + pending.reject(err); + } + } + + private request(type: TOp, payload: 
ProjectionWorkerRequestMap[TOp], timeoutMs: number): Promise { + const worker = this.ensureWorker(); + if (!worker) return Promise.reject(new Error('timeline_projection_unavailable')); + const id = this.nextId++; + return new Promise((resolve, reject) => { + const timer = timeoutMs > 0 ? setTimeout(() => { + this.pending.delete(id); + reject(new Error(`timeline_projection_timeout:${type}`)); + }, timeoutMs) : undefined; + this.pending.set(id, { resolve: resolve as (value: unknown) => void, reject, timer }); + const request: ProjectionWorkerEnvelope = { id, type, payload }; + worker.postMessage(request); + }); + } + + async recordAppendedEvent(event: TimelineEvent): Promise { + await this.request('recordAppendedEvent', { event }, DEFAULT_WRITE_TIMEOUT_MS).catch((err) => { + logger.debug({ err, sessionId: event.sessionId, eventId: event.eventId }, 'TimelineProjection: recordAppendedEvent failed'); + }); + } + + async queryHistory(query: ProjectionWorkerRequestMap['queryHistory']): Promise { + try { + const result = await this.request<{ source: 'sqlite'; events: TimelineEvent[] }, 'queryHistory'>('queryHistory', query, DEFAULT_QUERY_TIMEOUT_MS); + return result.events; + } catch (err) { + logger.debug({ err, sessionId: query.sessionId }, 'TimelineProjection: queryHistory fallback to JSONL'); + return null; + } + } + + async queryLatest(sessionId: string): Promise { + try { + return await this.request('queryLatest', { sessionId }, DEFAULT_QUERY_TIMEOUT_MS); + } catch (err) { + logger.debug({ err, sessionId }, 'TimelineProjection: queryLatest fallback to JSONL'); + return null; + } + } + + async getLatest(sessionId: string): Promise { + return this.queryLatest(sessionId); + } + + async queryCompletedTextTail(sessionId: string, limit = 50): Promise { + try { + const result = await this.request<{ source: 'sqlite'; events: TimelineEvent[] }, 'queryCompletedTextTail'>('queryCompletedTextTail', { sessionId, limit }, DEFAULT_QUERY_TIMEOUT_MS); + return result.events; + } 
catch (err) { + logger.debug({ err, sessionId }, 'TimelineProjection: queryCompletedTextTail fallback to JSONL'); + return null; + } + } + + async queryByTypes(query: ProjectionWorkerRequestMap['queryByTypes']): Promise { + try { + const result = await this.request<{ source: 'sqlite'; events: TimelineEvent[] }, 'queryByTypes'>('queryByTypes', query, DEFAULT_QUERY_TIMEOUT_MS); + return result.events; + } catch (err) { + logger.debug({ err, sessionId: query.sessionId, types: query.types }, 'TimelineProjection: queryByTypes fallback to JSONL'); + return null; + } + } + + async rebuildSession(sessionId: string): Promise { + try { + return await this.request('rebuildSession', { sessionId }, DEFAULT_WRITE_TIMEOUT_MS); + } catch (err) { + logger.debug({ err, sessionId }, 'TimelineProjection: rebuildSession failed'); + return false; + } + } + + async pruneSessionToAuthoritative(sessionId: string, keepLast = 5000): Promise { + try { + await this.request('pruneSessionToAuthoritative', { sessionId, keepLast }, DEFAULT_WRITE_TIMEOUT_MS); + } catch (err) { + logger.debug({ err, sessionId, keepLast }, 'TimelineProjection: pruneSessionToAuthoritative failed'); + } + } + + async deleteSession(sessionId: string): Promise { + try { + await this.request('deleteSession', { sessionId }, DEFAULT_WRITE_TIMEOUT_MS); + } catch (err) { + logger.debug({ err, sessionId }, 'TimelineProjection: deleteSession failed'); + } + } + + async checkpointIfNeeded(): Promise { + try { + await this.request('checkpointIfNeeded', {}, DEFAULT_WRITE_TIMEOUT_MS); + } catch (err) { + logger.debug({ err }, 'TimelineProjection: checkpointIfNeeded failed'); + } + } + + async shutdown(): Promise { + const worker = this.worker; + this.worker = null; + if (!worker) return; + this.failAllPending(new Error('timeline_projection_shutdown')); + try { + await worker.terminate(); + } catch (err) { + logger.debug({ err }, 'TimelineProjection: worker terminate failed'); + } + this.permanentlyDisabled = false; + } +} + +export 
const timelineProjection = new TimelineProjectionClient(); + +export type { + ProjectionSessionMeta, +}; diff --git a/src/daemon/timeline-store.ts b/src/daemon/timeline-store.ts index fd3a1b4a3..ca4bfb80f 100644 --- a/src/daemon/timeline-store.ts +++ b/src/daemon/timeline-store.ts @@ -9,8 +9,9 @@ import { join } from 'path'; import { homedir } from 'os'; import type { TimelineEvent } from './timeline-event.js'; import logger from '../util/logger.js'; +import { timelineProjection, type TimelineProjectionQueryOpts } from './timeline-projection.js'; -const TIMELINE_DIR = join(homedir(), '.imcodes', 'timeline'); +export const TIMELINE_DIR = join(homedir(), '.imcodes', 'timeline'); const MAX_AGE_MS = 7 * 24 * 60 * 60 * 1000; // 7 days const MAX_EVENTS_PER_FILE = 5000; @@ -18,7 +19,7 @@ const MAX_EVENTS_PER_FILE = 5000; * Read the last N lines from a file by reading backward from the end in chunks. * Much faster than readFileSync + split for large files when only tail is needed. */ -function readTailLines(filePath: string, maxLines: number): string[] { +export function readTailLines(filePath: string, maxLines: number): string[] { let fd: number; try { fd = openSync(filePath, 'r'); @@ -73,7 +74,7 @@ class TimelineStore { this.initialized = true; } - private filePath(sessionName: string): string { + filePath(sessionName: string): string { // Sanitize session name for filesystem const safe = sessionName.replace(/[^a-zA-Z0-9_-]/g, '_'); return join(TIMELINE_DIR, `${safe}.jsonl`); @@ -84,6 +85,9 @@ class TimelineStore { this.ensureDir(); try { appendFileSync(this.filePath(event.sessionId), JSON.stringify(event) + '\n'); + void timelineProjection.recordAppendedEvent(event).catch((err) => { + logger.debug({ err, sessionId: event.sessionId, eventId: event.eventId }, 'TimelineProjection: append mirror failed'); + }); } catch (err) { logger.debug({ err, sessionId: event.sessionId }, 'TimelineStore: append failed'); } @@ -122,6 +126,52 @@ class TimelineStore { return 
events.reverse(); // restore ts order } + async readPreferred( + sessionName: string, + opts?: { afterTs?: number; beforeTs?: number; limit?: number }, + ): Promise { + const projected = await timelineProjection.queryHistory({ + sessionId: sessionName, + afterTs: opts?.afterTs, + beforeTs: opts?.beforeTs, + limit: opts?.limit, + }); + if (projected) return projected; + return this.read(sessionName, opts); + } + + async readByTypesPreferred( + sessionName: string, + types: TimelineEvent['type'][], + opts?: TimelineProjectionQueryOpts, + ): Promise { + const projected = await timelineProjection.queryByTypes({ + sessionId: sessionName, + types, + afterTs: opts?.afterTs, + beforeTs: opts?.beforeTs, + limit: opts?.limit, + }); + if (projected) return projected; + return this.read(sessionName, opts).filter((event) => types.includes(event.type)); + } + + async readCompletedTextTail(sessionName: string, limit = 50): Promise { + const projected = await timelineProjection.queryCompletedTextTail(sessionName, limit); + if (projected) return projected; + return this.read(sessionName, { limit: Math.max(limit * 6, 500) }).filter((event) => { + if (event.type === 'user.message') { + return typeof event.payload?.text === 'string' && event.payload.text.trim().length > 0; + } + if (event.type === 'assistant.text') { + return event.payload?.streaming !== true + && typeof event.payload?.text === 'string' + && event.payload.text.trim().length > 0; + } + return false; + }).slice(-limit); + } + /** * Get the latest epoch and seq for a session (from the last line). 
*/ @@ -137,6 +187,16 @@ class TimelineStore { return null; } + async getLatestPreferred(sessionName: string): Promise<{ epoch: number; seq: number } | null> { + try { + const projected = await timelineProjection.getLatest(sessionName); + if (projected) return projected; + } catch { + // fall through to JSONL + } + return this.getLatest(sessionName); + } + /** * Truncate old events from a session file, keeping only the last N events. */ @@ -148,6 +208,9 @@ class TimelineStore { const kept = newestFirst.slice(0, keepLast).reverse(); try { writeFileSync(filePath, kept.join('\n') + '\n'); + void timelineProjection.pruneSessionToAuthoritative(sessionName, keepLast).catch((err) => { + logger.debug({ err, sessionName }, 'TimelineProjection: prune after truncate failed'); + }); logger.info({ sessionName, after: kept.length }, 'TimelineStore: truncated'); } catch (err) { logger.debug({ err, sessionName }, 'TimelineStore: truncate write failed'); @@ -185,6 +248,10 @@ class TimelineStore { const stat = statSync(fullPath); if (now - stat.mtimeMs > MAX_AGE_MS) { unlinkSync(fullPath); + const sessionName = file.replace('.jsonl', ''); + void timelineProjection.deleteSession(sessionName).catch((err) => { + logger.debug({ err, sessionName }, 'TimelineProjection: delete after cleanup failed'); + }); logger.info({ file }, 'TimelineStore: deleted old file'); } } catch { /* skip */ } @@ -192,6 +259,9 @@ class TimelineStore { } catch (err) { logger.debug({ err }, 'TimelineStore: cleanup failed'); } + void timelineProjection.checkpointIfNeeded().catch((err) => { + logger.debug({ err }, 'TimelineProjection: cleanup checkpoint failed'); + }); } } diff --git a/test/agent/codex-sdk-provider.test.ts b/test/agent/codex-sdk-provider.test.ts index df1e0f587..46d6fad68 100644 --- a/test/agent/codex-sdk-provider.test.ts +++ b/test/agent/codex-sdk-provider.test.ts @@ -555,6 +555,46 @@ describe('CodexSdkProvider', () => { expect(detail.meta?.actionType).toBe('other'); }); + it('surfaces the final 
WebSearch query on completion even if started emitted only a generic fallback', async () => { + const provider = new CodexSdkProvider(); + await provider.connect({ binaryPath: 'codex' }); + await provider.createSession({ sessionKey: 'route-websearch-late-query', cwd: '/tmp/project' }); + + const tools: Array<{ status: string; input: unknown; detail?: unknown }> = []; + provider.onToolCall((_, tool) => tools.push({ status: tool.status, input: tool.input, detail: tool.detail })); + + await provider.send('route-websearch-late-query', 'search'); + const child = childProcessMock.children[0]; + child.emits({ + method: 'item/started', + params: { threadId: 'thread-1', turnId: 'turn-1', item: { id: 'ws-late', type: 'webSearch', action: { type: 'other' } } }, + }); + child.emits({ + method: 'item/completed', + params: { + threadId: 'thread-1', + turnId: 'turn-1', + item: { + id: 'ws-late', + type: 'webSearch', + query: 'apple stock today', + action: { type: 'search', query: 'apple stock today' }, + }, + }, + }); + child.emits({ method: 'turn/completed', params: { threadId: 'thread-1', turn: { id: 'turn-1', status: 'completed', error: null } } }); + await flush(); + + expect(tools).toHaveLength(2); + expect(tools[0].status).toBe('running'); + expect(tools[0].input).toEqual({ query: '(other)' }); + expect(tools[1].status).toBe('complete'); + expect(tools[1].input).toEqual({ query: 'apple stock today' }); + const detail = tools[1].detail as { summary?: string; input?: Record }; + expect(detail.summary).toBe('apple stock today'); + expect(detail.input).toEqual({ query: 'apple stock today', action: { type: 'search', query: 'apple stock today' } }); + }); + it('applies thinking level to subsequent Codex SDK turns', async () => { const provider = new CodexSdkProvider(); await provider.connect({ binaryPath: 'codex' }); diff --git a/test/agent/qwen-provider.test.ts b/test/agent/qwen-provider.test.ts index 7c9bf71de..93a0c8953 100644 --- a/test/agent/qwen-provider.test.ts +++ 
b/test/agent/qwen-provider.test.ts @@ -708,6 +708,55 @@ describe('QwenProvider', () => { expect(errors).toEqual(['bad request']); }); + it('retries a transient Premature close once before surfacing an error', async () => { + const provider = new QwenProvider(); + await provider.connect({}); + await provider.createSession({ sessionKey: 'sess-transient-retry', cwd: '/tmp/project' }); + + const errors: string[] = []; + const completed: string[] = []; + provider.onError((_sid, err) => errors.push(err.message)); + provider.onComplete((_sid, msg) => completed.push(msg.content)); + + await provider.send('sess-transient-retry', 'retry me'); + await waitForSpawnCount(1); + const first = lastSpawn(); + first.child.stdout.write(`${JSON.stringify({ type: 'result', is_error: true, error: { message: 'API Error: Premature close' } })}\n`); + await new Promise((resolve) => setTimeout(resolve, 350)); + await waitForSpawnCount(2); + + const second = lastSpawn(); + second.child.stdout.write(`${JSON.stringify({ type: 'assistant', message: { id: 'msg-retry-ok', content: [{ type: 'text', text: 'OK' }] } })}\n`); + second.child.emit('close', 0, null); + await flushIO(); + await flushIO(); + + expect(childProcessMock.spawn).toHaveBeenCalledTimes(2); + expect(completed).toEqual(['OK']); + expect(errors).toEqual([]); + }); + + it('does not retry transient errors after partial output has streamed', async () => { + const provider = new QwenProvider(); + await provider.connect({}); + await provider.createSession({ sessionKey: 'sess-transient-partial', cwd: '/tmp/project' }); + + const errors: string[] = []; + provider.onError((_sid, err) => errors.push(err.message)); + + await provider.send('sess-transient-partial', 'partial first'); + await waitForSpawnCount(1); + const run = lastSpawn(); + run.child.stdout.write(`${JSON.stringify({ type: 'stream_event', event: { type: 'message_start', message: { id: 'msg-partial' } } })}\n`); + run.child.stdout.write(`${JSON.stringify({ type: 'stream_event', 
event: { type: 'content_block_delta', delta: { type: 'text_delta', text: 'Par' } } })}\n`); + run.child.stdout.write(`${JSON.stringify({ type: 'result', is_error: true, error: { message: 'API Error: Premature close' } })}\n`); + await flushIO(); + await flushIO(); + + expect(childProcessMock.spawn).toHaveBeenCalledTimes(1); + expect(errors).toEqual(['API Error: Premature close']); + }); + it('cancel() terminates the child and emits a cancelled error', async () => { const provider = new QwenProvider(); await provider.connect({}); diff --git a/test/daemon/cc-presets.test.ts b/test/daemon/cc-presets.test.ts index b46c69265..2bbdc29d1 100644 --- a/test/daemon/cc-presets.test.ts +++ b/test/daemon/cc-presets.test.ts @@ -86,7 +86,7 @@ describe('cc presets', () => { anthropic: [ { id: 'MiniMax-M2.7', - name: 'minimax', + name: 'MiniMax-M2.7', envKey: 'ANTHROPIC_API_KEY', baseUrl: 'https://api.minimax.io/anthropic', generationConfig: { @@ -104,4 +104,37 @@ describe('cc presets', () => { expect(result.systemPrompt).toContain('https://api.minimax.io/anthropic'); expect(result.systemPrompt).toMatch(/not running on Qwen/i); }); + + it('uses discovered compatible-api models when building qwen transport config', async () => { + const { savePresets, getQwenPresetTransportConfig } = await import('../../src/daemon/cc-presets.js'); + + await savePresets([ + { + name: 'minimax', + env: { + ANTHROPIC_BASE_URL: 'https://api.minimax.io/anthropic', + ANTHROPIC_AUTH_TOKEN: 'test-token', + ANTHROPIC_MODEL: 'MiniMax-M2.7', + }, + defaultModel: 'MiniMax-M2.7', + availableModels: [ + { id: 'MiniMax-M2.7', name: 'MiniMax M2.7' }, + { id: 'MiniMax-Text-01' }, + ], + }, + ]); + + const result = await getQwenPresetTransportConfig('minimax'); + expect(result.model).toBe('MiniMax-M2.7'); + expect(result.availableModels).toEqual(['MiniMax-M2.7', 'MiniMax-Text-01']); + expect(result.settings).toMatchObject({ + model: { name: 'MiniMax-M2.7' }, + modelProviders: { + anthropic: [ + 
expect.objectContaining({ id: 'MiniMax-M2.7', name: 'MiniMax M2.7' }), + expect.objectContaining({ id: 'MiniMax-Text-01', name: 'MiniMax-Text-01' }), + ], + }, + }); + }); }); diff --git a/test/daemon/command-handler-timeline-history-parity.test.ts b/test/daemon/command-handler-timeline-history-parity.test.ts new file mode 100644 index 000000000..a3764d783 --- /dev/null +++ b/test/daemon/command-handler-timeline-history-parity.test.ts @@ -0,0 +1,136 @@ +import { beforeEach, describe, expect, it, vi } from 'vitest'; + +const { + readMock, + readPreferredMock, +} = vi.hoisted(() => ({ + readMock: vi.fn(), + readPreferredMock: vi.fn(), +})); + +vi.mock('../../src/store/session-store.js', () => ({ + listSessions: vi.fn(() => []), + getSession: vi.fn(() => null), + upsertSession: vi.fn(), + removeSession: vi.fn(), +})); + +vi.mock('../../src/agent/session-manager.js', () => ({ + startProject: vi.fn(), + stopProject: vi.fn(), + teardownProject: vi.fn(), + getTransportRuntime: vi.fn(() => undefined), + launchTransportSession: vi.fn(), + isProviderSessionBound: vi.fn(() => false), + persistSessionRecord: vi.fn(), + relaunchSessionWithSettings: vi.fn(), + stopTransportRuntimeSession: vi.fn(), +})); + +vi.mock('../../src/agent/tmux.js', () => ({ + sendKeys: vi.fn(), + sendKeysDelayedEnter: vi.fn(), + sendRawInput: vi.fn(), + resizeSession: vi.fn(), + sendKey: vi.fn(), + getPaneStartCommand: vi.fn(), +})); + +vi.mock('../../src/router/message-router.js', () => ({ routeMessage: vi.fn() })); +vi.mock('../../src/daemon/terminal-streamer.js', () => ({ terminalStreamer: { subscribe: vi.fn(), unsubscribe: vi.fn(), start: vi.fn(), stop: vi.fn() } })); +vi.mock('../../src/daemon/timeline-emitter.js', () => ({ timelineEmitter: { emit: vi.fn(), on: vi.fn(() => () => {}), off: vi.fn(), epoch: 5, replay: vi.fn(() => ({ events: [], truncated: false })) } })); +vi.mock('../../src/daemon/timeline-store.js', () => ({ + timelineStore: { + append: vi.fn(), + read: readMock, + readPreferred: 
readPreferredMock, + clear: vi.fn(), + }, +})); +vi.mock('../../src/daemon/subsession-manager.js', () => ({ startSubSession: vi.fn(), stopSubSession: vi.fn(), rebuildSubSessions: vi.fn(), detectShells: vi.fn().mockResolvedValue([]), readSubSessionResponse: vi.fn(), subSessionName: (id: string) => `deck_sub_${id}` })); +vi.mock('../../src/daemon/p2p-orchestrator.js', () => ({ startP2pRun: vi.fn(), cancelP2pRun: vi.fn(), getP2pRun: vi.fn(() => undefined), listP2pRuns: vi.fn(() => []), serializeP2pRun: vi.fn() })); +vi.mock('../../src/daemon/session-list.js', () => ({ buildSessionList: vi.fn(async () => []) })); +vi.mock('../../src/daemon/repo-handler.js', () => ({ handleRepoCommand: vi.fn() })); +vi.mock('../../src/daemon/file-transfer-handler.js', () => ({ handleFileUpload: vi.fn(), handleFileDownload: vi.fn(), createProjectFileHandle: vi.fn(), lookupAttachment: vi.fn(() => undefined) })); +vi.mock('../../src/daemon/preview-relay.js', () => ({ handlePreviewCommand: vi.fn() })); +vi.mock('../../src/daemon/provider-sessions.js', () => ({ listProviderSessions: vi.fn(() => []) })); +vi.mock('../../src/util/logger.js', () => ({ default: { info: vi.fn(), warn: vi.fn(), error: vi.fn(), debug: vi.fn() } })); +vi.mock('../../src/util/imc-dir.js', () => ({ ensureImcDir: vi.fn().mockResolvedValue('/tmp/imc'), imcSubDir: vi.fn((dir: string, sub: string) => `${dir}/.imc/${sub}`) })); +vi.mock('../../src/daemon/supervision-broker.js', () => ({ supervisionBroker: { decide: vi.fn() } })); +vi.mock('../../src/daemon/supervision-automation.js', () => ({ supervisionAutomation: { init: vi.fn(), setServerLink: vi.fn(), cancelSession: vi.fn(), queueTaskIntent: vi.fn(), updateQueuedTaskIntent: vi.fn(), removeQueuedTaskIntent: vi.fn(), registerTaskIntent: vi.fn(), applySnapshotUpdate: vi.fn() } })); + +import { handleWebCommand } from '../../src/daemon/command-handler.js'; + +const flushAsync = () => new Promise((resolve) => setTimeout(resolve, 0)); + +describe('command-handler 
timeline.history_request SQLite parity', () => { + const serverLink = { + send: vi.fn(), + sendBinary: vi.fn(), + sendTimelineEvent: vi.fn(), + daemonVersion: '0.1.0', + }; + + beforeEach(() => { + vi.clearAllMocks(); + readMock.mockReturnValue([]); + readPreferredMock.mockResolvedValue([ + { + eventId: 'user-1', + sessionId: 'deck_proj_brain', + ts: 100, + seq: 1, + epoch: 1, + source: 'daemon', + confidence: 'high', + type: 'user.message', + payload: { text: 'Question' }, + }, + { + eventId: 'state-1', + sessionId: 'deck_proj_brain', + ts: 101, + seq: 2, + epoch: 1, + source: 'daemon', + confidence: 'high', + type: 'session.state', + payload: { state: 'running' }, + }, + { + eventId: 'assistant-1', + sessionId: 'deck_proj_brain', + ts: 102, + seq: 3, + epoch: 1, + source: 'daemon', + confidence: 'high', + type: 'assistant.text', + payload: { text: 'Answer', streaming: false }, + }, + ]); + }); + + it('uses readPreferred for timeline.history_request while preserving current response shape', async () => { + handleWebCommand({ + type: 'timeline.history_request', + sessionName: 'deck_proj_brain', + requestId: 'req-history', + limit: 2, + }, serverLink as never); + await flushAsync(); + + expect(readPreferredMock).toHaveBeenCalledWith('deck_proj_brain', { limit: 12, afterTs: undefined, beforeTs: undefined }); + expect(readMock).not.toHaveBeenCalled(); + expect(serverLink.send).toHaveBeenCalledWith(expect.objectContaining({ + type: 'timeline.history', + sessionName: 'deck_proj_brain', + requestId: 'req-history', + events: expect.arrayContaining([ + expect.objectContaining({ eventId: 'user-1' }), + expect.objectContaining({ eventId: 'state-1' }), + expect.objectContaining({ eventId: 'assistant-1' }), + ]), + })); + }); +}); diff --git a/test/daemon/command-handler-timeline-history-projection.test.ts b/test/daemon/command-handler-timeline-history-projection.test.ts new file mode 100644 index 000000000..6bb781b45 --- /dev/null +++ 
b/test/daemon/command-handler-timeline-history-projection.test.ts @@ -0,0 +1,164 @@ +import { beforeEach, describe, expect, it, vi } from 'vitest'; + +const { + getSessionMock, + upsertSessionMock, + readPreferredMock, + exportOpenCodeSessionMock, + buildTimelineEventsFromOpenCodeExportMock, + buildSessionListMock, +} = vi.hoisted(() => ({ + getSessionMock: vi.fn(), + upsertSessionMock: vi.fn(), + readPreferredMock: vi.fn(), + exportOpenCodeSessionMock: vi.fn(), + buildTimelineEventsFromOpenCodeExportMock: vi.fn(), + buildSessionListMock: vi.fn(async () => []), +})); + +vi.mock('../../src/store/session-store.js', () => ({ + listSessions: vi.fn(() => []), + getSession: getSessionMock, + upsertSession: upsertSessionMock, + removeSession: vi.fn(), + updateSessionState: vi.fn(), +})); + +vi.mock('../../src/agent/session-manager.js', () => ({ + startProject: vi.fn(), + stopProject: vi.fn(), + teardownProject: vi.fn(), + getTransportRuntime: vi.fn(() => undefined), + launchTransportSession: vi.fn(), + isProviderSessionBound: vi.fn(() => false), + persistSessionRecord: vi.fn(), + relaunchSessionWithSettings: vi.fn(), + stopTransportRuntimeSession: vi.fn(), +})); + +vi.mock('../../src/agent/tmux.js', () => ({ + sendKeys: vi.fn(), + sendKeysDelayedEnter: vi.fn(), + sendRawInput: vi.fn(), + resizeSession: vi.fn(), + sendKey: vi.fn(), + getPaneStartCommand: vi.fn(), +})); + +vi.mock('../../src/router/message-router.js', () => ({ routeMessage: vi.fn() })); +vi.mock('../../src/daemon/terminal-streamer.js', () => ({ terminalStreamer: { subscribe: vi.fn(), unsubscribe: vi.fn(), start: vi.fn(), stop: vi.fn() } })); +vi.mock('../../src/daemon/timeline-emitter.js', () => ({ timelineEmitter: { emit: vi.fn(), on: vi.fn(() => () => {}), off: vi.fn(), epoch: 99, replay: vi.fn(() => ({ events: [], truncated: false })) } })); +vi.mock('../../src/daemon/timeline-store.js', () => ({ + timelineStore: { + append: vi.fn(), + read: vi.fn(() => []), + readPreferred: readPreferredMock, + 
readCompletedTextTail: vi.fn(), + readByTypesPreferred: vi.fn(), + getLatest: vi.fn(() => null), + getLatestPreferred: vi.fn(() => null), + clear: vi.fn(), + }, +})); +vi.mock('../../src/daemon/subsession-manager.js', () => ({ startSubSession: vi.fn(), stopSubSession: vi.fn(), rebuildSubSessions: vi.fn(), detectShells: vi.fn().mockResolvedValue([]), readSubSessionResponse: vi.fn(), subSessionName: (id: string) => `deck_sub_${id}` })); +vi.mock('../../src/daemon/p2p-orchestrator.js', () => ({ startP2pRun: vi.fn(), cancelP2pRun: vi.fn(), getP2pRun: vi.fn(() => undefined), listP2pRuns: vi.fn(() => []), serializeP2pRun: vi.fn() })); +vi.mock('../../src/daemon/session-list.js', () => ({ buildSessionList: buildSessionListMock })); +vi.mock('../../src/daemon/repo-handler.js', () => ({ handleRepoCommand: vi.fn() })); +vi.mock('../../src/daemon/file-transfer-handler.js', () => ({ handleFileUpload: vi.fn(), handleFileDownload: vi.fn(), createProjectFileHandle: vi.fn(), lookupAttachment: vi.fn(() => undefined) })); +vi.mock('../../src/daemon/preview-relay.js', () => ({ handlePreviewCommand: vi.fn() })); +vi.mock('../../src/daemon/provider-sessions.js', () => ({ listProviderSessions: vi.fn(() => []) })); +vi.mock('../../src/util/logger.js', () => ({ default: { info: vi.fn(), warn: vi.fn(), error: vi.fn(), debug: vi.fn() } })); +vi.mock('../../src/util/imc-dir.js', () => ({ ensureImcDir: vi.fn().mockResolvedValue('/tmp/imc'), imcSubDir: vi.fn((dir: string, sub: string) => `${dir}/.imc/${sub}`) })); +vi.mock('../../src/daemon/supervision-broker.js', () => ({ supervisionBroker: { decide: vi.fn() } })); +vi.mock('../../src/daemon/supervision-automation.js', () => ({ supervisionAutomation: { init: vi.fn(), setServerLink: vi.fn(), cancelSession: vi.fn(), queueTaskIntent: vi.fn(), updateQueuedTaskIntent: vi.fn(), removeQueuedTaskIntent: vi.fn(), registerTaskIntent: vi.fn(), applySnapshotUpdate: vi.fn() } })); +vi.mock('../../src/daemon/opencode-history.js', () => ({ + 
exportOpenCodeSession: exportOpenCodeSessionMock, + buildTimelineEventsFromOpenCodeExport: buildTimelineEventsFromOpenCodeExportMock, + discoverLatestOpenCodeSessionId: vi.fn(), +})); + +import { handleWebCommand } from '../../src/daemon/command-handler.js'; + +const flushAsync = () => new Promise((resolve) => setTimeout(resolve, 0)); + +describe('command-handler timeline history with SQLite-preferred reads', () => { + const serverLink = { + send: vi.fn(), + sendBinary: vi.fn(), + sendTimelineEvent: vi.fn(), + daemonVersion: '0.1.0', + }; + + beforeEach(() => { + vi.clearAllMocks(); + getSessionMock.mockReturnValue(undefined); + buildTimelineEventsFromOpenCodeExportMock.mockReturnValue([]); + exportOpenCodeSessionMock.mockResolvedValue({}); + }); + + it('uses readPreferred and preserves substantive budgeting plus session.state interleaving', async () => { + readPreferredMock.mockResolvedValue([ + { eventId: 's0', sessionId: 'deck_hist', ts: 1000, seq: 1, epoch: 1, source: 'daemon', confidence: 'high', type: 'session.state', payload: { state: 'idle' } }, + { eventId: 'u1', sessionId: 'deck_hist', ts: 1010, seq: 2, epoch: 1, source: 'daemon', confidence: 'high', type: 'user.message', payload: { text: 'hello' } }, + { eventId: 's1', sessionId: 'deck_hist', ts: 1020, seq: 3, epoch: 1, source: 'daemon', confidence: 'high', type: 'session.state', payload: { state: 'running' } }, + { eventId: 'a1', sessionId: 'deck_hist', ts: 1030, seq: 4, epoch: 1, source: 'daemon', confidence: 'high', type: 'assistant.text', payload: { text: 'world', streaming: false } }, + ]); + + handleWebCommand({ + type: 'timeline.history_request', + sessionName: 'deck_hist', + requestId: 'hist-1', + limit: 2, + }, serverLink as any); + await flushAsync(); + + expect(readPreferredMock).toHaveBeenCalledWith('deck_hist', { limit: 12, afterTs: undefined, beforeTs: undefined }); + expect(serverLink.send).toHaveBeenCalledWith(expect.objectContaining({ + type: 'timeline.history', + sessionName: 
'deck_hist', + requestId: 'hist-1', + epoch: 99, + events: [ + expect.objectContaining({ eventId: 'u1' }), + expect.objectContaining({ eventId: 's1' }), + expect.objectContaining({ eventId: 'a1' }), + ], + })); + }); + + it('keeps existing OpenCode synthesis/replacement behavior after SQLite-backed base retrieval', async () => { + readPreferredMock.mockResolvedValue([ + { eventId: 's0', sessionId: 'deck_oc', ts: 1000, seq: 1, epoch: 1, source: 'daemon', confidence: 'high', type: 'session.state', payload: { state: 'idle' } }, + ]); + getSessionMock.mockReturnValue({ + name: 'deck_oc', + agentType: 'opencode', + projectDir: '/tmp/project', + opencodeSessionId: 'oc-1', + }); + buildTimelineEventsFromOpenCodeExportMock.mockReturnValue([ + { eventId: 'u1', sessionId: 'deck_oc', ts: 1010, seq: 1, epoch: 99, source: 'daemon', confidence: 'high', type: 'user.message', payload: { text: 'hi' } }, + { eventId: 'a1', sessionId: 'deck_oc', ts: 1020, seq: 2, epoch: 99, source: 'daemon', confidence: 'high', type: 'assistant.text', payload: { text: 'hello', streaming: false } }, + ]); + + handleWebCommand({ + type: 'timeline.history_request', + sessionName: 'deck_oc', + requestId: 'hist-oc', + limit: 5, + }, serverLink as any); + await flushAsync(); + + expect(readPreferredMock).toHaveBeenCalledWith('deck_oc', { limit: 30, afterTs: undefined, beforeTs: undefined }); + expect(exportOpenCodeSessionMock).toHaveBeenCalledWith('/tmp/project', 'oc-1'); + expect(serverLink.send).toHaveBeenCalledWith(expect.objectContaining({ + type: 'timeline.history', + sessionName: 'deck_oc', + requestId: 'hist-oc', + events: [ + expect.objectContaining({ eventId: 'u1' }), + expect.objectContaining({ eventId: 'a1' }), + ], + })); + }); +}); diff --git a/test/daemon/file-change-normalizer.test.ts b/test/daemon/file-change-normalizer.test.ts index 7a4072092..9ea7cd5d3 100644 --- a/test/daemon/file-change-normalizer.test.ts +++ b/test/daemon/file-change-normalizer.test.ts @@ -97,6 +97,31 @@ 
describe('file-change-normalizer', () => { ]); }); + it('treats raw add diffs as file content instead of empty unified diff previews', () => { + const batch = normalizeCodexSdkFileChange({ + toolCallId: 'cx-add', + detail: { + input: { + changes: [ + { + path: 'src/new-file.ts', + kind: { type: 'add' }, + diff: 'export const created = true;\nconsole.log(created);\n', + }, + ], + }, + }, + }); + + expect(batch?.patches[0]).toEqual(expect.objectContaining({ + filePath: 'src/new-file.ts', + operation: 'create', + confidence: 'derived', + afterText: 'export const created = true;\nconsole.log(created);\n', + })); + expect(batch?.patches[0]?.unifiedDiff).toBeUndefined(); + }); + it('preserves explicit range metadata when providers include it directly', () => { const batch = normalizeOpenCodeFileChange({ id: 'oc-range', diff --git a/test/daemon/fs-list.test.ts b/test/daemon/fs-list.test.ts index eaf86e42e..0140b8ed3 100644 --- a/test/daemon/fs-list.test.ts +++ b/test/daemon/fs-list.test.ts @@ -19,9 +19,11 @@ const mockServerLink = { vi.mock('node:fs/promises', () => ({ readdir: vi.fn(), realpath: vi.fn(), + stat: vi.fn(), })); const mockReaddir = vi.mocked(fsp.readdir); const mockRealpath = vi.mocked(fsp.realpath); +const mockStat = vi.mocked(fsp.stat); // ── Pull the handler function out of command-handler indirectly ──────────── // We test via handleWebCommand to keep the test at the public API level. 
@@ -45,6 +47,7 @@ describe('fs.ls handler', () => { sent.length = 0; // Restore send implementation after clearAllMocks resets it mockServerLink.send.mockImplementation((msg: unknown) => { sent.push(msg); }); + mockStat.mockResolvedValue({ mtimeMs: 1, size: 0 } as any); }); afterEach(() => { @@ -171,6 +174,25 @@ describe('fs.ls handler', () => { expect(resp.entries.every((e: any) => e.isDir)).toBe(true); }); + it('reuses a hot directory listing cache for repeated requests', async () => { + const testDir = path.join(homedir(), 'cached-dir'); + mockRealpath.mockResolvedValue(testDir as unknown as string); + mockReaddir.mockResolvedValue([ + makeDirent('src', true), + makeDirent('README.md', false), + ] as unknown as fsp.Dirent[]); + + handleWebCommand({ type: 'fs.ls', path: testDir, requestId: 'req-cache-1', includeFiles: true }, mockServerLink as any); + await flushAsync(); + handleWebCommand({ type: 'fs.ls', path: testDir, requestId: 'req-cache-2', includeFiles: true }, mockServerLink as any); + await flushAsync(); + + expect(mockReaddir).toHaveBeenCalledTimes(1); + expect((sent[0] as any).status).toBe('ok'); + expect((sent[1] as any).status).toBe('ok'); + expect((sent[1] as any).entries.map((e: any) => e.name)).toEqual(['src', 'README.md']); + }); + it('includes files when includeFiles is true', async () => { const testDir = path.join(homedir(), 'test-dir'); mockRealpath.mockResolvedValue(testDir as unknown as string); diff --git a/test/daemon/hook-send.test.ts b/test/daemon/hook-send.test.ts index 609780af3..782eb88b3 100644 --- a/test/daemon/hook-send.test.ts +++ b/test/daemon/hook-send.test.ts @@ -62,7 +62,8 @@ function postSend(port: number, body: Record, headers?: Record< const data = JSON.stringify(body); const req = http.request({ hostname: '127.0.0.1', port, path: '/send', method: 'POST', - headers: { 'Content-Type': 'application/json', 'Content-Length': String(data.length), ...headers }, + agent: false, + headers: { 'Content-Type': 'application/json', 
'Content-Length': String(data.length), Connection: 'close', ...headers }, }, (res) => { let body = ''; res.on('data', (chunk) => { body += chunk; }); @@ -79,9 +80,9 @@ function postSend(port: number, body: Record, headers?: Record< function postRaw(port: number, path: string, body: string, contentType?: string): Promise<{ status: number; body: string }> { return new Promise((resolve, reject) => { - const headers: Record = { 'Content-Length': String(Buffer.byteLength(body)) }; + const headers: Record = { 'Content-Length': String(Buffer.byteLength(body)), Connection: 'close' }; if (contentType) headers['Content-Type'] = contentType; - const req = http.request({ hostname: '127.0.0.1', port, path, method: 'POST', headers }, (res) => { + const req = http.request({ hostname: '127.0.0.1', port, path, method: 'POST', headers, agent: false }, (res) => { let respBody = ''; res.on('data', (chunk) => { respBody += chunk; }); res.on('end', () => resolve({ status: res.statusCode!, body: respBody })); @@ -127,8 +128,10 @@ describe('Hook server /send endpoint', () => { port = result.port; }); - afterEach(() => { - server.close(); + afterEach(async () => { + await new Promise((resolve) => { + server.close(() => resolve()); + }); }); // ── Content-Type validation ────────────────────────────────────────────── diff --git a/test/daemon/opencode-watcher.test.ts b/test/daemon/opencode-watcher.test.ts index c3ad4c734..f3e7a78d7 100644 --- a/test/daemon/opencode-watcher.test.ts +++ b/test/daemon/opencode-watcher.test.ts @@ -28,7 +28,7 @@ vi.mock('../../src/daemon/timeline-emitter.js', () => ({ })); vi.mock('../../src/daemon/timeline-store.js', () => ({ - timelineStore: { read: mocks.timelineRead }, + timelineStore: { readPreferred: mocks.timelineRead, read: mocks.timelineRead }, })); import { startWatching, stopWatching, isWatching, __testOnly } from '../../src/daemon/opencode-watcher.js'; @@ -41,7 +41,7 @@ describe('opencode-watcher', () => { 
mocks.readOpenCodeSessionMessagesSince.mockResolvedValue([]); mocks.buildTimelineEventsFromOpenCodeExport.mockReturnValue([]); mocks.discoverLatestOpenCodeSessionId.mockResolvedValue(undefined); - mocks.timelineRead.mockReturnValue([]); + mocks.timelineRead.mockResolvedValue([]); }); afterEach(() => { @@ -50,7 +50,7 @@ describe('opencode-watcher', () => { }); it('starts polling without replaying full history, then emits only new delta events', async () => { - mocks.timelineRead.mockReturnValue([{ type: 'assistant.text' }]); + mocks.timelineRead.mockResolvedValue([{ type: 'assistant.text' }]); await startWatching('deck_sub_oc', '/proj', 'sid-1'); expect(isWatching('deck_sub_oc')).toBe(true); @@ -100,7 +100,7 @@ describe('opencode-watcher', () => { it('bootstraps from earliest timeline user message when store createdAt is too new', async () => { mocks.getSession.mockReturnValue({ name: 'deck_sub_oc', projectDir: '/proj', opencodeSessionId: 'sid-1', createdAt: 900 }); - mocks.timelineRead.mockReturnValue([ + mocks.timelineRead.mockResolvedValue([ { type: 'user.message', ts: 600 }, { type: 'command.ack', ts: 601 }, { type: 'session.state', ts: 950 }, @@ -127,7 +127,7 @@ describe('opencode-watcher', () => { it('rebinds fresh session to latest sqlite session when store is still pinned to an older opencode session', async () => { mocks.getSession.mockReturnValue({ name: 'deck_sub_oc', projectDir: '/proj', opencodeSessionId: 'sid-old', createdAt: 900 }); - mocks.timelineRead.mockReturnValue([ + mocks.timelineRead.mockResolvedValue([ { type: 'user.message', ts: 1000 }, { type: 'command.ack', ts: 1001 }, ]); @@ -159,7 +159,7 @@ describe('opencode-watcher', () => { it('uses latest structured timeline timestamp to fetch missing assistant delta after restart', async () => { - mocks.timelineRead.mockReturnValue([ + mocks.timelineRead.mockResolvedValue([ { type: 'assistant.text', ts: 700 }, { type: 'user.message', ts: 1000 }, { type: 'command.ack', ts: 1001 }, @@ -185,7 +185,7 @@ 
describe('opencode-watcher', () => { }); it('does not advance cursor past assistant rows that exist before their parts are committed', async () => { - mocks.timelineRead.mockReturnValue([{ type: 'assistant.text', ts: 700 }]); + mocks.timelineRead.mockResolvedValue([{ type: 'assistant.text', ts: 700 }]); mocks.readOpenCodeSessionMessagesSince .mockResolvedValueOnce([ { info: { id: 'm-user', role: 'user', time: { created: 1000 } }, parts: [] }, diff --git a/test/daemon/sdk-transport-restore.test.ts b/test/daemon/sdk-transport-restore.test.ts index 8d9c0be5a..231aae10d 100644 --- a/test/daemon/sdk-transport-restore.test.ts +++ b/test/daemon/sdk-transport-restore.test.ts @@ -6,7 +6,8 @@ const mocks = vi.hoisted(() => { const store = new Map>(); const claudeRuns: Array<{ options: Record; prompt: string }> = []; const codexRuns: Array<{ mode: 'start' | 'resume'; id: string | null; options: Record; input: string }> = []; - return { store, claudeRuns, codexRuns }; + const claudeFailures = new Map(); + return { store, claudeRuns, codexRuns, claudeFailures }; }); const timelineEmitterEmitMock = vi.hoisted(() => vi.fn()); @@ -79,6 +80,13 @@ vi.mock('@anthropic-ai/claude-agent-sdk', () => ({ query: vi.fn(({ prompt, options }: { prompt: string; options: Record }) => { mocks.claudeRuns.push({ prompt, options }); async function* gen() { + if (prompt.includes('[transport-retry-once]')) { + const seen = mocks.claudeFailures.get(prompt) ?? 0; + mocks.claudeFailures.set(prompt, seen + 1); + if (seen === 0) { + throw new Error('simulated transport failure'); + } + } yield { type: 'system', subtype: 'init', session_id: String(options.resume ?? options.sessionId), model: 'claude-sonnet-4-6' }; yield { type: 'result', subtype: 'success', is_error: false, session_id: String(options.resume ?? options.sessionId), result: prompt.includes('token') ? 
'BANANA' : 'ACK', usage: { input_tokens: 11, output_tokens: 2, cache_read_input_tokens: 0 } }; } @@ -137,6 +145,7 @@ vi.mock('../../src/agent/brain-dispatcher.js', () => ({ BrainDispatcher: vi.fn() import { connectProvider, disconnectAll } from '../../src/agent/provider-registry.js'; import { getTransportRuntime, launchTransportSession, relaunchSessionWithSettings, restoreTransportSessions, setSessionEventCallback } from '../../src/agent/session-manager.js'; import { newSession } from '../../src/agent/tmux.js'; +import { getResendCount } from '../../src/daemon/transport-resend-queue.js'; const flush = async () => { for (let i = 0; i < 4; i++) await new Promise((resolve) => setTimeout(resolve, 0)); @@ -149,6 +158,7 @@ describe('sdk transport session restore', () => { mocks.store.clear(); mocks.claudeRuns.length = 0; mocks.codexRuns.length = 0; + mocks.claudeFailures.clear(); setSessionEventCallback(() => {}); }); @@ -273,6 +283,43 @@ describe('sdk transport session restore', () => { expect(onSessionEvent).toHaveBeenCalledWith('started', 'deck_sdk_new_brain', 'idle'); }); + it('auto-restarts an errored transport runtime and replays the failed turn', async () => { + await connectProvider('claude-code-sdk', {}); + await launchTransportSession({ + name: 'deck_sdk_retry_brain', + projectName: 'sdkretry', + role: 'brain', + agentType: 'claude-code-sdk', + projectDir: '/tmp/sdk-retry', + requestedModel: 'sonnet', + ccSessionId: 'cc-session-retry', + }); + + const firstRuntime = getTransportRuntime('deck_sdk_retry_brain'); + expect(firstRuntime).toBeDefined(); + + firstRuntime!.send('Please retry me [transport-retry-once]', 'cmd-retry-1'); + + const deadline = Date.now() + 10_000; + while (Date.now() < deadline) { + if (mocks.claudeRuns.length >= 2 && getResendCount('deck_sdk_retry_brain') === 0) break; + await flush(); + } + + expect(mocks.claudeRuns).toHaveLength(2); + expect(mocks.claudeRuns[0]).toMatchObject({ + prompt: 'Please retry me [transport-retry-once]', + 
options: expect.objectContaining({ resume: 'cc-session-retry' }), + }); + expect(mocks.claudeRuns[1]).toMatchObject({ + prompt: 'Please retry me [transport-retry-once]', + options: expect.objectContaining({ resume: 'cc-session-retry' }), + }); + expect(getResendCount('deck_sdk_retry_brain')).toBe(0); + expect(getTransportRuntime('deck_sdk_retry_brain')).toBeDefined(); + expect(getTransportRuntime('deck_sdk_retry_brain')).not.toBe(firstRuntime); + }); + it('emits startup memory.context when the first transport turn carries the seeded memory', { timeout: 30_000 }, async () => { // NOTE: the "Historical context · injected" card is emitted at the same // commit boundary as the persisted `startupMemoryInjected` flag — i.e. diff --git a/test/daemon/session-list.test.ts b/test/daemon/session-list.test.ts index 253f58d76..3ccc0e20b 100644 --- a/test/daemon/session-list.test.ts +++ b/test/daemon/session-list.test.ts @@ -158,6 +158,8 @@ describe('buildSessionList', () => { getPreset: vi.fn(async (name: string) => name === 'minimax' ? { name: 'minimax', env: { ANTHROPIC_MODEL: 'MiniMax-M2.7' } } : undefined), + getPresetEffectiveModel: vi.fn((preset: { env?: Record }) => preset.env?.ANTHROPIC_MODEL), + getPresetAvailableModelIds: vi.fn((preset: { env?: Record }) => preset.env?.ANTHROPIC_MODEL ? 
[preset.env.ANTHROPIC_MODEL] : []), })); const { buildSessionList } = await import('../../src/daemon/session-list.js'); @@ -175,6 +177,51 @@ describe('buildSessionList', () => { expect(sessions[0].quotaUsageLabel).toBeUndefined(); }); + it('preset-backed qwen sessions keep discovered model lists and active selected model', async () => { + const store = await import('../../src/store/session-store.js'); + store.upsertSession({ + name: 'deck_qwen_multi_brain', + projectName: 'demo', + role: 'brain', + agentType: 'qwen', + runtimeType: 'transport', + providerId: 'qwen', + providerSessionId: 'sid-preset-multi', + state: 'idle', + restarts: 0, + restartTimestamps: [], + createdAt: Date.now(), + updatedAt: Date.now(), + ccPreset: 'minimax', + qwenModel: 'MiniMax-Text-01', + qwenAvailableModels: ['coder-model'], + }); + + vi.doMock('../../src/daemon/cc-presets.js', () => ({ + getPreset: vi.fn(async () => ({ + name: 'minimax', + env: { ANTHROPIC_MODEL: 'MiniMax-M2.7' }, + defaultModel: 'MiniMax-M2.7', + availableModels: [ + { id: 'MiniMax-M2.7', name: 'MiniMax M2.7' }, + { id: 'MiniMax-Text-01' }, + ], + })), + getPresetEffectiveModel: vi.fn((preset: { defaultModel?: string; env?: Record }) => preset.defaultModel ?? preset.env?.ANTHROPIC_MODEL), + getPresetAvailableModelIds: vi.fn((preset: { availableModels?: Array<{ id: string }> }) => preset.availableModels?.map((item) => item.id) ?? 
[]), + })); + + const { buildSessionList } = await import('../../src/daemon/session-list.js'); + const sessions = await buildSessionList(); + expect(sessions[0]).toMatchObject({ + qwenAuthType: 'api-key', + qwenAvailableModels: ['MiniMax-M2.7', 'MiniMax-Text-01'], + qwenModel: 'MiniMax-Text-01', + modelDisplay: 'MiniMax-Text-01', + planLabel: 'BYO', + }); + }); + it('preserves the session transportConfig snapshot in the list surface', async () => { const store = await import('../../src/store/session-store.js'); store.upsertSession({ diff --git a/test/daemon/subsession-manager.test.ts b/test/daemon/subsession-manager.test.ts index d48e75da9..fde578830 100644 --- a/test/daemon/subsession-manager.test.ts +++ b/test/daemon/subsession-manager.test.ts @@ -23,7 +23,7 @@ const { getDriverMock: vi.fn(), getSessionMock: vi.fn(() => null), capturePaneMock: vi.fn().mockResolvedValue([]), - timelineReadMock: vi.fn(() => []), + timelineReadMock: vi.fn(() => Promise.resolve([])), geminiStartWatchingMock: vi.fn().mockResolvedValue(undefined), geminiIsWatchingMock: vi.fn().mockReturnValue(false), codexStartWatchingByIdMock: vi.fn().mockResolvedValue(undefined), @@ -115,7 +115,7 @@ vi.mock('../../src/daemon/memory-inject.js', () => ({ })); vi.mock('../../src/daemon/timeline-store.js', () => ({ - timelineStore: { read: timelineReadMock, append: vi.fn() }, + timelineStore: { readPreferred: timelineReadMock, read: timelineReadMock, append: vi.fn() }, })); vi.mock('../../src/daemon/timeline-emitter.js', () => ({ @@ -515,7 +515,7 @@ describe('readSubSessionResponse()', () => { vi.clearAllMocks(); sessionExistsMock.mockResolvedValue(true); capturePaneMock.mockResolvedValue(['still running']); - timelineReadMock.mockReturnValue([ + timelineReadMock.mockResolvedValue([ { type: 'user.message', payload: { text: 'hi' } }, { type: 'assistant.text', payload: { text: 'done' } }, ]); diff --git a/test/daemon/timeline-projection.test.ts b/test/daemon/timeline-projection.test.ts new file mode 100644 
index 000000000..cccdac52e --- /dev/null +++ b/test/daemon/timeline-projection.test.ts @@ -0,0 +1,128 @@ +import { afterEach, describe, expect, it, vi } from 'vitest'; +import { appendFileSync, mkdirSync, mkdtempSync, rmSync } from 'node:fs'; +import { join } from 'node:path'; +import { tmpdir } from 'node:os'; + +import type { TimelineEvent } from '../../src/daemon/timeline-event.js'; + +const originalHome = process.env.HOME; +const originalUserProfile = process.env.USERPROFILE; +const originalDbPath = process.env.IMCODES_TIMELINE_PROJECTION_DB_PATH; + +function makeEvent(sessionId: string, seq: number, type: TimelineEvent['type'], payload: Record, ts = seq): TimelineEvent { + return { + eventId: `${sessionId}-${seq}-${type}`, + sessionId, + ts, + seq, + epoch: 1, + source: 'daemon', + confidence: 'high', + type, + payload, + }; +} + +describe('timeline projection', () => { + let tempHome: string | null = null; + let dbPath: string | null = null; + let importedProjection: typeof import('../../src/daemon/timeline-projection.js').timelineProjection | null = null; + + afterEach(async () => { + if (importedProjection) { + await importedProjection.shutdown(); + } + importedProjection = null; + vi.restoreAllMocks(); + vi.resetModules(); + if (originalHome === undefined) delete process.env.HOME; + else process.env.HOME = originalHome; + if (originalUserProfile === undefined) delete process.env.USERPROFILE; + else process.env.USERPROFILE = originalUserProfile; + if (originalDbPath === undefined) delete process.env.IMCODES_TIMELINE_PROJECTION_DB_PATH; + else process.env.IMCODES_TIMELINE_PROJECTION_DB_PATH = originalDbPath; + if (tempHome) rmSync(tempHome, { recursive: true, force: true }); + tempHome = null; + dbPath = null; + }); + + async function loadModules() { + tempHome = mkdtempSync(join(tmpdir(), 'imcodes-timeline-projection-')); + dbPath = join(tempHome, '.imcodes', 'timeline-projection.sqlite'); + process.env.HOME = tempHome; + process.env.USERPROFILE = tempHome; 
+ process.env.IMCODES_TIMELINE_PROJECTION_DB_PATH = dbPath; + const [{ timelineProjection }, { timelineStore }] = await Promise.all([ + import('../../src/daemon/timeline-projection.js'), + import('../../src/daemon/timeline-store.js'), + ]); + importedProjection = timelineProjection; + return { timelineProjection, timelineStore }; + } + + it('preserves append order for equal-ts events and honors afterTs / beforeTs exclusivity', async () => { + const { timelineProjection, timelineStore } = await loadModules(); + const sessionId = 'projection_order'; + timelineStore.append(makeEvent(sessionId, 1, 'assistant.text', { text: 'first' }, 1000)); + timelineStore.append(makeEvent(sessionId, 2, 'assistant.text', { text: 'second' }, 1000)); + timelineStore.append(makeEvent(sessionId, 3, 'assistant.text', { text: 'third' }, 1000)); + timelineStore.append(makeEvent(sessionId, 4, 'assistant.text', { text: 'fourth' }, 1001)); + + await timelineProjection.rebuildSession(sessionId); + + const full = await timelineStore.readPreferred(sessionId, { limit: 10 }); + expect(full.map((event) => event.seq)).toEqual([1, 2, 3, 4]); + + const after = await timelineStore.readPreferred(sessionId, { afterTs: 1000, limit: 10 }); + expect(after.map((event) => event.seq)).toEqual([4]); + + const before = await timelineStore.readPreferred(sessionId, { beforeTs: 1001, limit: 10 }); + expect(before.map((event) => event.seq)).toEqual([1, 2, 3]); + }); + + it('returns completed text tail only for non-empty completed text events', async () => { + const { timelineProjection, timelineStore } = await loadModules(); + const sessionId = 'projection_text_tail'; + timelineStore.append(makeEvent(sessionId, 1, 'user.message', { text: 'hello user' }, 1000)); + timelineStore.append(makeEvent(sessionId, 2, 'assistant.text', { text: 'typing', streaming: true }, 1001)); + timelineStore.append(makeEvent(sessionId, 3, 'assistant.text', { text: 'done', streaming: false }, 1002)); + 
timelineStore.append(makeEvent(sessionId, 4, 'assistant.text', { text: ' ', streaming: false }, 1003)); + timelineStore.append(makeEvent(sessionId, 5, 'tool.call', { tool: 'search' }, 1004)); + + await timelineProjection.rebuildSession(sessionId); + + const tail = await timelineStore.readCompletedTextTail(sessionId, 10); + expect(tail.map((event) => `${event.type}:${String(event.payload.text ?? '')}`)).toEqual([ + 'user.message:hello user', + 'assistant.text:done', + ]); + + const typed = await timelineStore.readByTypesPreferred(sessionId, ['tool.call', 'assistant.text'], { limit: 10 }); + expect(typed.map((event) => event.seq)).toEqual([2, 3, 4, 5]); + }); + + it('rebuilds stale sessions and prunes to authoritative truncation', async () => { + const { timelineProjection, timelineStore } = await loadModules(); + const sessionId = 'projection_stale'; + const timelineFile = timelineStore.filePath(sessionId); + mkdirSync(join(tempHome!, '.imcodes', 'timeline'), { recursive: true }); + + timelineStore.append(makeEvent(sessionId, 1, 'assistant.text', { text: 'one' }, 1000)); + timelineStore.append(makeEvent(sessionId, 2, 'assistant.text', { text: 'two' }, 1001)); + await timelineProjection.rebuildSession(sessionId); + + appendFileSync(timelineFile, `${JSON.stringify(makeEvent(sessionId, 3, 'assistant.text', { text: 'three' }, 1002))}\n`); + const rebuilt = await timelineStore.readPreferred(sessionId, { limit: 10 }); + expect(rebuilt.map((event) => event.seq)).toEqual([1, 2, 3]); + + timelineStore.truncate(sessionId, 2); + await timelineProjection.pruneSessionToAuthoritative(sessionId, 2); + + const pruned = await timelineStore.readPreferred(sessionId, { limit: 10 }); + expect(pruned.map((event) => event.seq)).toEqual([2, 3]); + + await timelineProjection.deleteSession(sessionId); + const rebuiltFromAuthoritative = await timelineProjection.queryHistory({ sessionId, limit: 10 }); + expect(rebuiltFromAuthoritative?.map((event) => event.seq)).toEqual([2, 3]); + }); +}); 
diff --git a/test/daemon/timeline-store.projection-fallback.test.ts b/test/daemon/timeline-store.projection-fallback.test.ts new file mode 100644 index 000000000..d9791bd1e --- /dev/null +++ b/test/daemon/timeline-store.projection-fallback.test.ts @@ -0,0 +1,194 @@ +import { afterEach, describe, expect, it, vi } from 'vitest'; +import { mkdirSync, mkdtempSync, rmSync, writeFileSync } from 'node:fs'; +import { join } from 'node:path'; +import { tmpdir } from 'node:os'; + +const projectionMocks = vi.hoisted(() => ({ + queryHistory: vi.fn(), + queryByTypes: vi.fn(), + queryCompletedTextTail: vi.fn(), + getLatest: vi.fn(), + recordAppendedEvent: vi.fn(), + pruneSessionToAuthoritative: vi.fn(), + deleteSession: vi.fn(), +})); + +vi.mock('../../src/daemon/timeline-projection.js', () => ({ + timelineProjection: projectionMocks, +})); + +describe('timeline-store projection fallbacks', () => { + const originalHome = process.env.HOME; + const originalUserProfile = process.env.USERPROFILE; + let tempHome: string | null = null; + + afterEach(() => { + vi.clearAllMocks(); + vi.resetModules(); + if (originalHome === undefined) delete process.env.HOME; + else process.env.HOME = originalHome; + if (originalUserProfile === undefined) delete process.env.USERPROFILE; + else process.env.USERPROFILE = originalUserProfile; + if (tempHome) rmSync(tempHome, { recursive: true, force: true }); + tempHome = null; + }); + + async function loadStoreWithHistory(lines: Array>, sessionId = 'fallback-session') { + tempHome = mkdtempSync(join(tmpdir(), 'imcodes-timeline-projection-fallback-')); + process.env.HOME = tempHome; + process.env.USERPROFILE = tempHome; + const filePath = join(tempHome, '.imcodes', 'timeline', `${sessionId}.jsonl`); + mkdirSync(join(tempHome, '.imcodes', 'timeline'), { recursive: true }); + writeFileSync(filePath, `${lines.map((line) => JSON.stringify(line)).join('\n')}\n`, 'utf8'); + const { timelineStore } = await import('../../src/daemon/timeline-store.js'); + return { 
timelineStore, sessionId }; + } + + it('does not mirror into the projection when the authoritative JSONL append fails', async () => { + tempHome = mkdtempSync(join(tmpdir(), 'imcodes-timeline-projection-fallback-')); + process.env.HOME = tempHome; + process.env.USERPROFILE = tempHome; + + vi.doMock('fs', async () => { + const actual = await vi.importActual('fs'); + return { + ...actual, + appendFileSync: vi.fn(() => { + throw new Error('append failed'); + }), + }; + }); + + const { timelineStore } = await import('../../src/daemon/timeline-store.js'); + timelineStore.append({ + eventId: 'evt-fail', + sessionId: 'append-failure', + ts: 1, + seq: 1, + epoch: 1, + source: 'daemon', + confidence: 'high', + type: 'assistant.text', + payload: { text: 'nope', streaming: false }, + }); + + expect(projectionMocks.recordAppendedEvent).not.toHaveBeenCalled(); + }); + + it('falls back to JSONL for readPreferred when projection history is unavailable', async () => { + projectionMocks.queryHistory.mockResolvedValue(null); + const sessionId = 'fallback-session'; + const { timelineStore } = await loadStoreWithHistory([ + { + eventId: 'evt-1', + sessionId, + ts: 1, + seq: 1, + epoch: 1, + source: 'daemon', + confidence: 'high', + type: 'user.message', + payload: { text: 'hi' }, + }, + { + eventId: 'evt-2', + sessionId, + ts: 2, + seq: 2, + epoch: 1, + source: 'daemon', + confidence: 'high', + type: 'assistant.text', + payload: { text: 'hello', streaming: false }, + }, + ], sessionId); + + const events = await timelineStore.readPreferred(sessionId, { limit: 10 }); + expect(events.map((event) => event.eventId)).toEqual(['evt-1', 'evt-2']); + expect(projectionMocks.queryHistory).toHaveBeenCalledWith({ + sessionId, + afterTs: undefined, + beforeTs: undefined, + limit: 10, + }); + }); + + it('falls back to JSONL for typed reads and completed text tails', async () => { + projectionMocks.queryByTypes.mockResolvedValue(null); + projectionMocks.queryCompletedTextTail.mockResolvedValue(null); 
+ + const sessionId = 'fallback-session'; + const { timelineStore } = await loadStoreWithHistory([ + { + eventId: 'evt-1', + sessionId, + ts: 10, + seq: 1, + epoch: 7, + source: 'daemon', + confidence: 'high', + type: 'tool.call', + payload: { tool: 'Read' }, + }, + { + eventId: 'evt-2', + sessionId, + ts: 11, + seq: 2, + epoch: 7, + source: 'daemon', + confidence: 'high', + type: 'user.message', + payload: { text: 'question' }, + }, + { + eventId: 'evt-3', + sessionId, + ts: 12, + seq: 3, + epoch: 7, + source: 'daemon', + confidence: 'high', + type: 'assistant.text', + payload: { text: 'answer', streaming: false }, + }, + ], sessionId); + + const typed = await timelineStore.readByTypesPreferred(sessionId, ['assistant.text'], { limit: 10 }); + expect(typed.map((event) => event.eventId)).toEqual(['evt-3']); + + const completed = await timelineStore.readCompletedTextTail(sessionId, 10); + expect(completed.map((event) => event.eventId)).toEqual(['evt-2', 'evt-3']); + }); + + it('falls back to JSONL latest markers when the projection returns null without throwing', async () => { + projectionMocks.getLatest.mockResolvedValue(null); + const sessionId = 'fallback-session'; + const { timelineStore } = await loadStoreWithHistory([ + { + eventId: 'evt-1', + sessionId, + ts: 10, + seq: 1, + epoch: 7, + source: 'daemon', + confidence: 'high', + type: 'user.message', + payload: { text: 'question' }, + }, + { + eventId: 'evt-3', + sessionId, + ts: 12, + seq: 3, + epoch: 7, + source: 'daemon', + confidence: 'high', + type: 'assistant.text', + payload: { text: 'answer', streaming: false }, + }, + ], sessionId); + const latest = await timelineStore.getLatestPreferred(sessionId); + expect(latest).toEqual({ epoch: 7, seq: 3 }); + }); +}); diff --git a/test/daemon/timeline-store.tail-truncate.test.ts b/test/daemon/timeline-store.tail-truncate.test.ts index b81ba08a7..3d0e7b70f 100644 --- a/test/daemon/timeline-store.tail-truncate.test.ts +++ 
b/test/daemon/timeline-store.tail-truncate.test.ts @@ -7,8 +7,13 @@ describe('timeline-store truncate', () => { const originalHome = process.env.HOME; const originalUserProfile = process.env.USERPROFILE; let tempHome: string | null = null; + let importedProjection: typeof import('../../src/daemon/timeline-projection.js').timelineProjection | null = null; - afterEach(() => { + afterEach(async () => { + if (importedProjection) { + await importedProjection.shutdown(); + } + importedProjection = null; vi.restoreAllMocks(); vi.resetModules(); if (originalHome === undefined) delete process.env.HOME; @@ -33,7 +38,11 @@ describe('timeline-store truncate', () => { }), }; }); - const { timelineStore } = await import('../../src/daemon/timeline-store.js'); + const [{ timelineStore }, { timelineProjection }] = await Promise.all([ + import('../../src/daemon/timeline-store.js'), + import('../../src/daemon/timeline-projection.js'), + ]); + importedProjection = timelineProjection; const filePath = join(tempHome, '.imcodes', 'timeline', 'oversized_session.jsonl'); mkdirSync(join(tempHome, '.imcodes', 'timeline'), { recursive: true }); @@ -56,7 +65,11 @@ describe('timeline-store truncate', () => { process.env.HOME = tempHome; process.env.USERPROFILE = tempHome; - const { timelineStore } = await import('../../src/daemon/timeline-store.js'); + const [{ timelineStore }, { timelineProjection }] = await Promise.all([ + import('../../src/daemon/timeline-store.js'), + import('../../src/daemon/timeline-projection.js'), + ]); + importedProjection = timelineProjection; const filePath = join(tempHome, '.imcodes', 'timeline', 'tail_read_session.jsonl'); mkdirSync(join(tempHome, '.imcodes', 'timeline'), { recursive: true }); diff --git a/test/daemon/transport-session-runtime.test.ts b/test/daemon/transport-session-runtime.test.ts index 73d2a49a7..e0a13ca61 100644 --- a/test/daemon/transport-session-runtime.test.ts +++ b/test/daemon/transport-session-runtime.test.ts @@ -143,6 +143,20 @@ 
describe('TransportSessionRuntime', () => { expect(mock.provider.send).toHaveBeenCalledTimes(1); }); + it('tracks the active dispatch payload for restart-based replay', async () => { + runtime.send('retry me', 'msg-retry'); + await flushDispatch(); + + expect(runtime.activeDispatchEntries).toEqual([ + { clientMessageId: 'msg-retry', text: 'retry me' }, + ]); + + mock.fireError('sess-1'); + expect(runtime.activeDispatchEntries).toEqual([ + { clientMessageId: 'msg-retry', text: 'retry me' }, + ]); + }); + it('send() merges description and runtime prompt into normalized systemText', async () => { const r = new TransportSessionRuntime(mock.provider, 'x'); await r.initialize({ ...defaultConfig, description: 'expert', systemPrompt: 'runtime only' }); @@ -934,6 +948,7 @@ describe('TransportSessionRuntime', () => { expect(runtime.sending).toBe(false); expect(runtime.pendingCount).toBe(0); expect(runtime.pendingEntries).toEqual([]); + expect(runtime.activeDispatchEntries).toEqual([]); }); it('getHistory() returns a copy', () => { diff --git a/test/e2e/active-timeline-refresh.test.ts b/test/e2e/active-timeline-refresh.test.ts new file mode 100644 index 000000000..b7c69c420 --- /dev/null +++ b/test/e2e/active-timeline-refresh.test.ts @@ -0,0 +1,34 @@ +/** + * E2E gate for the native-resume timeline refresh chain. + * + * The root e2e project does not own the web app's Preact/jsdom dependency + * graph, so the actual activation-chain test lives under `web/test/`. + * This wrapper runs that test under the web Vitest config as part of the + * existing `npm run test:e2e` workflow, so the e2e stage still fails if the + * browser-side resume -> HTTP backfill chain regresses. + */ +import { describe, expect, it } from 'vitest'; +import { execFileSync } from 'node:child_process'; +import { join } from 'node:path'; + +function runWebActivationChainTest(): void { + const npxBin = process.platform === 'win32' ? 
'npx.cmd' : 'npx'; + execFileSync( + npxBin, + ['vitest', 'run', 'test/app-resume-refresh.test.tsx'], + { + cwd: join(process.cwd(), 'web'), + stdio: 'inherit', + env: { + ...process.env, + CI: process.env.CI ?? '1', + }, + }, + ); +} + +describe('active timeline refresh e2e gate', () => { + it('passes the web activation-chain test under the real web test config', () => { + expect(runWebActivationChainTest).not.toThrow(); + }, 60_000); +}); diff --git a/test/e2e/qwen-transport-flow.test.ts b/test/e2e/qwen-transport-flow.test.ts index f419b15d8..f59e842f6 100644 --- a/test/e2e/qwen-transport-flow.test.ts +++ b/test/e2e/qwen-transport-flow.test.ts @@ -172,6 +172,7 @@ vi.mock('../../src/daemon/cc-presets.js', () => ({ OPENAI_API_KEY: 'test-token', }, model: 'MiniMax-M2.7', + availableModels: ['MiniMax-M2.7'], contextWindow: 200000, settings: { security: { auth: { selectedType: 'anthropic' } }, @@ -191,8 +192,15 @@ vi.mock('../../src/daemon/cc-presets.js', () => ({ getPreset: vi.fn(async (presetName: string) => presetName === 'MiniMax' ? ({ name: 'MiniMax', env: { ANTHROPIC_MODEL: 'MiniMax-M2.7' }, + defaultModel: 'MiniMax-M2.7', + availableModels: [{ id: 'MiniMax-M2.7', name: 'minimax' }], contextWindow: 200000, }) : null), + getPresetEffectiveModel: vi.fn((preset: { defaultModel?: string; env?: Record }) => preset.defaultModel ?? preset.env?.ANTHROPIC_MODEL), + getPresetAvailableModelIds: vi.fn((preset: { availableModels?: Array<{ id: string }>; defaultModel?: string; env?: Record }) => { + const discovered = preset.availableModels?.map((item) => item.id) ?? []; + return discovered.length > 0 ? discovered : (preset.defaultModel ?? preset.env?.ANTHROPIC_MODEL ? [preset.defaultModel ?? String(preset.env?.ANTHROPIC_MODEL)] : []); + }), getCachedPresetContextWindow: vi.fn((presetName: string) => presetName === 'MiniMax' ? 
200000 : undefined), })); diff --git a/web/src/api.ts b/web/src/api.ts index 61edba2f2..ea1926d50 100644 --- a/web/src/api.ts +++ b/web/src/api.ts @@ -784,6 +784,45 @@ export async function fetchTimelineHistoryHttp( } } +export interface TimelineTextTailItem { + eventId: string; + ts: number; + type: 'user.message' | 'assistant.text'; + text: string; + source?: string; + confidence?: string; +} + +/** + * Fetch the PostgreSQL-backed recent text-tail cache for one session. + * + * This is a non-authoritative bootstrap path intended to surface the latest + * completed text messages quickly while the existing WS/full-history flow + * continues to reconcile authoritative state. + * + * Returns null (not throw) on expected transient failures so callers can fail + * open and continue with the normal timeline bootstrap. + */ +export async function fetchTimelineTextTailHttp( + serverId: string, + sessionName: string, +): Promise<{ events: TimelineTextTailItem[] } | null> { + const params = new URLSearchParams(); + params.set('sessionName', sessionName); + try { + const result = await apiFetch<{ sessionName: string; events: TimelineTextTailItem[] }>( + `/api/server/${encodeURIComponent(serverId)}/timeline/text-tail?${params.toString()}`, + { method: 'GET' }, + ); + return { + events: Array.isArray(result.events) ? 
result.events : [],
+    };
+  } catch (err) {
+    if (err instanceof ApiError && (err.status === 401 || err.status === 403)) throw err;
+    return null;
+  }
+}
+
 export async function deleteSubSession(serverId: string, subId: string): Promise<void> {
   await apiFetch(`/api/server/${serverId}/sub-sessions/${subId}`, { method: 'DELETE' });
 }
diff --git a/web/src/app-resume-refresh.ts b/web/src/app-resume-refresh.ts
new file mode 100644
index 000000000..7094a2015
--- /dev/null
+++ b/web/src/app-resume-refresh.ts
@@ -0,0 +1,25 @@
+import { requestActiveTimelineRefresh } from './hooks/useTimeline.js';
+
+export interface NativeAppStateApi {
+  addListener(
+    eventName: 'appStateChange',
+    listenerFunc: (state: { isActive: boolean }) => void,
+  ): Promise<{ remove: () => Promise<void> | void }>;
+}
+
+export async function installNativeAppResumeRefresh(
+  enabled: boolean,
+  reconnectNow: (force: boolean) => void,
+  appApi: NativeAppStateApi,
+): Promise<() => void> {
+  if (!enabled) return () => {};
+  const handle = await appApi.addListener('appStateChange', ({ isActive }) => {
+    if (!isActive) return;
+    reconnectNow(true);
+    requestActiveTimelineRefresh({ resetCooldowns: true });
+  });
+  return () => {
+    const result = handle.remove();
+    if (result && typeof (result as Promise<void>).then === 'function') void result;
+  };
+}
diff --git a/web/src/app.tsx b/web/src/app.tsx
index b0d767b62..dbc008487 100644
--- a/web/src/app.tsx
+++ b/web/src/app.tsx
@@ -86,17 +86,22 @@ import {
   mergeTransportPendingMessagesForRunningState,
   normalizeTransportPendingEntries,
 } from './transport-queue.js';
-import { ingestTimelineEventForCache, ACTIVE_TIMELINE_REFRESH_EVENT } from './hooks/useTimeline.js';
+import { ingestTimelineEventForCache } from './hooks/useTimeline.js';
 import { getMobileKeyboardState } from './mobile-keyboard.js';
 import { pickReadableSessionDisplay } from '@shared/session-display.js';
 import { updateMainSessionLabel } from './session-label-api.js';
 import { buildDocumentTitle } from
'./tab-title.js'; import { + getDaemonBadgeState, getSelectedServerName, hasResolvedActiveSession, + isServerOnline, shouldResetSelectedServer, shouldShowInitialConnectingGate, } from './server-selection.js'; +import { installNativeAppResumeRefresh } from './app-resume-refresh.js'; +import { markServerLive, markServerOffline } from './server-online-state.js'; +import { MSG_DAEMON_ONLINE, MSG_DAEMON_OFFLINE } from '@shared/ack-protocol.js'; const DashboardPage = lazy(() => import('./pages/DashboardPage.js').then((m) => ({ default: m.DashboardPage }))); const DiscussionsPage = lazy(() => import('./pages/DiscussionsPage.js').then((m) => ({ default: m.DiscussionsPage }))); @@ -165,12 +170,6 @@ interface ServerInfo { createdAt: number; } -function isServerOnline(s: ServerInfo): boolean { - if (s.status === 'offline') return false; - if (!s.lastHeartbeatAt) return false; - return Date.now() - s.lastHeartbeatAt < 60_000; // 60s — heartbeat is 5s, allow for network jitter -} - export function App() { const { t: trans } = useTranslation(); const [auth, setAuth] = useState(() => { @@ -1389,9 +1388,7 @@ export function App() { } setDaemonOnline(true); if (sessionListRetryRef.current) { clearTimeout(sessionListRetryRef.current); sessionListRetryRef.current = null; } - setServers((prev) => prev.map((s) => - s.id === selectedServerId ? 
{ ...s, lastHeartbeatAt: Date.now() } : s, - )); + setServers((prev) => markServerLive(prev, selectedServerId)); const newSessions = msg.sessions.filter((s) => !s.name.startsWith('deck_sub_')); setSessions((prev) => newSessions.map((s) => { const existing = prev.find((p) => p.name === s.name); @@ -1793,8 +1790,26 @@ export function App() { daemonOfflineGraceTimerRef.current = setTimeout(() => { daemonOfflineGraceTimerRef.current = null; setDaemonOnline(false); + setServers((prev) => markServerOffline(prev, selectedServerId)); }, RECONNECT_GRACE_MS); } + if (msg.type === MSG_DAEMON_ONLINE || msg.type === DAEMON_MSG.RECONNECTED) { + if (daemonOfflineGraceTimerRef.current) { + clearTimeout(daemonOfflineGraceTimerRef.current); + daemonOfflineGraceTimerRef.current = null; + } + setDaemonOnline(true); + setServers((prev) => markServerLive(prev, selectedServerId)); + } + if (msg.type === MSG_DAEMON_OFFLINE) { + if (daemonOfflineGraceTimerRef.current) { + clearTimeout(daemonOfflineGraceTimerRef.current); + daemonOfflineGraceTimerRef.current = null; + } + setDaemonOnline(false); + setServers((prev) => markServerOffline(prev, selectedServerId)); + watchProjectionStore.setSnapshotStatus('stale'); + } if (msg.type === 'daemon.error') { // Surface uncaught daemon errors as a toast so users aren't left in the dark. const id = Date.now() + Math.random(); @@ -1899,21 +1914,12 @@ export function App() { let removeAppStateListener: (() => void) | null = null; if (isNative()) { - void import('@capacitor/app').then(({ App }) => - App.addListener('appStateChange', ({ isActive }) => { - if (isActive) { - ws.reconnectNow(true); - // Native resume: WebView `visibilitychange` is unreliable on some - // iOS versions, so explicitly signal the active timeline to - // force-pull history. Safe to fire even when visibilitychange - // also fires — useTimeline's listener is idempotent (cooldownMs=0 - // but rate-limited by the 200ms setTimeout in fireHttpBackfill). 
- try { window.dispatchEvent(new CustomEvent(ACTIVE_TIMELINE_REFRESH_EVENT)); } catch { /* ignore */ } - } - }).then((listener) => { - removeAppStateListener = () => { void listener.remove(); }; - }).catch(() => {}) - ).catch(() => {}); + void import('@capacitor/app') + .then(({ App }) => installNativeAppResumeRefresh(true, (force) => ws.reconnectNow(force), App)) + .then((cleanup) => { + removeAppStateListener = cleanup; + }) + .catch(() => {}); } return () => { @@ -2590,6 +2596,10 @@ export function App() { sessionsLoaded, ); const resolvedActiveSessionExists = hasResolvedActiveSession(activeSession, sessions); + const selectedServerInfo = selectedServerId + ? servers.find((server) => server.id === selectedServerId) ?? null + : null; + const daemonBadgeState = getDaemonBadgeState(connected, connecting, daemonOnline, selectedServerInfo); useEffect(() => { if (showInitialConnectingGate) { @@ -2892,8 +2902,12 @@ export function App() { )}
- - {connected ? (daemonOnline ? '● Online' : (<>{' Daemon Offline'})) : connecting ? (<>{' Connecting'}) : '○ Offline'} + + {daemonBadgeState === 'online' + ? '● Online' + : daemonBadgeState === 'connecting' + ? (<>{' Connecting'}) + : (<>{' Daemon Offline'})} {(() => { try { const d = new Date(__BUILD_TIME__); return `v${d.getMonth()+1}/${d.getDate()} ${d.getHours().toString().padStart(2,'0')}:${d.getMinutes().toString().padStart(2,'0')}`; } catch { return ''; } })()} diff --git a/web/src/components/ChatView.tsx b/web/src/components/ChatView.tsx index 65f5a93d0..db23ce458 100644 --- a/web/src/components/ChatView.tsx +++ b/web/src/components/ChatView.tsx @@ -242,6 +242,48 @@ function summarizeToolInput( return formatToolPayloadValue(rawRecord.input); } +function isGenericWebSearchLabel(value: string | undefined): boolean { + if (!value) return false; + return /^\((?:other|open_page|find_in_page|search|web_search)\)$/i.test(value.trim()); +} + +function pickMergedToolInput( + toolName: string, + callInput: string, + resultInput: string, +): string { + if (toolName === 'WebSearch' && resultInput) { + if (!callInput || isGenericWebSearchLabel(callInput)) return resultInput; + } + return callInput || resultInput; +} + +function pickMergedToolDetailInput( + toolName: string, + callDetail: unknown, + resultDetail: unknown, +): unknown { + const callInput = summarizeToolInput(undefined, callDetail); + const resultInput = summarizeToolInput((resultDetail as any)?.input, resultDetail); + if (toolName === 'WebSearch' && resultInput) { + if (!callInput || isGenericWebSearchLabel(callInput)) return (resultDetail as any)?.input; + } + return (callDetail as any)?.input ?? 
(resultDetail as any)?.input; +} + +function pickMergedToolDetailMeta( + toolName: string, + callDetail: unknown, + resultDetail: unknown, +): unknown { + const callInput = summarizeToolInput(undefined, callDetail); + const resultInput = summarizeToolInput((resultDetail as any)?.input, resultDetail); + if (toolName === 'WebSearch' && resultInput) { + if (!callInput || isGenericWebSearchLabel(callInput)) return (resultDetail as any)?.meta ?? (callDetail as any)?.meta; + } + return (callDetail as any)?.meta ?? (resultDetail as any)?.meta; +} + function formatToolDetailJson(value: unknown): string | null { if (value == null) return null; if (typeof value === 'string') return value; @@ -328,11 +370,12 @@ function buildViewItems(events: TimelineEvent[]): ViewItem[] { const toolName = String(ev.payload.tool ?? 'tool'); // tool.call from transport SDK may have no input yet (streamed incrementally). // Fall back to the result's detail.input which has the complete args. - const inputText = summarizeToolInput(ev.payload.input, ev.payload.detail) - || summarizeToolInput((next.payload.detail as any)?.input, next.payload.detail); + const callInput = summarizeToolInput(ev.payload.input, ev.payload.detail); + const resultInput = summarizeToolInput((next.payload.detail as any)?.input, next.payload.detail); + const inputText = pickMergedToolInput(toolName, callInput, resultInput); const input = inputText ? ` ${inputText}` : ''; const status = next.payload.error ? `✗ ${String(next.payload.error)}` : '✓'; - const output = !next.payload.error && next.payload.output ? String(next.payload.output) : undefined; + const output = !next.payload.error ? 
formatToolPayloadValue(next.payload.output) : undefined; consolidated.push({ ...ev, type: 'tool.call', @@ -525,7 +568,7 @@ function findScrollParent(start: HTMLElement): HTMLElement { return start; } -export function ChatView({ events, loading, refreshing: _refreshing, loadingOlder, hasOlderHistory = true, onLoadOlder, sessionState, sessionId, onScrollBottomFn, preview, ws, onInsertPath, workdir, serverId, onQuote, agentType: _agentType, onResendFailed }: Props) { +export function ChatView({ events, loading, refreshing = false, loadingOlder, hasOlderHistory = true, onLoadOlder, sessionState, sessionId, onScrollBottomFn, preview, ws, onInsertPath, workdir, serverId, onQuote, agentType: _agentType, onResendFailed }: Props) { const { t } = useTranslation(); const scrollRef = useRef(null); const bottomRef = useRef(null); @@ -1059,8 +1102,16 @@ export function ChatView({ events, loading, refreshing: _refreshing, loadingOlde ⊞ )} - {/* refreshing indicator removed — gap-fill is invisible to the user */}
+ {!preview && refreshing && ( +
+
+ )} {pinnedAboveViewport && lastSentUserMessage && (
) )} - {last && } + {last && } {expanded && middle.length > 0 && (
))} + {newPresetAvailableModels.length > 0 && ( +
+
+ Discovered Models +
+ +
+ )}
+ {presetError && ( +
+ {presetError} +
+ )} + {/* Existing presets — edit/delete */} {ccPresets.length > 0 && ( @@ -941,34 +1093,8 @@ export function NewSessionDialog({ fontSize: 11, }} onClick={() => { - setNewPresetName(p.name); - setNewPresetBaseUrl( - p.env["ANTHROPIC_BASE_URL"] ?? "", - ); - setNewPresetToken( - p.env["ANTHROPIC_AUTH_TOKEN"] ?? "", - ); - setNewPresetModel(p.env["ANTHROPIC_MODEL"] ?? ""); - setNewPresetCtx( - p.contextWindow - ? String(p.contextWindow) - : "1000000", - ); - setNewPresetInit( - p.initMessage ?? DEFAULT_INIT_MSG, - ); - const knownKeys = new Set([ - "ANTHROPIC_BASE_URL", - "ANTHROPIC_AUTH_TOKEN", - "ANTHROPIC_MODEL", - "CLAUDE_CODE_DISABLE_NONESSENTIAL_TRAFFIC", - "CLAUDE_CODE_ATTRIBUTION_HEADER", - ]); - setNewPresetCustomEnv( - Object.entries(p.env) - .filter(([k]) => !knownKeys.has(k)) - .map(([key, value]) => ({ key, value })), - ); + applyPresetDraft(createCcPresetDraftFromPreset(p)); + setPresetError(p.modelDiscoveryError ?? ""); }} > Edit @@ -989,7 +1115,7 @@ export function NewSessionDialog({ setCcPresets(updated); try { ws?.send({ - type: "cc.presets.save", + type: CC_PRESET_MSG.SAVE, presets: updated, }); } catch {} diff --git a/web/src/components/ServerIconBar.tsx b/web/src/components/ServerIconBar.tsx index d0b2287d0..4dc6b1ca3 100644 --- a/web/src/components/ServerIconBar.tsx +++ b/web/src/components/ServerIconBar.tsx @@ -1,4 +1,5 @@ import { useTranslation } from 'react-i18next'; +import { isServerOnline } from '../server-selection.js'; interface ServerInfo { id: string; @@ -42,7 +43,7 @@ export function ServerIconBar({ servers, activeServerId, onSelectServer, onServe )} {servers.map((server) => { const isActive = server.id === activeServerId; - const isOnline = server.status !== 'offline' && server.lastHeartbeatAt != null && Date.now() - server.lastHeartbeatAt < 60_000; + const isOnline = isServerOnline(server); return ( @@ -332,7 +409,7 @@ export function StartSubSessionDialog({ ws, defaultCwd, isProviderConnected: _is {ccPresets.length > 0 ? 
( ) : !showPresetEditor && (
{t('new_session.api_provider_default')}
@@ -354,6 +431,16 @@ export function StartSubSessionDialog({ ws, defaultCwd, isProviderConnected: _is set((e.target as HTMLInputElement).value)} style={{ width: '100%', fontSize: 11 }} />
))} + {newPresetAvailableModels.length > 0 && ( +
+
Discovered Models
+ +
+ )}
Context Window{newPresetCtx && {fmtCtx(newPresetCtx)}}
setNewPresetCtx((e.target as HTMLInputElement).value)} style={{ width: '100%', fontSize: 11 }} /> @@ -377,33 +464,48 @@ export function StartSubSessionDialog({ ws, defaultCwd, isProviderConnected: _is
+ {ccPresets.length > 0 && (
{ccPresets.map((p) => (
- {p.name} {p.env['ANTHROPIC_MODEL'] ?? ''} + {p.name} {p.defaultModel ?? p.env['ANTHROPIC_MODEL'] ?? ''}
- +
))} @@ -441,15 +543,28 @@ export function StartSubSessionDialog({ ws, defaultCwd, isProviderConnected: _is {supportsModelSelection && (
{t('session.supervision.model')}
- setRequestedModel((e.target as HTMLInputElement).value)} - style={{ width: '100%' }} - /> + {type === 'qwen' && modelSuggestions.length > 0 ? ( + + ) : ( + setRequestedModel((e.target as HTMLInputElement).value)} + style={{ width: '100%' }} + /> + )} {modelSuggestions.length > 0 && ( {modelSuggestions.map((model) => ( diff --git a/web/src/components/SubSessionCard.tsx b/web/src/components/SubSessionCard.tsx index bbf3a5c7f..115003bc3 100644 --- a/web/src/components/SubSessionCard.tsx +++ b/web/src/components/SubSessionCard.tsx @@ -86,7 +86,9 @@ export function SubSessionCard({ sub, ws, connected, isOpen, isFocused, idleFlas // daemon echo). const timeline = isShell ? { events: [], refreshing: false, addOptimisticUserMessage: undefined, removeOptimisticMessage: undefined } - : useTimeline(sub.sessionName, ws, serverId); + : useTimeline(sub.sessionName, ws, serverId, { + isActiveSession: !!isFocused, + }); const { events, refreshing } = timeline; const addOptimisticUserMessage = 'addOptimisticUserMessage' in timeline ? timeline.addOptimisticUserMessage : undefined; const removeOptimisticMessage = 'removeOptimisticMessage' in timeline ? timeline.removeOptimisticMessage : undefined; @@ -372,7 +374,7 @@ export function SubSessionCard({ sub, ws, connected, isOpen, isFocused, idleFlas || (typeof extras.p2pMode === 'string' && extras.p2pMode.length > 0) || (extras.p2pSessionConfig != null && typeof extras.p2pSessionConfig === 'object') ); - if (isP2pSend || isTransportRuntime(sub)) return; + if (isP2pSend) return; addOptimisticUserMessage?.(text, meta?.commandId, { ...(meta?.attachments ? { attachments: meta.attachments } : {}), ...(meta?.extra ? 
{ resendExtra: meta.extra } : {}), diff --git a/web/src/components/SubSessionWindow.tsx b/web/src/components/SubSessionWindow.tsx index d036d8aa7..5bf53c2d9 100644 --- a/web/src/components/SubSessionWindow.tsx +++ b/web/src/components/SubSessionWindow.tsx @@ -58,6 +58,17 @@ interface Props { type ViewMode = 'terminal' | 'chat'; +const IDLE_HISTORY_STATUS = { + phase: 'idle', + steps: { + cache: 'skipped', + textTail: 'skipped', + daemon: 'skipped', + http: 'skipped', + older: 'skipped', + }, +} as const; + const LOCAL_KEY = (id: string) => `rcc_subsession_${id}`; const DEFAULT_W = 620; const DEFAULT_H = 620; @@ -115,9 +126,13 @@ export function SubSessionWindow({ const { events, refreshing, + historyStatus: timelineHistoryStatus, addOptimisticUserMessage, removeOptimisticMessage, - } = useTimeline(sub.sessionName, ws, serverId); + } = useTimeline(sub.sessionName, ws, serverId, { + isActiveSession: active, + }); + const historyStatus = timelineHistoryStatus ?? IDLE_HISTORY_STATUS; const quickData = useQuickData(); // Earliest ts of the current continuous thinking sequence (shared logic). @@ -501,7 +516,7 @@ export function SubSessionWindow({
{/* Usage footer — shared component */} - {(lastUsage || activeThinkingTs || activeToolCall || statusText || liveSessionState === 'running' || liveSessionState === 'idle' || sessionInfo?.planLabel || sessionInfo?.quotaLabel || sessionInfo?.quotaUsageLabel || sessionInfo?.quotaMeta) && ( + {(lastUsage || historyStatus.phase !== 'idle' || activeThinkingTs || activeToolCall || statusText || liveSessionState === 'running' || liveSessionState === 'idle' || sessionInfo?.planLabel || sessionInfo?.quotaLabel || sessionInfo?.quotaUsageLabel || sessionInfo?.quotaMeta) && ( )} @@ -543,7 +559,7 @@ export function SubSessionWindow({ || (typeof extras.p2pMode === 'string' && extras.p2pMode.length > 0) || (extras.p2pSessionConfig != null && typeof extras.p2pSessionConfig === 'object') ); - if (isP2pSend || effectiveRuntimeType === 'transport') return; + if (isP2pSend) return; addOptimisticUserMessage(text, meta?.commandId, { ...(meta?.attachments ? { attachments: meta.attachments } : {}), ...(meta?.extra ? { resendExtra: meta.extra } : {}), diff --git a/web/src/components/UsageFooter.tsx b/web/src/components/UsageFooter.tsx index a36fa2342..acbfd3e6d 100644 --- a/web/src/components/UsageFooter.tsx +++ b/web/src/components/UsageFooter.tsx @@ -9,6 +9,7 @@ import { shortModelLabel } from '../model-label.js'; import { getSessionCost, getWeeklyCost, getMonthlyCost, formatCost } from '../cost-tracker.js'; import type { UsageData } from '../usage-data.js'; import { formatProviderQuotaLabel, type ProviderQuotaMeta } from '@shared/provider-quota.js'; +import type { TimelineHistoryStatus, TimelineHistoryStepKey } from '../hooks/useTimeline.js'; interface Props { usage: UsageData; @@ -30,6 +31,8 @@ interface Props { activeToolCall?: boolean; /** Current timestamp for thinking timer (updated every second). */ now?: number; + /** Visible history-fetch progress beneath the ctx bar while waiting for history. 
*/ + historyStatus?: TimelineHistoryStatus | null; } const fmt = (n: number) => @@ -37,7 +40,7 @@ const fmt = (n: number) => : n >= 1000 ? `${(n / 1000).toFixed(0)}k` : String(n); -export function UsageFooter({ usage, sessionName, sessionState, agentType, modelOverride, planLabel, quotaLabel, quotaUsageLabel, quotaMeta, showCost, activeThinkingTs, statusText, activeToolCall, now }: Props) { +export function UsageFooter({ usage, sessionName, sessionState, agentType, modelOverride, planLabel, quotaLabel, quotaUsageLabel, quotaMeta, showCost, activeThinkingTs, statusText, activeToolCall, now, historyStatus }: Props) { const { t } = useTranslation(); const isCodexFamily = agentType === 'codex' || agentType === 'codex-sdk'; const hasActiveLiveWork = !!activeToolCall || !!activeThinkingTs; @@ -119,6 +122,26 @@ export function UsageFooter({ usage, sessionName, sessionState, agentType, model const codexQuotaLines = (agentType === 'codex' || agentType === 'codex-sdk') ? (displayQuotaLabel ?? '').split(' · ').filter(Boolean) : []; + const historySteps = useMemo(() => { + if (!historyStatus || historyStatus.phase === 'idle') return []; + const order: TimelineHistoryStepKey[] = ['cache', 'textTail', 'daemon', 'http', 'older']; + return order + .map((key) => ({ key, state: historyStatus.steps[key] })) + .filter((step) => step.state !== 'skipped') + .map((step) => ({ + ...step, + label: step.key === 'cache' + ? t('session.history_step_cache') + : step.key === 'textTail' + ? t('session.history_step_text_tail') + : step.key === 'daemon' + ? t('session.history_step_daemon') + : step.key === 'http' + ? t('session.history_step_http') + : t('session.history_step_older'), + })); + }, [historyStatus, t]); + const showHistoryProgress = historySteps.some((step) => step.state === 'pending' || step.state === 'running'); return (