diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 8e5debfa2..96ee7491a 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -413,7 +413,38 @@ jobs: fi echo "npm_version=${NPM_VERSION}" >> "$GITHUB_OUTPUT" - - name: Build and push + # Build the image into the local docker daemon first so we can smoke-test + # it BEFORE pushing. Without this step, an image whose Node process + # crashes on startup (e.g. ERR_MODULE_NOT_FOUND from a missing prod dep) + # would still get pushed and auto-deployed to production. + - name: Build (load to local docker for smoke test) + uses: docker/build-push-action@v6 + with: + context: . + file: server/Dockerfile + platforms: linux/amd64 + tags: imcodes-smoke:test + build-args: | + BUILD_TIME=${{ steps.ts.outputs.value }} + OTA_VERSION=${{ steps.ota.outputs.version }} + APP_VERSION=${{ steps.version_meta.outputs.npm_version }} + cache-from: type=gha + cache-to: type=gha,mode=max + load: true + push: false + + - name: Container startup smoke test + run: | + set -euo pipefail + # Override entrypoint to run an import-only check inside the actual + # production image. index.ts has an isMain guard so the import + # resolves all static deps (including all routes/* modules) without + # binding ports or hitting the database. Any ERR_MODULE_NOT_FOUND or + # other top-level eval failure surfaces here, before the image ships. + docker run --rm --entrypoint node imcodes-smoke:test \ + -e "import('./dist/server/src/index.js').then(() => { console.log('OK: image loads cleanly'); process.exit(0); }).catch(e => { console.error('FAIL:', e.message); console.error(e.stack); process.exit(1); })" + + - name: Build and push (cache hit — only pushes layers) uses: docker/build-push-action@v6 with: context: . diff --git a/CLAUDE.md b/CLAUDE.md index 4bf64bce7..c0ba6ad5b 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -52,7 +52,6 @@ Node.js process that manages AI agent sessions via tmux. 
Entry point: `src/index - `session-manager.ts` manages all sessions, auto-restart with loop prevention. `provider-registry.ts` manages transport provider lifecycle. - **Transport relay** (`src/daemon/transport-relay.ts`): Converts transport provider callbacks (`onDelta`, `onComplete`, `onError`) to unified timeline events (`assistant.text`, `session.state`, `tool.call`). - **Routing** (`src/router/`): `message-router.ts` routes inbound messages to the correct session. `command-parser.ts` handles `/bind`, `/status`, `/send`, etc. -- **Brain dispatcher** (`src/agent/brain-dispatcher.ts`): Parses `@w1`, `@status`, `@reply` commands from the brain session's output, dispatching to workers. - **Server link** (`src/daemon/server-link.ts`): WebSocket client connecting to the server at `/api/server/:id/ws`. Sends `{ type: 'auth', serverId, token }` on open. Credentials stored in `~/.imcodes/server.json` after `imcodes bind`. - **Session store** (`src/store/session-store.ts`): JSON file at `~/.imcodes/sessions.json`, debounced writes. diff --git a/README.i18n/README.es.md b/README.i18n/README.es.md index fffc70d4e..7d06322a9 100644 --- a/README.i18n/README.es.md +++ b/README.i18n/README.es.md @@ -84,8 +84,10 @@ Previsualiza tu servidor de desarrollo local desde cualquier dispositivo sin des ### Móvil, reloj y notificaciones Soporte completo para móvil, autenticación biométrica, notificaciones push, entrada interactiva para sesiones shell y respuestas rápidas desde Apple Watch. -### Discusiones y auditoría multiagente -Puedes ejecutar rondas rápidas de discusión donde varios agentes, incluso de distintos proveedores, revisan o auditan el mismo tema. +### Auditoría cross-modelo y discusiones P2P +La salida de un solo modelo no debería confiarse ciegamente. Las discusiones P2P permiten que múltiples agentes — de distintos proveedores y estilos de pensamiento — colaboren en el análisis del mismo código antes de escribir una sola línea. 
Cada ronda sigue un pipeline multifase personalizable, donde cada agente lee todas las contribuciones anteriores. Diferentes modelos detectan diferentes tipos de problemas. Esta revisión cruzada entre proveedores encuentra la mayoría de los problemas antes de la implementación, reduciendo drásticamente el retrabajo. + +Modos integrados: `audit` (pipeline estructurado audit → review → plan), `review`, `discuss` y `brainstorm` — o define tu propia secuencia de fases. Funciona con Claude Code, Codex, Gemini CLI y Qwen. ### Agentes transport con streaming Soporte nativo de streaming para agentes transport como OpenClaw y Qwen, sin scraping de terminal. diff --git a/README.i18n/README.ja.md b/README.i18n/README.ja.md index 73bfdde5b..ddfff26ac 100644 --- a/README.i18n/README.ja.md +++ b/README.i18n/README.ja.md @@ -80,8 +80,10 @@ SSH、VPN、ポート開放なしで、任意のブラウザから agent session ### モバイル、Watch、通知 生体認証、push 通知、shell session の入力、Apple Watch での素早い確認と返信に対応します。 -### マルチエージェント議論と監査 -複数の agent による discuss / audit / review / brainstorm ラウンドを実行できます。 +### クロスモデル監査と P2P ディスカッション +単一モデルの出力を盲信すべきではありません。P2P ディスカッションでは、異なるプロバイダーや思考スタイルを持つ複数の agent が、コードを書く前に同じコードベースで協調分析を行います。各ラウンドはカスタマイズ可能なマルチフェーズパイプラインに従い、各 agent は前の貢献をすべて読んだ上で出力します。異なるモデルは異なる種類の問題を発見します。このクロスプロバイダー相互審査により、実装前に大部分の問題を発見し、手戻りを大幅に削減できます。 + +組み込みモードは `audit`(構造化された audit → review → plan パイプライン)、`review`、`discuss`、`brainstorm` で、独自のフェーズ構成も定義可能。Claude Code、Codex、Gemini CLI、Qwen で動作します。 ### Streaming Transport Agents OpenClaw や Qwen のような transport 型 agent に対して、terminal scraping ではなくネイティブなストリーミングを提供します。 diff --git a/README.i18n/README.ko.md b/README.i18n/README.ko.md index 5c8fc34fd..146c1ae0e 100644 --- a/README.i18n/README.ko.md +++ b/README.i18n/README.ko.md @@ -80,8 +80,10 @@ SSH, VPN, 포트 포워딩 없이 브라우저에서 agent session의 터미널 ### 모바일, 워치, 알림 생체 인증, 푸시 알림, shell session 입력, Apple Watch 빠른 응답을 지원합니다. -### 멀티 에이전트 토론과 감사 -여러 agent가 discuss / audit / review / brainstorm 라운드를 수행할 수 있습니다. 
+### 크로스 모델 감사와 P2P 토론 +단일 모델의 출력을 맹목적으로 신뢰해서는 안 됩니다. P2P 토론은 서로 다른 프로바이더와 사고 방식을 가진 여러 agent가 코드 작성 전에 동일한 코드베이스에서 협력 분석을 수행합니다. 각 라운드는 커스터마이징 가능한 멀티 페이즈 파이프라인을 따르며, 각 agent는 이전 기여를 모두 읽은 후 출력합니다. 서로 다른 모델이 서로 다른 유형의 문제를 발견합니다. 이 크로스 프로바이더 교차 검토로 구현 전에 대부분의 문제를 찾아내어 재작업을 대폭 줄일 수 있습니다. + +내장 모드는 `audit`(구조화된 audit → review → plan 파이프라인), `review`, `discuss`, `brainstorm`이며, 사용자 정의 페이즈 구성도 가능합니다. Claude Code, Codex, Gemini CLI, Qwen에서 작동합니다. ### 스트리밍 Transport Agents OpenClaw, Qwen 같은 transport agent에 대해 네이티브 스트리밍을 제공합니다. diff --git a/README.i18n/README.ru.md b/README.i18n/README.ru.md index 0c5a7f0b0..a6b021b6c 100644 --- a/README.i18n/README.ru.md +++ b/README.i18n/README.ru.md @@ -80,8 +80,10 @@ Claude Code и Codex теперь поддерживают два способа ### Мобильные устройства, часы и уведомления Есть биометрическая аутентификация, push‑уведомления, ввод для shell‑сессий и быстрые ответы на Apple Watch. -### Многоагентные обсуждения и аудит -Можно запускать быстрые discuss / audit / review / brainstorm раунды между несколькими агентами. +### Кросс-модельный аудит и P2P обсуждения +Выходу одной модели нельзя доверять слепо. P2P обсуждения позволяют нескольким агентам — от разных провайдеров и с разными стилями мышления — совместно анализировать одну кодовую базу ещё до написания кода. Каждый раунд следует настраиваемому многоэтапному пайплайну, где каждый агент читает все предыдущие вклады. Разные модели находят разные типы проблем. Такая перекрёстная проверка выявляет большинство проблем до реализации, резко сокращая переделки. + +Встроенные режимы: `audit` (структурированный пайплайн audit → review → plan), `review`, `discuss` и `brainstorm` — или определите собственную последовательность фаз. Работает с Claude Code, Codex, Gemini CLI и Qwen. ### Потоковые transport‑агенты OpenClaw и Qwen работают через структурированный transport‑stream вместо terminal scraping. 
diff --git a/README.i18n/README.zh-CN.md b/README.i18n/README.zh-CN.md index a27aa115b..d910728de 100644 --- a/README.i18n/README.zh-CN.md +++ b/README.i18n/README.zh-CN.md @@ -88,9 +88,11 @@ Claude Code 和 Codex 现在都支持两种接入方式:CLI 和 SDK。 完整支持移动端,包含生物识别认证和推送通知。Shell 会话在手机上也支持交互式键盘输入(类似 SSH)。子会话预览卡始终显示最新消息。Toast 通知可直接跳转到对应会话。Apple Watch 支持会话快速查看、未读计数和快速回复。 -### 多 Agent 讨论与审计 +### 跨模型审计与 P2P 讨论 -单模型输出不应被盲目信任。你可以快速发起多轮讨论,让多个 agent——甚至跨 provider——围绕同一主题进行 review、audit 或 brainstorming。每个 agent 会读取前面的内容,再追加自己的分析。支持 `discuss`、`audit`、`review`、`brainstorm` 模式。侧边栏中的环形进度条会显示 round / hop 完成情况。支持 Claude Code、Codex、Gemini CLI,也兼容带 sandbox 的 agent。 +单模型输出不应被盲目信任。P2P 讨论让多个 agent——跨不同 provider 和思维风格——在写代码之前就对同一代码库进行协作分析。每轮遵循可自定义的多阶段流程,每个 agent 读取所有前序贡献并在此基础上输出。不同模型捕获不同类别的问题:一个发现竞态条件,另一个指出遗漏的 migration,第三个质疑 API 设计。这种跨 provider 交叉审查能在实现前发现绝大部分问题,大幅减少返工。 + +内置模式包括 `audit`(结构化 audit → review → plan 流水线)、`review`、`discuss` 和 `brainstorm`,也可以自定义阶段序列。侧边栏中的环形进度条会显示 round / hop 完成情况。支持 Claude Code、Codex、Gemini CLI 和 Qwen,也兼容带 sandbox 的 agent。通过 `@@all(config)` 或 UI 配置参与者、轮次、模式和 P2P 设置。 ### 流式 Transport Agents diff --git a/README.i18n/README.zh-TW.md b/README.i18n/README.zh-TW.md index f96330bd0..460c6c156 100644 --- a/README.i18n/README.zh-TW.md +++ b/README.i18n/README.zh-TW.md @@ -88,9 +88,11 @@ Claude Code 和 Codex 現在都支援兩種接入方式:CLI 和 SDK。 完整支持移动端,包含生物识别认证和推送通知。Shell 会话在手機上也支持交互式键盘输入(类似 SSH)。子会话預覽卡始终显示最新消息。Toast 通知可直接跳转到对应会话。Apple Watch 支持会话快速檢視、未读计数和快速回复。 -### 多 Agent 討論與審計 +### 跨模型稽核與 P2P 討論 -单模型输出不应被盲目信任。你可以快速发起多轮讨论,让多个 agent——甚至跨 provider——围绕同一主题进行 review、audit 或 brainstorming。每个 agent 会读取前面的内容,再追加自己的分析。支持 `discuss`、`audit`、`review`、`brainstorm` 模式。侧边栏中的环形进度条会显示 round / hop 完成情况。支持 Claude Code、Codex、Gemini CLI,也兼容带 sandbox 的 agent。 +單一模型的輸出不應被盲目信任。P2P 討論讓多個 agent——跨不同 provider 和思維風格——在寫程式之前就對同一代碼庫進行協作分析。每輪遵循可自訂的多階段流程,每個 agent 讀取所有前序貢獻並在此基礎上輸出。不同模型捕獲不同類別的問題:一個發現競態條件,另一個指出遺漏的 migration,第三個質疑 API 設計。這種跨 provider 交叉審查能在實現前發現絕大部分問題,大幅減少返工。 + +內建模式包括 `audit`(結構化 audit → review → plan 
流水線)、`review`、`discuss` 和 `brainstorm`,也可以自訂階段序列。側邊欄中的環形進度條會顯示 round / hop 完成情況。支持 Claude Code、Codex、Gemini CLI 和 Qwen,也相容帶 sandbox 的 agent。透過 `@@all(config)` 或 UI 配置參與者、輪次、模式和 P2P 設定。 ### 串流 Transport Agents diff --git a/README.md b/README.md index bd30da01a..fa157e2a2 100644 --- a/README.md +++ b/README.md @@ -84,9 +84,11 @@ Preview your local dev server from any device — phone, tablet, or remote brows Full mobile support with biometric auth and push notifications. Shell sessions allow interactive keyboard input on mobile (SSH-like). Sub-session preview cards always show latest messages. Toast notifications navigate directly to the relevant session. Apple Watch support adds quick session monitoring, unread counts, and quick replies from the wrist. -### Multi-Agent Discussions & Audit +### Multi-Agent Discussions & Cross-Provider Audit -Single-model output shouldn't be trusted blindly. Spawn quick discussion rounds where multiple agents — across different providers — review, audit, or brainstorm on the same topic. Each agent reads prior contributions and adds their own. Modes include `discuss`, `audit`, `review`, and `brainstorm`. Ring progress indicator shows round/hop completion in the sidebar. Works across Claude Code, Codex, and Gemini CLI, including sandboxed agents. +Single-model output shouldn't be trusted blindly. P2P discussions let multiple agents — across different providers and thinking styles — collaborate on the same codebase before a single line is written. Each round follows a customizable multi-phase pipeline where every agent reads all prior contributions and builds on them. Different models catch different classes of issues: one spots a race condition, another flags a missing migration, a third questions the API design. This cross-provider scrutiny catches the majority of problems before implementation, dramatically reducing rework cycles. 
+ +Built-in modes include `audit` (structured audit → review → plan pipeline), `review`, `discuss`, and `brainstorm` — or define your own phase sequence. Ring progress indicator shows round/hop completion in the sidebar. Works across Claude Code, Codex, Gemini CLI, and Qwen, including sandboxed agents. Configure participants, round counts, modes, and per-session P2P settings via `@@all(config)` or the UI. ### Streaming Transport Agents diff --git a/landing/index.html b/landing/index.html index 19877d229..ce668302c 100644 --- a/landing/index.html +++ b/landing/index.html @@ -304,7 +304,7 @@

features

file browser & git
tree view, upload/download, +/- stats, floating preview
local web preview
preview localhost from any device via secure tunnel, supports HMR
mobile, watch & notifications
biometric auth, push notifications, shell keyboard input, watch quick replies
-
multi-agent discussions
discuss, audit, review across providers with ring progress
+
cross-provider audit
multi-agent P2P discussions with customizable phases (audit, review, brainstorm…) — different models catch different issues before code is written, reducing rework
CLI + SDK agents
Claude Code and Codex work through both CLI and SDK integrations. OpenClaw and Qwen stream natively with real-time deltas, tool tracking, and session restore.
terminal + chat
raw CLI or structured view with parsed tool calls
Discord-style sidebar
server icons, session tree, unread badges, idle flash
@@ -478,7 +478,7 @@

about

not_1: '不是另一个 AI IDE', not_2: '不只是聊天壳', not_3: '不只是远程终端客户端', not_4: '不是 Claude Code、Codex、Gemini CLI、OpenClaw 或 Qwen 的替代品', not_5: '它是围绕这些 agent 的消息/控制层', f_remote_name: '远程终端', f_remote_desc: '浏览器和手机访问,无需 SSH/VPN,实时 PTY 推流', f_chat_name: '终端 + 聊天', f_chat_desc: '原生 CLI 或结构化视图,解析工具调用', - f_discuss_name: '多代理讨论', f_discuss_desc: '跨供应商讨论、审计、审查', + f_discuss_name: '跨模型审计', f_discuss_desc: '多 Agent P2P 讨论,可自定义阶段(audit、review、brainstorm…)— 不同模型在写代码前交叉审查,大幅减少返工', f_multi_name: '多服务器', f_multi_desc: '一个面板管理所有机器上的代理', f_sub_name: '子会话', f_sub_desc: '随时启动并行代理', f_file_name: '文件浏览器', f_file_desc: '浏览、上传下载、Git 差异、浮动预览', @@ -537,7 +537,7 @@

about

not_1: '不是另一個 AI IDE', not_2: '不只是聊天殼', not_3: '不只是遠端終端客戶端', not_4: '不是 Claude Code、Codex、Gemini CLI、OpenClaw 或 Qwen 的替代品', not_5: '它是圍繞這些 agent 的訊息/控制層', f_remote_name: '遠端終端機', f_remote_desc: '瀏覽器和手機存取,無需 SSH/VPN,即時 PTY 串流', f_chat_name: '終端機 + 聊天', f_chat_desc: '原生 CLI 或結構化檢視,解析工具呼叫', - f_discuss_name: '多代理討論', f_discuss_desc: '跨供應商討論、稽核、審查', + f_discuss_name: '跨模型稽核', f_discuss_desc: '多 Agent P2P 討論,可自訂階段(audit、review、brainstorm…)— 不同模型在寫程式前交叉審查,大幅減少返工', f_multi_name: '多伺服器', f_multi_desc: '一個面板管理所有機器上的代理', f_sub_name: '子會話', f_sub_desc: '隨時啟動並行代理', f_file_name: '檔案瀏覽器', f_file_desc: '瀏覽、上傳下載、Git 差異、浮動預覽', @@ -595,7 +595,7 @@

about

not_1: '別の AI IDE ではありません', not_2: '単なるチャットラッパーではありません', not_3: '単なるリモートターミナルクライアントではありません', not_4: 'Claude Code、Codex、Gemini CLI、OpenClaw、Qwen の置き換えではありません', not_5: 'それらを取り巻くメッセージング/コントロール層です', f_remote_name: 'リモートターミナル', f_remote_desc: 'ブラウザ&モバイル、SSH/VPN不要、リアルタイムPTYストリーミング', f_chat_name: 'ターミナル+チャット', f_chat_desc: 'ネイティブCLIまたは構造化ビュー', - f_discuss_name: 'マルチエージェント議論', f_discuss_desc: 'プロバイダー間で議論・監査・レビュー', + f_discuss_name: 'クロスモデル監査', f_discuss_desc: 'カスタマイズ可能なフェーズ(audit、review、brainstorm…)のマルチエージェント P2P 議論 — 異なるモデルが実装前に交差レビューし、手戻りを大幅削減', f_multi_name: 'マルチサーバー', f_multi_desc: '複数マシンのエージェントを一元管理', f_sub_name: 'サブセッション', f_sub_desc: '並列エージェントを即座に起動', f_file_name: 'ファイルブラウザ', f_file_desc: '閲覧、アップロード/ダウンロード、Git差分、フローティングプレビュー', @@ -654,7 +654,7 @@

about

not_1: '또 다른 AI IDE가 아닙니다', not_2: '단순한 채팅 래퍼가 아닙니다', not_3: '단순한 원격 터미널 클라이언트가 아닙니다', not_4: 'Claude Code, Codex, Gemini CLI, OpenClaw, Qwen의 대체품이 아닙니다', not_5: '그들을 둘러싼 메시징/컨트롤 레이어입니다', f_remote_name: '원격 터미널', f_remote_desc: '브라우저 & 모바일, SSH/VPN 불필요, 실시간 PTY 스트리밍', f_chat_name: '터미널 + 채팅', f_chat_desc: '네이티브 CLI 또는 구조화된 뷰', - f_discuss_name: '멀티 에이전트 토론', f_discuss_desc: '프로바이더 간 토론, 감사, 리뷰', + f_discuss_name: '크로스 모델 감사', f_discuss_desc: '커스터마이징 가능한 페이즈(audit, review, brainstorm…)의 멀티 에이전트 P2P 토론 — 서로 다른 모델이 코드 작성 전 교차 검토하여 재작업을 대폭 감소', f_multi_name: '멀티 서버', f_multi_desc: '하나의 대시보드에서 모든 머신의 에이전트 관리', f_sub_name: '서브 세션', f_sub_desc: '병렬 에이전트 즉시 생성', f_file_name: '파일 브라우저', f_file_desc: '탐색, 업로드/다운로드, Git 차이, 플로팅 미리보기', @@ -713,7 +713,7 @@

about

not_1: 'No es otro AI IDE', not_2: 'No es solo un wrapper de chat', not_3: 'No es solo un cliente de terminal remota', not_4: 'No reemplaza a Claude Code, Codex, Gemini CLI, OpenClaw ni Qwen', not_5: 'Es la capa de mensajería/control alrededor de ellos', f_remote_name: 'terminal remota', f_remote_desc: 'navegador y móvil, sin SSH/VPN, streaming PTY en tiempo real', f_chat_name: 'terminal + chat', f_chat_desc: 'CLI nativo o vista estructurada con llamadas de herramientas', - f_discuss_name: 'discusiones multi-agente', f_discuss_desc: 'discutir, auditar, revisar entre proveedores', + f_discuss_name: 'auditoría cross-modelo', f_discuss_desc: 'discusiones P2P multi-agente con fases personalizables (audit, review, brainstorm…) — distintos modelos detectan distintos problemas antes de escribir código, reduciendo retrabajo', f_multi_name: 'multi-servidor', f_multi_desc: 'gestiona agentes en todas las máquinas desde un panel', f_sub_name: 'sub-sesiones', f_sub_desc: 'lanza agentes paralelos al instante', f_file_name: 'explorador de archivos', f_file_desc: 'explorar, subir/descargar, diffs de Git, vista previa flotante', @@ -772,7 +772,7 @@

about

not_1: 'Это не ещё один AI IDE', not_2: 'Это не просто чат-обёртка', not_3: 'Это не просто клиент удалённого терминала', not_4: 'Это не замена Claude Code, Codex, Gemini CLI, OpenClaw или Qwen', not_5: 'Это слой сообщений/управления вокруг них', f_remote_name: 'удалённый терминал', f_remote_desc: 'браузер и мобильный, без SSH/VPN, PTY-стриминг в реальном времени', f_chat_name: 'терминал + чат', f_chat_desc: 'нативный CLI или структурированный вид', - f_discuss_name: 'мульти-агентные обсуждения', f_discuss_desc: 'обсуждение, аудит, ревью между провайдерами', + f_discuss_name: 'кросс-модельный аудит', f_discuss_desc: 'мульти-агентные P2P обсуждения с настраиваемыми фазами (audit, review, brainstorm…) — разные модели перекрёстно проверяют до написания кода, сокращая переделки', f_multi_name: 'мульти-сервер', f_multi_desc: 'управление агентами на всех машинах с одной панели', f_sub_name: 'под-сессии', f_sub_desc: 'мгновенный запуск параллельных агентов', f_file_name: 'файловый браузер', f_file_desc: 'просмотр, загрузка/скачивание, Git-диффы, плавающий превью', diff --git a/package.json b/package.json index 53cc891bf..c63c41db6 100644 --- a/package.json +++ b/package.json @@ -30,7 +30,7 @@ "test:web": "vitest run --project web", "test:e2e": "vitest run --project e2e", "test:integration": "vitest run --workspace vitest.integration.config.ts", - "test:coverage": "vitest run --coverage && node scripts/write-coverage-summary.mjs", + "test:coverage": "vitest run --coverage --no-file-parallelism --maxWorkers 1 --testTimeout 60000 --hookTimeout 60000 && node scripts/write-coverage-summary.mjs", "test:watch": "vitest", "lint": "eslint src/", "typecheck": "tsc --noEmit", diff --git a/server/src/routes/cron-api.ts b/server/src/routes/cron-api.ts index c23ffe180..adfd21954 100644 --- a/server/src/routes/cron-api.ts +++ b/server/src/routes/cron-api.ts @@ -47,7 +47,7 @@ const cronJobCreateSchema = z.object({ serverId: z.string().min(1), projectName: z.string().min(1).max(64), 
targetRole: z.string().regex(rolePattern).default('brain'), - targetSessionName: z.string().regex(sessionNamePattern).optional(), + targetSessionName: z.string().regex(sessionNamePattern).nullable().optional(), action: cronActionSchema, timezone: z.string().min(1).max(64).optional(), expiresAt: z.number().nullable().optional(), diff --git a/server/src/routes/push.ts b/server/src/routes/push.ts index ef8b54abb..f58142fe1 100644 --- a/server/src/routes/push.ts +++ b/server/src/routes/push.ts @@ -92,6 +92,10 @@ pushRoutes.post('/relay', async (c) => { } catch (err) { const msg = err instanceof Error ? err.message : String(err); const unregistered = err instanceof PushError && err.unregistered; + logger.warn( + { err, platform: body.platform, token: body.token.slice(0, 10) + '...', unregistered }, + 'Push relay failed', + ); return c.json({ error: msg, unregistered }, unregistered ? 410 : 502); } }); @@ -145,7 +149,9 @@ export async function dispatchPush(payload: PushPayload, envOrDb: Env | Database [payload.userId], ); if (rows.length > 0) badgeCount = rows[0].badge_count; - } catch { /* fallback to 1 */ } + } catch (err) { + logger.warn({ err, userId: payload.userId }, 'Failed to increment badge_count — falling back to 1'); + } payload.badge = badgeCount; const hasApns = !!(env.APNS_KEY && env.APNS_KEY_ID && env.APNS_TEAM_ID); diff --git a/server/src/routes/quick-data.ts b/server/src/routes/quick-data.ts index e19c9caca..56e095557 100644 --- a/server/src/routes/quick-data.ts +++ b/server/src/routes/quick-data.ts @@ -22,21 +22,7 @@ quickDataRoutes.get('/', async (c) => { return c.json({ data }); }); -/** Merge two string arrays: deduplicate, preserve order (incoming first), cap at max. 
*/ -function mergeArrays(incoming: string[], existing: string[], max: number): string[] { - const seen = new Set(); - const result: string[] = []; - for (const s of [...incoming, ...existing]) { - if (!seen.has(s)) { - seen.add(s); - result.push(s); - if (result.length >= max) break; - } - } - return result; -} - -/** PUT /api/quick-data — merge with existing data (not replace) */ +/** PUT /api/quick-data — replace the user's quick data snapshot */ quickDataRoutes.put('/', async (c) => { const userId = c.get('userId' as never) as string; @@ -52,24 +38,13 @@ quickDataRoutes.put('/', async (c) => { return c.json({ error: 'invalid_data', detail: parsed.error.flatten() }, 400); } - // Read existing data and merge to avoid cross-device overwrites - const existing = await getQuickData(c.env.DB, userId); - const merged = { - history: mergeArrays(parsed.data.history, existing.history ?? [], 50), - sessionHistory: { ...existing.sessionHistory, ...parsed.data.sessionHistory } as Record, - commands: mergeArrays(parsed.data.commands, existing.commands ?? [], 200), - phrases: mergeArrays(parsed.data.phrases, existing.phrases ?? [], 200), + const next = { + history: parsed.data.history, + sessionHistory: parsed.data.sessionHistory, + commands: parsed.data.commands, + phrases: parsed.data.phrases, }; - // Merge per-session histories too - for (const [key, arr] of Object.entries(existing.sessionHistory ?? 
{})) { - if (merged.sessionHistory[key]) { - merged.sessionHistory[key] = mergeArrays(merged.sessionHistory[key], arr, 50); - } else { - merged.sessionHistory[key] = arr; - } - } - - await upsertQuickData(c.env.DB, userId, merged); + await upsertQuickData(c.env.DB, userId, next); return c.json({ ok: true }); }); diff --git a/server/src/routes/session-mgmt.ts b/server/src/routes/session-mgmt.ts index f97cce10c..6097c8159 100644 --- a/server/src/routes/session-mgmt.ts +++ b/server/src/routes/session-mgmt.ts @@ -7,6 +7,7 @@ import { WsBridge } from '../ws/bridge.js'; import logger from '../util/logger.js'; import { IMCODES_POD_HEADER } from '../../../shared/http-header-names.js'; import { getPodIdentity } from '../util/pod-identity.js'; +import { isSessionAgentType } from '../../../shared/agent-types.js'; export const sessionMgmtRoutes = new Hono<{ Bindings: Env; Variables: { userId: string; role: string } }>(); @@ -130,6 +131,7 @@ sessionMgmtRoutes.patch('/:id/sessions/:name', async (c) => { label?: string | null; description?: string | null; cwd?: string | null; + agentType?: string | null; requestedModel?: string | null; activeModel?: string | null; effort?: string | null; @@ -150,6 +152,11 @@ sessionMgmtRoutes.patch('/:id/sessions/:name', async (c) => { effort?: string | null; transport_config?: Record | null; } = {}; + if ('agentType' in body && body.agentType != null) { + if (typeof body.agentType !== 'string' || !isSessionAgentType(body.agentType)) { + return c.json({ error: 'invalid_agent_type' }, 400); + } + } if ('label' in body) fields.label = body.label ?? null; if ('description' in body) fields.description = body.description ?? null; if ('cwd' in body) fields.project_dir = body.cwd ?? null; @@ -159,6 +166,26 @@ sessionMgmtRoutes.patch('/:id/sessions/:name', async (c) => { if ('transportConfig' in body) fields.transport_config = body.transportConfig ?? 
null; await updateSession(c.env.DB, serverId, sessionName, fields); + + if (typeof body.agentType === 'string') { + try { + WsBridge.get(serverId).sendToDaemon(JSON.stringify({ + type: 'session.restart', + sessionName, + agentType: body.agentType, + ...(body.label !== undefined ? { label: body.label } : {}), + ...(body.description !== undefined ? { description: body.description } : {}), + ...(body.cwd !== undefined ? { cwd: body.cwd } : {}), + ...(body.requestedModel !== undefined ? { requestedModel: body.requestedModel } : {}), + ...(body.activeModel !== undefined ? { activeModel: body.activeModel } : {}), + ...(body.effort !== undefined ? { effort: body.effort } : {}), + ...(body.transportConfig !== undefined ? { transportConfig: body.transportConfig } : {}), + })); + } catch (err) { + logger.error({ serverId, sessionName, err }, 'WsBridge session settings relay failed'); + return c.json({ error: 'relay_failed' }, 502); + } + } return c.json({ ok: true }); }); diff --git a/server/src/routes/sub-sessions.ts b/server/src/routes/sub-sessions.ts index 7e1f31598..a7a273354 100644 --- a/server/src/routes/sub-sessions.ts +++ b/server/src/routes/sub-sessions.ts @@ -9,6 +9,9 @@ import { reorderSubSessions, } from '../db/queries.js'; import { requireAuth, resolveServerRole } from '../security/authorization.js'; +import { WsBridge } from '../ws/bridge.js'; +import logger from '../util/logger.js'; +import { isSessionAgentType } from '../../../shared/agent-types.js'; export const subSessionRoutes = new Hono<{ Bindings: Env; Variables: { userId: string; role: string } }>(); @@ -54,8 +57,7 @@ subSessionRoutes.post('/:id/sub-sessions', async (c) => { } if (!body.type) return c.json({ error: 'missing_fields' }, 400); - const validTypes = ['claude-code', 'claude-code-sdk', 'codex', 'codex-sdk', 'opencode', 'shell', 'gemini', 'openclaw', 'qwen', 'script']; - if (!validTypes.includes(body.type)) return c.json({ error: 'invalid_type' }, 400); + if (!isSessionAgentType(body.type)) 
return c.json({ error: 'invalid_type' }, 400); // Generate 8-char id const id = Array.from(crypto.getRandomValues(new Uint8Array(6))) @@ -119,6 +121,7 @@ subSessionRoutes.patch('/:id/sub-sessions/:subId', async (c) => { if (!existing) return c.json({ error: 'not_found' }, 404); let body: { + type?: string | null; label?: string | null; closedAt?: number | null; description?: string | null; @@ -146,8 +149,15 @@ subSessionRoutes.patch('/:id/sub-sessions/:subId', async (c) => { effort?: string | null; transport_config?: Record | null; } = {}; + if ('type' in body && body.type != null) { + if (typeof body.type !== 'string' || !isSessionAgentType(body.type)) { + return c.json({ error: 'invalid_agent_type' }, 400); + } + } + if ('closedAt' in body) { + return c.json({ error: 'closed_at_managed_by_daemon' }, 400); + } if ('label' in body) fields.label = body.label ?? null; - if ('closedAt' in body) fields.closed_at = body.closedAt ?? null; if ('description' in body) fields.description = body.description ?? null; if ('cwd' in body) fields.cwd = body.cwd ?? null; if ('ccPresetId' in body) fields.cc_preset_id = body.ccPresetId ?? null; @@ -157,6 +167,26 @@ subSessionRoutes.patch('/:id/sub-sessions/:subId', async (c) => { if ('transportConfig' in body) fields.transport_config = body.transportConfig ?? null; await updateSubSession(c.env.DB, subId, serverId, fields); + + if (typeof body.type === 'string') { + try { + WsBridge.get(serverId).sendToDaemon(JSON.stringify({ + type: 'subsession.restart', + sessionName: `deck_sub_${subId}`, + agentType: body.type, + ...(body.label !== undefined ? { label: body.label } : {}), + ...(body.description !== undefined ? { description: body.description } : {}), + ...(body.cwd !== undefined ? { cwd: body.cwd } : {}), + ...(body.requestedModel !== undefined ? { requestedModel: body.requestedModel } : {}), + ...(body.activeModel !== undefined ? { activeModel: body.activeModel } : {}), + ...(body.effort !== undefined ? 
{ effort: body.effort } : {}), + ...(body.transportConfig !== undefined ? { transportConfig: body.transportConfig } : {}), + })); + } catch (err) { + logger.error({ serverId, subId, err }, 'WsBridge sub-session settings relay failed'); + return c.json({ error: 'relay_failed' }, 502); + } + } return c.json({ ok: true }); }); diff --git a/server/src/ws/bridge.ts b/server/src/ws/bridge.ts index 3f2fa454b..7a774ee29 100644 --- a/server/src/ws/bridge.ts +++ b/server/src/ws/bridge.ts @@ -17,6 +17,7 @@ import type { Database } from '../db/client.js'; import type { Env } from '../env.js'; import { MemoryRateLimiter } from './rate-limiter.js'; import { sha256Hex } from '../security/crypto.js'; +import { DAEMON_MSG } from '../../../shared/daemon-events.js'; import { REPO_RELAY_TYPES } from '../../../shared/repo-types.js'; import { TRANSPORT_RELAY_TYPES, TRANSPORT_MSG } from '../../../shared/transport-events.js'; import { @@ -38,6 +39,7 @@ import { import { LocalWebPreviewRegistry } from '../preview/registry.js'; import { updateServerHeartbeat, updateServerStatus, upsertDiscussion, insertDiscussionRound, createSubSession, updateSubSession, upsertOrchestrationRun, updateProviderStatus, clearProviderStatus, updateProviderRemoteSessions } from '../db/queries.js'; import logger from '../util/logger.js'; +import { pickReadableSessionDisplay } from '../../../shared/session-display.js'; const AUTH_TIMEOUT_MS = 5000; const MAX_QUEUE_SIZE = 100; @@ -137,6 +139,13 @@ export interface WatchActiveMainSessionRow { label?: string; } +type WatchActiveSubSessionRow = { + name: string; + parentSession?: string; + agentType?: string; + label?: string; +}; + type PendingPreviewRequest = { readable: ReadableStream; controller: ReadableStreamDefaultController | null; @@ -277,6 +286,8 @@ export class WsBridge { /** Latest daemon-owned active main-session snapshot for watch list responses. 
*/ private activeMainSessions = new Map(); + /** Latest daemon-owned active sub-session snapshot for push title resolution. */ + private activeSubSessions = new Map(); private hasActiveMainSessionSnapshot = false; /** @@ -437,7 +448,7 @@ export class WsBridge { } this.queue = []; - this.broadcastToBrowsers(JSON.stringify({ type: 'daemon.reconnected' })); + this.broadcastToBrowsers(JSON.stringify({ type: DAEMON_MSG.RECONNECTED })); // Re-subscribe daemon to all sessions that still have active browser subscribers for (const [sessionName, refs] of this.daemonSessionRefs) { @@ -501,6 +512,7 @@ export class WsBridge { this.authenticated = false; this.recentTextBySession.clear(); this.activeMainSessions.clear(); + this.activeSubSessions.clear(); this.hasActiveMainSessionSnapshot = false; this.rejectAllPendingFileTransfers('daemon_disconnected'); this.rejectAllPendingHttpTimelineRequests('daemon_disconnected'); @@ -512,7 +524,7 @@ export class WsBridge { this.broadcastToBrowsers(JSON.stringify({ type: TRANSPORT_MSG.PROVIDER_STATUS, providerId, connected: false })); } this.providerStatus.clear(); - this.broadcastToBrowsers(JSON.stringify({ type: 'daemon.disconnected' })); + this.broadcastToBrowsers(JSON.stringify({ type: DAEMON_MSG.DISCONNECTED })); void clearProviderStatus(db, this.serverId).catch(() => {}); updateServerStatus(db, this.serverId, 'offline').catch((err) => logger.error({ err }, 'Failed to mark server offline'), @@ -843,10 +855,10 @@ export class WsBridge { return; } - if (type === 'session_list') { - this.replaceActiveMainSessions(msg.sessions); - this.pruneMainSessionRecentText(msg.sessions); - this.broadcastToBrowsers(JSON.stringify({ + if (type === 'session_list') { + this.replaceActiveMainSessions(msg.sessions); + this.pruneMainSessionRecentText(msg.sessions); + this.broadcastToBrowsers(JSON.stringify({ ...msg, daemonVersion: typeof msg.daemonVersion === 'string' ? 
msg.daemonVersion : this.daemonVersion, })); @@ -937,6 +949,13 @@ export class WsBridge { // ── Sub-session sync: daemon creates sub-sessions → persist to DB ──────── if (type === 'subsession.sync' && this.db) { + const subSessionName = `deck_sub_${String(msg.id ?? '')}`; + if (subSessionName !== 'deck_sub_') { + const label = typeof msg.label === 'string' && msg.label.trim() ? msg.label.trim() : undefined; + const parentSession = typeof msg.parentSession === 'string' && msg.parentSession ? msg.parentSession : undefined; + const agentType = typeof msg.sessionType === 'string' && msg.sessionType ? msg.sessionType : undefined; + this.activeSubSessions.set(subSessionName, { name: subSessionName, label, parentSession, agentType }); + } void createSubSession( this.db, msg.id as string, @@ -983,7 +1002,7 @@ export class WsBridge { quotaLabel: msg.quotaLabel || null, quotaUsageLabel: msg.quotaUsageLabel || null, quotaMeta: msg.quotaMeta || null, - state: 'running', + state: (msg.state as string) || 'idle', })); }).catch((e) => logger.error({ err: e, id: msg.id }, 'Failed to sync sub-session to DB')); return; @@ -1053,10 +1072,17 @@ export class WsBridge { if (type === 'subsession.closed' && this.db) { const id = msg.id as string; if (id) { - this.recentTextBySession.delete(`deck_sub_${id}`); void this.db.execute('UPDATE sub_sessions SET closed_at = $1 WHERE id = $2 AND server_id = $3', - [Date.now(), id, this.serverId]).catch(() => {}); - this.broadcastToBrowsers(JSON.stringify({ type: 'subsession.removed', id, sessionName: msg.sessionName })); + [Date.now(), id, this.serverId]) + .then(() => { + const sessionName = `deck_sub_${id}`; + this.recentTextBySession.delete(sessionName); + this.activeSubSessions.delete(sessionName); + this.broadcastToBrowsers(JSON.stringify({ type: 'subsession.removed', id, sessionName: msg.sessionName })); + }) + .catch((err) => { + logger.error({ err, id, sessionName: msg.sessionName }, 'Failed to persist sub-session close from daemon'); + }); 
} return; } @@ -2123,6 +2149,88 @@ export class WsBridge { private lastPushAt = new Map(); private static readonly PUSH_DEDUP_MS = 10_000; + private async resolveReadablePushDisplay( + db: Database, + sessionName: string, + daemonLabel: string | undefined, + daemonParentLabel: string | undefined, + daemonProject: string | undefined, + ): Promise<{ + displayName: string; + agentType?: string; + }> { + const visited = new Set(); + const activeMainSession = this.activeMainSessions.get(sessionName); + let effectiveAgentType = pickReadableSessionDisplay([activeMainSession?.agentType], sessionName); + let currentSessionName: string | undefined = sessionName; + let displayName = pickReadableSessionDisplay([daemonLabel], sessionName); + + while (currentSessionName && !visited.has(currentSessionName)) { + visited.add(currentSessionName); + + const active = this.activeMainSessions.get(currentSessionName); + const activeSubSession = this.activeSubSessions.get(currentSessionName); + let sessionRow = await db.queryOne<{ project_name: string; agent_type: string; label: string | null }>( + 'SELECT project_name, agent_type, label FROM sessions WHERE server_id = $1 AND name = $2 LIMIT 1', + [this.serverId, currentSessionName], + ).catch(() => null); + + let parentSession: string | undefined; + let subType: string | undefined; + if (activeSubSession) { + subType = activeSubSession.agentType; + parentSession = activeSubSession.parentSession; + const activeSubDisplay = pickReadableSessionDisplay([activeSubSession.label], currentSessionName); + if (!displayName && activeSubDisplay) displayName = activeSubDisplay; + } + if (!sessionRow && currentSessionName.startsWith('deck_sub_')) { + const subRow: { type: string; label: string | null; parent_session: string | null } | null = await db + .queryOne<{ type: string; label: string | null; parent_session: string | null }>( + 'SELECT type, label, parent_session FROM sub_sessions WHERE server_id = $1 AND id = $2 LIMIT 1', + [this.serverId, 
currentSessionName.replace(/^deck_sub_/, '')], + ) + .catch(() => null); + if (subRow) { + subType = subRow.type; + parentSession = subRow.parent_session ?? undefined; + const subDisplay = pickReadableSessionDisplay([subRow.label], currentSessionName); + if (!displayName && subDisplay) displayName = subDisplay; + } + } + + effectiveAgentType = effectiveAgentType + || subType + || active?.agentType + || sessionRow?.agent_type + || undefined; + + if (!displayName) { + const candidate = pickReadableSessionDisplay( + [ + active?.label, + sessionRow?.label, + active?.project, + sessionRow?.project_name, + ], + currentSessionName, + ); + if (candidate) displayName = candidate; + } + + if (displayName) break; + currentSessionName = parentSession; + } + + displayName = displayName + || pickReadableSessionDisplay([activeMainSession?.label, activeMainSession?.project, daemonParentLabel, daemonProject], sessionName) + || sessionName; + + return { + displayName, + ...(effectiveAgentType ? { agentType: effectiveAgentType } : {}), + }; + } + private async dispatchEventPush(db: Database, env: Env, msg: Record): Promise { // Always send APNs push — iOS handles foreground display via UNUserNotificationCenterDelegate. // Badge count must increment regardless of app state. @@ -2137,83 +2245,42 @@ export class WsBridge { const server = await db.queryOne<{ user_id: string; name: string }>('SELECT user_id, name FROM servers WHERE id = $1', [this.serverId]); if (!server) return; - const { dispatchPush } = await import('../routes/push.js').catch(() => ({ dispatchPush: null })); + const { dispatchPush } = await import('../routes/push.js').catch((err) => { + logger.error({ err }, 'Failed to import push module — push notifications disabled'); + return { dispatchPush: null }; + }); if (!dispatchPush) return; const sessionName = String(msg.session ?? msg.sessionId ?? ''); const eventType = String(msg.type ?? 
''); - - // Prefer label/parentLabel from daemon message (has full context including sub-sessions) const daemonLabel = msg.label ? String(msg.label) : undefined; const daemonParentLabel = msg.parentLabel ? String(msg.parentLabel) : undefined; - - // Look up session metadata for human-readable push content - // Check sessions table first, then sub_sessions for sub-session names - let sessionRow = await db.queryOne<{ project_name: string; agent_type: string; label: string | null }>( - 'SELECT project_name, agent_type, label FROM sessions WHERE server_id = $1 AND name = $2 LIMIT 1', - [this.serverId, sessionName], - ).catch(() => null); - - let subType: string | undefined; - if (!sessionRow) { - // Try sub_sessions table: session name is deck_sub_{id} - const subMatch = sessionName.match(/^deck_sub_([a-zA-Z0-9]+)$/); - const subRow = subMatch ? await db.queryOne<{ type: string; label: string | null; parent_session: string }>( - 'SELECT type, label, parent_session FROM sub_sessions WHERE server_id = $1 AND id = $2 LIMIT 1', - [this.serverId, subMatch[1]], - ).catch(() => null) : null; - if (subRow) { - subType = subRow.type; - // Look up parent session for context - if (!daemonParentLabel && subRow.parent_session) { - const parentRow = await db.queryOne<{ project_name: string; label: string | null }>( - 'SELECT project_name, label FROM sessions WHERE server_id = $1 AND name = $2 LIMIT 1', - [this.serverId, subRow.parent_session], - ).catch(() => null); - if (parentRow) { - sessionRow = { project_name: parentRow.project_name, agent_type: subRow.type, label: subRow.label }; - } - } - } - } - - // Build display name: label(type)@mainSession — fallback label to sub-session ID - const label = daemonLabel || sessionRow?.label; - const agentType = subType || sessionRow?.agent_type || String(msg.agentType ?? 
''); - const parentContext = daemonParentLabel || sessionRow?.project_name; - const isSub = sessionName.startsWith('deck_sub_'); - - let displayName: string; - const typeSuffix = agentType ? `(${agentType})` : ''; - if (isSub) { - // Prefer label, fallback to agentType, last resort raw ID - const name = label || agentType || sessionName.replace(/^deck_sub_/, ''); - displayName = `${name}${label ? typeSuffix : ''}${parentContext ? `@${parentContext}` : ''}`; - } else { - const name = label || parentContext || sessionName; - displayName = `${name}${typeSuffix}`; - } - const agentLabel = ''; + const daemonProject = msg.project ? String(msg.project) : undefined; + const resolved = await this.resolveReadablePushDisplay(db, sessionName, daemonLabel, daemonParentLabel, daemonProject); + const displayName = resolved.displayName; + const agentType = resolved.agentType || String(msg.agentType ?? ''); + const titleParts = [server.name, displayName]; + if (agentType) titleParts.push(agentType); const lastText = String(msg.lastText ?? msg.message ?? '').slice(0, 200); let title: string; let body: string; switch (eventType) { case 'session.idle': - title = `${server.name} · ${displayName}${agentLabel}`; + title = titleParts.join(' · '); body = lastText || 'Task complete — ready for input'; break; case 'session.notification': { - title = `${server.name} · ${displayName}`; + title = titleParts.join(' · '); body = String(msg.message ?? 'Notification'); break; } case 'session.error': - title = `${server.name} · ${displayName}`; + title = titleParts.join(' · '); body = `Error: ${String(msg.error ?? 
'unknown')}`; break; case 'ask.question': - title = `${server.name} · ${displayName}${agentLabel}`; + title = titleParts.join(' · '); body = lastText || 'Waiting for your answer'; break; default: diff --git a/server/test/bridge.test.ts b/server/test/bridge.test.ts index 3f37d6d03..5d5671852 100644 --- a/server/test/bridge.test.ts +++ b/server/test/bridge.test.ts @@ -1354,7 +1354,7 @@ describe('WsBridge', () => { expect(msg.activeModel).toBe('sonnet'); expect(msg.effort).toBe('high'); expect(msg.transportConfig).toEqual({ provider: { mode: 'safe' } }); - expect(msg.state).toBe('running'); + expect(msg.state).toBe('idle'); }); it('subsession.closed from daemon → updates DB + broadcasts subsession.removed to browsers', async () => { @@ -1375,6 +1375,36 @@ describe('WsBridge', () => { expect(msg.sessionName).toBe('deck_sub_sub-456'); }); + it('subsession.closed does not broadcast removal when DB persistence fails', async () => { + const bridge = WsBridge.get(serverId); + const daemonWs = new MockWs(); + const failingDb = { + ...makeDb('valid-hash'), + execute: vi.fn().mockImplementation(async (sql: string) => { + if (sql.includes('UPDATE sub_sessions SET closed_at')) { + throw new Error('db write failed'); + } + return { changes: 1 }; + }), + } as unknown as import('../src/db/client.js').Database; + bridge.handleDaemonConnection(daemonWs as never, failingDb, {} as never); + daemonWs.emit('message', JSON.stringify({ type: 'auth', serverId, token: 't' })); + await flushAsync(); + + const browserWs = new MockWs(); + bridge.handleBrowserConnection(browserWs as never, 'test-user', failingDb); + browserWs.sent.length = 0; + + daemonWs.emit('message', JSON.stringify({ + type: 'subsession.closed', + id: 'sub-456', + sessionName: 'deck_sub_sub-456', + })); + await flushAsync(); + + expect(browserWs.sentStrings).toHaveLength(0); + }); + it('subsession.closed without id → no broadcast', async () => { const { daemonWs, browserWs } = await setupAuthBridge(); @@ -1388,6 +1418,46 @@ 
describe('WsBridge', () => { expect(browserWs.sentStrings).toHaveLength(0); }); + it('subsession.closed clears only the matching descendant cache while preserving other sub-sessions', async () => { + const { bridge, daemonWs, browserWs } = await setupAuthBridge(); + + daemonWs.emit('message', JSON.stringify({ + type: 'timeline.event', + event: { + eventId: 'sub-a-1', + sessionId: 'deck_sub_alpha', + ts: 1, + type: 'assistant.text', + payload: { text: 'alpha text' }, + }, + })); + daemonWs.emit('message', JSON.stringify({ + type: 'timeline.event', + event: { + eventId: 'sub-b-1', + sessionId: 'deck_sub_beta', + ts: 2, + type: 'assistant.text', + payload: { text: 'beta text' }, + }, + })); + await flushAsync(); + expect(bridge.getRecentText('deck_sub_alpha')).toHaveLength(1); + expect(bridge.getRecentText('deck_sub_beta')).toHaveLength(1); + + daemonWs.emit('message', JSON.stringify({ + type: 'subsession.closed', + id: 'alpha', + sessionName: 'deck_sub_alpha', + })); + await flushAsync(); + + expect(bridge.getRecentText('deck_sub_alpha')).toHaveLength(0); + expect(bridge.getRecentText('deck_sub_beta')).toHaveLength(1); + const msg = JSON.parse(browserWs.sentStrings.at(-1) ?? 
'{}'); + expect(msg).toMatchObject({ type: 'subsession.removed', id: 'alpha', sessionName: 'deck_sub_alpha' }); + }); + it('p2p.conflict from daemon → broadcasts to all browsers', async () => { const bridge = WsBridge.get(serverId); const daemonWs = new MockWs(); @@ -1438,10 +1508,30 @@ describe('WsBridge', () => { describe('push notifications', () => { function makePushDb(tokenHash: string) { return { - queryOne: async (sql: string) => { + queryOne: async (sql: string, params?: unknown[]) => { if (sql.includes('FROM servers')) return { token_hash: tokenHash, user_id: 'user-1', name: 'my-server' }; - if (sql.includes('FROM sessions')) return { project_name: 'codedeck', agent_type: 'claude-code', label: null }; - return { token_hash: tokenHash }; + if (sql.includes('FROM sessions') && params?.[1] === 'deck_cd_brain') { + return { project_name: 'codedeck', agent_type: 'claude-code', label: null }; + } + if (sql.includes('FROM sessions') && params?.[1] === 'bootmainxowfy6') { + return { project_name: 'codedeck', agent_type: 'claude-code', label: 'Boot Main' }; + } + if (sql.includes('FROM sub_sessions')) { + if (params?.[1] === 'unlabeled') { + return { type: 'codex', label: null, parent_session: '' }; + } + if (params?.[1] === 'needs-main-label') { + return { type: 'codex', label: null, parent_session: 'bootmainxowfy6' }; + } + if (params?.[1] === 'nested') { + return { type: 'shell', label: null, parent_session: 'deck_sub_parent' }; + } + if (params?.[1] === 'parent') { + return { type: 'codex', label: null, parent_session: 'deck_cd_brain' }; + } + return { type: 'codex', label: 'worker-1', parent_session: 'deck_cd_brain' }; + } + return null; }, query: async () => [], execute: async () => ({ changes: 1 }), @@ -1473,12 +1563,176 @@ describe('WsBridge', () => { expect(dispatchPush).toHaveBeenCalled(); const call = vi.mocked(dispatchPush).mock.calls[0]; const payload = call[0]; - expect(payload.title).toContain('my-server'); - 
expect(payload.title).toContain('codedeck'); - expect(payload.title).toContain('claude-code'); + expect(payload.title).toBe('my-server · codedeck · claude-code'); expect(payload.body).toContain('Done implementing'); }); + it('prefers sub-session label over session name in push title', async () => { + const { dispatchPush } = await import('../src/routes/push.js'); + const { daemonWs } = await setupPushBridge(); + + daemonWs.emit('message', JSON.stringify({ + type: 'session.idle', + session: 'deck_sub_ab12cd34', + lastText: 'Stopped early.', + })); + await flushAsync(); + + const payload = vi.mocked(dispatchPush).mock.calls[0][0]; + expect(payload.title).toBe('my-server · worker-1 · codex'); + expect(payload.title).not.toContain('deck_sub_ab12cd34'); + }); + + it('resolves hyphenated sub-session ids before falling back to internal session names', async () => { + const { dispatchPush } = await import('../src/routes/push.js'); + const { daemonWs } = await setupPushBridge(); + + daemonWs.emit('message', JSON.stringify({ + type: 'session.idle', + session: 'deck_sub_sub-123', + lastText: 'Done.', + })); + await flushAsync(); + + const payload = vi.mocked(dispatchPush).mock.calls[0][0]; + expect(payload.title).toBe('my-server · worker-1 · codex'); + expect(payload.title).not.toContain('deck_sub_sub-123'); + }); + + it('prefers active session snapshot labels over internal main session names in push title', async () => { + const { dispatchPush } = await import('../src/routes/push.js'); + const { daemonWs } = await setupPushBridge(); + + daemonWs.emit('message', JSON.stringify({ + type: 'session_list', + sessions: [{ + name: 'bootmainxowfy6', + project: 'codedeck', + state: 'idle', + agentType: 'claude-code', + label: 'Boot Main', + }], + })); + await flushAsync(); + + daemonWs.emit('message', JSON.stringify({ + type: 'session.idle', + session: 'bootmainxowfy6', + lastText: 'Ready.', + })); + await flushAsync(); + + const payload = 
vi.mocked(dispatchPush).mock.calls.at(-1)?.[0]; + expect(payload?.title).toBe('my-server · Boot Main · claude-code'); + expect(payload?.title).not.toContain('bootmainxowfy6'); + }); + + it('prefers stored main-session labels before daemon project fallbacks in push title', async () => { + const { dispatchPush } = await import('../src/routes/push.js'); + const { daemonWs } = await setupPushBridge(); + + daemonWs.emit('message', JSON.stringify({ + type: 'session.idle', + session: 'bootmainxowfy6', + project: 'Readable Main', + agentType: 'claude-code', + lastText: 'Ready.', + })); + await flushAsync(); + + const payload = vi.mocked(dispatchPush).mock.calls.at(-1)?.[0]; + expect(payload?.title).toBe('my-server · Boot Main · claude-code'); + expect(payload?.title).not.toContain('bootmainxowfy6'); + }); + + it('uses parent/project fallback before internal sub-session names in push title', async () => { + const { dispatchPush } = await import('../src/routes/push.js'); + const { daemonWs } = await setupPushBridge(); + + daemonWs.emit('message', JSON.stringify({ + type: 'session.idle', + session: 'deck_sub_unlabeled', + project: 'Readable Main', + agentType: 'codex', + lastText: 'Ready.', + })); + await flushAsync(); + + const payload = vi.mocked(dispatchPush).mock.calls.at(-1)?.[0]; + expect(payload?.title).toBe('my-server · Readable Main · codex'); + expect(payload?.title).not.toContain('deck_sub_unlabeled'); + }); + + it('walks nested sub-session parents until it finds a readable main-session title', async () => { + const { dispatchPush } = await import('../src/routes/push.js'); + const { daemonWs } = await setupPushBridge(); + + daemonWs.emit('message', JSON.stringify({ + type: 'session.idle', + session: 'deck_sub_nested', + project: 'deck_sub_nested', + parentLabel: 'deck_sub_parent', + agentType: 'shell', + lastText: 'Ready.', + })); + await flushAsync(); + + const payload = vi.mocked(dispatchPush).mock.calls.at(-1)?.[0]; + expect(payload?.title).toBe('my-server · 
codedeck · shell'); + expect(payload?.title).not.toContain('deck_sub_nested'); + expect(payload?.title).not.toContain('deck_sub_parent'); + }); + + it('prefers stored parent labels over opaque daemon parent/project names in push title', async () => { + const { dispatchPush } = await import('../src/routes/push.js'); + const { daemonWs } = await setupPushBridge(); + + daemonWs.emit('message', JSON.stringify({ + type: 'session.idle', + session: 'deck_sub_needs-main-label', + parentLabel: 'bootmainxowfy6', + project: 'bootmainxowfy6', + agentType: 'codex', + lastText: 'Ready.', + })); + await flushAsync(); + + const payload = vi.mocked(dispatchPush).mock.calls.at(-1)?.[0]; + expect(payload?.title).toBe('my-server · Boot Main · codex'); + expect(payload?.title).not.toContain('bootmainxowfy6'); + }); + + it('uses cached sub-session labels for timeline idle pushes before explicit session.idle arrives', async () => { + const { dispatchPush } = await import('../src/routes/push.js'); + const { daemonWs } = await setupPushBridge(); + + daemonWs.emit('message', JSON.stringify({ + type: 'subsession.sync', + id: 'timeline-worker', + sessionType: 'codex', + label: 'Worker Timeline', + parentSession: 'deck_cd_brain', + })); + await flushAsync(); + vi.mocked(dispatchPush).mockClear(); + + daemonWs.emit('message', JSON.stringify({ + type: 'timeline.event', + event: { + sessionId: 'deck_sub_timeline-worker', + eventId: 'evt-1', + ts: Date.now(), + type: 'session.state', + payload: { state: 'idle' }, + }, + })); + await flushAsync(); + + const payload = vi.mocked(dispatchPush).mock.calls.at(-1)?.[0]; + expect(payload?.title).toBe('my-server · Worker Timeline · codex'); + expect(payload?.title).not.toContain('deck_sub_timeline-worker'); + }); + it('uses lastText as push body for session.idle', async () => { const { dispatchPush } = await import('../src/routes/push.js'); const { daemonWs } = await setupPushBridge(); diff --git a/server/test/cron-api.test.ts 
b/server/test/cron-api.test.ts index f587e550d..b95c1f47f 100644 --- a/server/test/cron-api.test.ts +++ b/server/test/cron-api.test.ts @@ -217,6 +217,30 @@ describe('Cron API routes', () => { expect(body.action).toEqual({ type: 'command', command: '/status' }); }); + it('accepts a null targetSessionName for main-session cron jobs', async () => { + const res = await app.request('/api/cron', jsonReq('POST', '/api/cron', { + ...validCommandBody, + targetSessionName: null, + })); + expect(res.status).toBe(201); + const body = await res.json() as Record; + expect(body.targetSessionName).toBeNull(); + }); + + it('accepts long command prompts well beyond 1500 characters', async () => { + const longCommand = '早上好主人!'.repeat(260); + expect(longCommand.length).toBeGreaterThan(1500); + + const res = await app.request('/api/cron', jsonReq('POST', '/api/cron', { + ...validCommandBody, + targetSessionName: null, + action: { type: 'command', command: longCommand }, + })); + expect(res.status).toBe(201); + const body = await res.json() as Record; + expect(body.action).toEqual({ type: 'command', command: longCommand }); + }); + it('command action missing `command` field returns 400', async () => { const res = await app.request('/api/cron', jsonReq('POST', '/api/cron', { ...validCommandBody, diff --git a/server/test/local-web-preview-registry.test.ts b/server/test/local-web-preview-registry.test.ts index 93c49573d..80223bb70 100644 --- a/server/test/local-web-preview-registry.test.ts +++ b/server/test/local-web-preview-registry.test.ts @@ -18,12 +18,11 @@ describe('LocalWebPreviewRegistry', () => { vi.useRealTimers(); }); - it('creates opaque preview ids and updates access time on get', () => { + it('creates hex preview ids and updates access time on get', () => { const registry = getRegistry(); const { preview, accessToken } = registry.create('user1', 3000, '/docs'); expect(preview.id).toMatch(/^[a-f0-9]{48}$/); - expect(preview.id).not.toContain('3000'); 
expect(accessToken).toMatch(/^[a-f0-9]{48}$/); vi.setSystemTime(now + 5_000); diff --git a/server/test/quick-data-route.test.ts b/server/test/quick-data-route.test.ts new file mode 100644 index 000000000..39954ebe2 --- /dev/null +++ b/server/test/quick-data-route.test.ts @@ -0,0 +1,84 @@ +import { describe, it, expect, vi, beforeEach } from 'vitest'; +import { Hono } from 'hono'; + +const getQuickDataMock = vi.fn(); +const upsertQuickDataMock = vi.fn(); + +vi.mock('../src/security/authorization.js', () => ({ + requireAuth: () => async (c: any, next: any) => { + c.set('userId', 'test-user'); + return next(); + }, +})); + +vi.mock('../src/db/queries.js', () => ({ + getQuickData: (...args: unknown[]) => getQuickDataMock(...args), + upsertQuickData: (...args: unknown[]) => upsertQuickDataMock(...args), +})); + +import { quickDataRoutes } from '../src/routes/quick-data.js'; + +const app = new Hono(); +app.use('/*', async (c, next) => { + (c as any).env = { DB: {} }; + return next(); +}); +app.route('/api/quick-data', quickDataRoutes); + +describe('quick-data routes', () => { + beforeEach(() => { + vi.clearAllMocks(); + getQuickDataMock.mockResolvedValue({ + history: ['keep history'], + sessionHistory: { 'deck_a': ['keep session'] }, + commands: ['/status'], + phrases: ['old phrase', 'keep phrase'], + }); + }); + + it('replaces removed custom phrases instead of merging them back', async () => { + const res = await app.request('/api/quick-data', { + method: 'PUT', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ + data: { + history: ['keep history'], + sessionHistory: { 'deck_a': ['keep session'] }, + commands: ['/status'], + phrases: ['keep phrase'], + }, + }), + }); + + expect(res.status).toBe(200); + expect(upsertQuickDataMock).toHaveBeenCalledWith({}, 'test-user', { + history: ['keep history'], + sessionHistory: { 'deck_a': ['keep session'] }, + commands: ['/status'], + phrases: ['keep phrase'], + }); + }); + + it('persists edited custom 
phrases as replacements', async () => { + const res = await app.request('/api/quick-data', { + method: 'PUT', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ + data: { + history: ['keep history'], + sessionHistory: { 'deck_a': ['keep session'] }, + commands: ['/status'], + phrases: ['updated phrase'], + }, + }), + }); + + expect(res.status).toBe(200); + expect(upsertQuickDataMock).toHaveBeenCalledWith({}, 'test-user', { + history: ['keep history'], + sessionHistory: { 'deck_a': ['keep session'] }, + commands: ['/status'], + phrases: ['updated phrase'], + }); + }); +}); diff --git a/server/test/session-mgmt-routes.test.ts b/server/test/session-mgmt-routes.test.ts index 8061ecba7..f039268b8 100644 --- a/server/test/session-mgmt-routes.test.ts +++ b/server/test/session-mgmt-routes.test.ts @@ -4,6 +4,7 @@ import { Hono } from 'hono'; const mockResolveServerRole = vi.fn<() => Promise>().mockResolvedValue('owner'); const mockUpsertDbSession = vi.fn(); const mockUpdateSession = vi.fn(); +const sendToDaemonMock = vi.fn(); vi.mock('../src/security/authorization.js', () => ({ requireAuth: () => async (c: { set: (key: string, value: string) => void }, next: () => Promise) => { @@ -31,7 +32,7 @@ vi.mock('../src/security/crypto.js', () => ({ vi.mock('../src/ws/bridge.js', () => ({ WsBridge: { get: () => ({ - sendToDaemon: vi.fn(), + sendToDaemon: sendToDaemonMock, }), }, })); @@ -128,4 +129,36 @@ describe('session-mgmt persistence routes', () => { }, ); }); + + it('PATCH /sessions/:name relays session.restart when agentType changes', async () => { + const app = await buildApp(); + const res = await app.request('/api/server/srv-1/sessions/deck_proj_brain', { + method: 'PATCH', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ + agentType: 'codex-sdk', + cwd: '/tmp/next', + description: 'next persona', + }), + }); + + expect(res.status).toBe(200); + expect(mockUpdateSession).toHaveBeenCalledWith( + {}, + 'srv-1', + 
'deck_proj_brain', + { + description: 'next persona', + project_dir: '/tmp/next', + }, + ); + expect(sendToDaemonMock).toHaveBeenCalledTimes(1); + expect(JSON.parse(String(sendToDaemonMock.mock.calls[0]?.[0]))).toEqual({ + type: 'session.restart', + sessionName: 'deck_proj_brain', + agentType: 'codex-sdk', + cwd: '/tmp/next', + description: 'next persona', + }); + }); }); diff --git a/server/test/sub-sessions-route.test.ts b/server/test/sub-sessions-route.test.ts index 5ab5f38d7..8ef833217 100644 --- a/server/test/sub-sessions-route.test.ts +++ b/server/test/sub-sessions-route.test.ts @@ -2,6 +2,8 @@ import { describe, it, expect, vi, beforeEach } from 'vitest'; import { Hono } from 'hono'; const createSubSessionMock = vi.fn(); +const updateSubSessionMock = vi.fn(); +const sendToDaemonMock = vi.fn(); vi.mock('../src/security/authorization.js', () => ({ requireAuth: () => async (c: any, next: any) => { @@ -15,11 +17,19 @@ vi.mock('../src/db/queries.js', () => ({ getSubSessionsByServer: vi.fn(), getSubSessionById: vi.fn(), createSubSession: (...args: unknown[]) => createSubSessionMock(...args), - updateSubSession: vi.fn(), + updateSubSession: (...args: unknown[]) => updateSubSessionMock(...args), deleteSubSession: vi.fn(), reorderSubSessions: vi.fn(), })); +vi.mock('../src/ws/bridge.js', () => ({ + WsBridge: { + get: () => ({ + sendToDaemon: sendToDaemonMock, + }), + }, +})); + import { subSessionRoutes } from '../src/routes/sub-sessions.js'; const app = new Hono(); @@ -119,4 +129,61 @@ describe('sub-session routes', () => { null, ); }); + + it('PATCH /sub-sessions/:id relays subsession.restart when type changes', async () => { + const { getSubSessionById } = await import('../src/db/queries.js'); + vi.mocked(getSubSessionById).mockResolvedValue({ + id: 'sub12345', + server_id: 'srv1', + type: 'codex', + } as any); + + const res = await app.request('/api/server/srv1/sub-sessions/sub12345', { + method: 'PATCH', + headers: { 'Content-Type': 'application/json' }, + body: 
JSON.stringify({ + type: 'codex-sdk', + cwd: '/tmp/next', + }), + }); + + expect(res.status).toBe(200); + expect(updateSubSessionMock).toHaveBeenCalledWith( + {}, + 'sub12345', + 'srv1', + { + cwd: '/tmp/next', + }, + ); + expect(sendToDaemonMock).toHaveBeenCalledTimes(1); + expect(JSON.parse(String(sendToDaemonMock.mock.calls[0]?.[0]))).toEqual({ + type: 'subsession.restart', + sessionName: 'deck_sub_sub12345', + agentType: 'codex-sdk', + cwd: '/tmp/next', + }); + }); + + it('PATCH /sub-sessions/:id rejects browser-managed closedAt updates', async () => { + const { getSubSessionById } = await import('../src/db/queries.js'); + vi.mocked(getSubSessionById).mockResolvedValue({ + id: 'sub12345', + server_id: 'srv1', + type: 'codex', + } as any); + + const res = await app.request('/api/server/srv1/sub-sessions/sub12345', { + method: 'PATCH', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ + closedAt: Date.now(), + }), + }); + + expect(res.status).toBe(400); + await expect(res.json()).resolves.toEqual({ error: 'closed_at_managed_by_daemon' }); + expect(updateSubSessionMock).not.toHaveBeenCalled(); + expect(sendToDaemonMock).not.toHaveBeenCalled(); + }); }); diff --git a/shared/agent-types.ts b/shared/agent-types.ts new file mode 100644 index 000000000..5967d8a5d --- /dev/null +++ b/shared/agent-types.ts @@ -0,0 +1,39 @@ +export const SESSION_AGENT_TYPES = [ + 'claude-code-sdk', + 'claude-code', + 'codex-sdk', + 'codex', + 'opencode', + 'gemini', + 'qwen', + 'openclaw', + 'shell', + 'script', +] as const; + +export type SessionAgentType = typeof SESSION_AGENT_TYPES[number]; + +export const CLAUDE_CODE_FAMILY = ['claude-code-sdk', 'claude-code'] as const; +export const CODEX_FAMILY = ['codex-sdk', 'codex'] as const; +export const TRANSPORT_SESSION_AGENT_TYPES = ['claude-code-sdk', 'codex-sdk', 'qwen', 'openclaw'] as const; +export const PROCESS_SESSION_AGENT_TYPES = ['claude-code', 'codex', 'opencode', 'gemini', 'shell', 'script'] as const; + 
+export function isSessionAgentType(value: string): value is SessionAgentType { + return (SESSION_AGENT_TYPES as readonly string[]).includes(value); +} + +export function isClaudeCodeFamily(value: string): value is typeof CLAUDE_CODE_FAMILY[number] { + return (CLAUDE_CODE_FAMILY as readonly string[]).includes(value); +} + +export function isCodexFamily(value: string): value is typeof CODEX_FAMILY[number] { + return (CODEX_FAMILY as readonly string[]).includes(value); +} + +export function isTransportSessionAgentType(value: string): value is typeof TRANSPORT_SESSION_AGENT_TYPES[number] { + return (TRANSPORT_SESSION_AGENT_TYPES as readonly string[]).includes(value); +} + +export function getSessionRuntimeType(value: string): 'transport' | 'process' { + return isTransportSessionAgentType(value) ? 'transport' : 'process'; +} diff --git a/shared/daemon-events.ts b/shared/daemon-events.ts new file mode 100644 index 000000000..cc77bdf84 --- /dev/null +++ b/shared/daemon-events.ts @@ -0,0 +1,7 @@ +export const DAEMON_MSG = { + RECONNECTED: 'daemon.reconnected', + DISCONNECTED: 'daemon.disconnected', + UPGRADE_BLOCKED: 'daemon.upgrade_blocked', +} as const; + +export type DaemonMessageType = (typeof DAEMON_MSG)[keyof typeof DAEMON_MSG]; diff --git a/shared/p2p-modes.ts b/shared/p2p-modes.ts index 267b7ed4b..ec9e24481 100644 --- a/shared/p2p-modes.ts +++ b/shared/p2p-modes.ts @@ -16,10 +16,43 @@ export interface P2pSavedConfig { rounds: number; /** User-defined extra prompt appended to every participant's system prompt. */ extraPrompt?: string; - /** Per-hop timeout in minutes. Default: 5. */ + /** Per-hop timeout in minutes. Default: 8. 
*/ hopTimeoutMinutes?: number; } + +export interface P2pConfigSelection { + config: P2pSavedConfig; + rounds: number; + modeOverride: string; +} + +export function buildEffectiveP2pConfig(config: P2pSavedConfig, modeOverride: string): P2pSavedConfig { + if (modeOverride === P2P_CONFIG_MODE) return config; + const overriddenSessions: P2pSavedConfig['sessions'] = {}; + for (const [session, entry] of Object.entries(config.sessions)) { + overriddenSessions[session] = entry.enabled && entry.mode !== 'skip' + ? { ...entry, mode: modeOverride } + : { ...entry }; + } + return { ...config, sessions: overriddenSessions }; +} + +export function buildP2pConfigSelection( + config: P2pSavedConfig, + modeOverride: string, + rounds = config.rounds ?? 1, +): P2pConfigSelection { + const effectiveMode = isComboMode(modeOverride) + ? (parseModePipeline(modeOverride)[0] ?? modeOverride) + : modeOverride; + return { + config: buildEffectiveP2pConfig(config, effectiveMode), + rounds: getComboRoundCount(modeOverride) ?? rounds, + modeOverride, + }; +} + /** Round-aware prompt wrapper — prepended to the mode's base prompt. */ export function roundPrompt(round: number, totalRounds: number, modeKey?: string): string { if (totalRounds <= 1) return ''; @@ -92,22 +125,24 @@ export const BUILT_IN_MODES: P2pMode[] = [ }, { key: 'plan', - prompt: 'You are a technical architect. Design an implementation plan for the provided context. Break down the work into clear steps, identify dependencies and risks, define acceptance criteria, and suggest the optimal execution order. Be specific about files, interfaces, and data flow.', + prompt: 'You are a technical architect. Design an implementation plan for the provided context. Use the user request and the discussion evidence to produce a complete, detailed execution plan. Break down the work into clear steps, identify dependencies and risks, define concrete acceptance and validation criteria, and suggest the optimal execution order. 
Be specific about files, interfaces, data flow, and how the work will be verified.', callbackRequired: true, defaultTimeoutMs: 300_000, resultStyle: 'findings-first', maxOutputChars: 12_000, summaryPrompt: - 'Write a complete **Implementation Plan** that synthesizes all discussion into an actionable blueprint. Structure it as:\n' + - '1. **Goal** — one-paragraph statement of what this plan achieves and why\n' + - '2. **Architecture Overview** — key components, data flow, and interfaces involved\n' + - '3. **Implementation Phases** — ordered list of phases, each with:\n' + - ' - Specific tasks with file paths and interface changes\n' + - ' - Dependencies (what must be done before this phase)\n' + - ' - Acceptance criteria (how to verify this phase is complete)\n' + - '4. **Risk Assessment** — identified risks with mitigation strategies\n' + - '5. **Open Questions** — unresolved decisions that need stakeholder input\n' + - 'Be precise: name files, functions, types, and data structures. This plan should be directly executable by a developer.', + 'Write a complete **Implementation Plan** that synthesizes the user request and all discussion evidence into an actionable blueprint. Structure it as:\n' + + '1. **Goal and Scope** — what must be delivered, what is in scope, and what is explicitly out of scope\n' + + '2. **Current Context** — the relevant existing behavior, constraints, and discussion conclusions that drive the plan\n' + + '3. **Architecture Overview** — key components, data flow, interfaces, and state transitions involved\n' + + '4. **Implementation Phases** — ordered list of phases, each with:\n' + + ' - Specific tasks with file paths, function/type/interface changes, and sequencing\n' + + ' - Dependencies and prerequisites\n' + + ' - Edge cases, failure handling, and rollout notes when relevant\n' + + '5. **Acceptance and Validation** — explicit acceptance criteria plus concrete verification steps and tests for each major behavior\n' + + '6. 
**Risk Assessment** — identified risks with mitigation strategies\n' + + '7. **Open Questions** — unresolved decisions that need stakeholder input\n' + + 'Be precise: name files, functions, types, data structures, and test coverage. The final plan must be detailed enough for direct implementation and QA handoff.', }, { key: 'brainstorm', @@ -165,11 +200,9 @@ export interface P2pComboPreset { export const COMBO_PRESETS: P2pComboPreset[] = [ { key: 'brainstorm>discuss>plan', pipeline: ['brainstorm', 'discuss', 'plan'] }, - { key: 'brainstorm>discuss>discuss>plan', pipeline: ['brainstorm', 'discuss', 'discuss', 'plan'] }, { key: 'audit>plan', pipeline: ['audit', 'plan'] }, { key: 'review>plan', pipeline: ['review', 'plan'] }, { key: 'audit>review>plan', pipeline: ['audit', 'review', 'plan'] }, - { key: 'brainstorm>plan', pipeline: ['brainstorm', 'plan'] }, ]; /** Parse a mode string into a per-round pipeline. Single mode → single-element array. */ diff --git a/shared/session-display.ts b/shared/session-display.ts new file mode 100644 index 000000000..5b57d1180 --- /dev/null +++ b/shared/session-display.ts @@ -0,0 +1,28 @@ +function normalizeDisplayValue(value: string | null | undefined): string | undefined { + const trimmed = value?.trim(); + return trimmed ? 
trimmed : undefined; +} + +export function isInternalSessionDisplayValue(value: string | null | undefined, sessionName?: string | null): boolean { + const normalized = normalizeDisplayValue(value); + if (!normalized) return true; + const normalizedSession = normalizeDisplayValue(sessionName); + if (normalizedSession && normalized === normalizedSession) return true; + if (/^deck_sub_[a-z0-9-]+$/i.test(normalized)) return true; + if (/^deck_.+_(brain|w\d+)$/i.test(normalized)) return true; + if (/^bootmain[a-z0-9-]+$/i.test(normalized)) return true; + return false; +} + +export function pickReadableSessionDisplay( + candidates: Array, + sessionName?: string | null, +): string | undefined { + for (const candidate of candidates) { + const normalized = normalizeDisplayValue(candidate); + if (!normalized) continue; + if (isInternalSessionDisplayValue(normalized, sessionName)) continue; + return normalized; + } + return undefined; +} diff --git a/src/agent/drivers/claude-code.ts b/src/agent/drivers/claude-code.ts index 58946fb09..baeb03458 100644 --- a/src/agent/drivers/claude-code.ts +++ b/src/agent/drivers/claude-code.ts @@ -17,6 +17,9 @@ const STARTUP_PROMPTS: Array<{ keys: string[]; // tmux key names to send in sequence label: string; }> = [ + // "Resume from summary (recommended)" chooser shown on restart/resume. + // The default selection is already "Resume from summary", so Enter confirms it. + { pattern: /resume from summary|resume full session as-is|don't ask me again/i, keys: ['Enter'], label: 'resume-summary' }, // "Security guide" / "Do you trust the files in this folder?" — select "Yes, I trust" // Newer CC versions use a numbered menu (1=Yes, 2=No); older use Enter for default. // Sending "1" then Enter covers both: numbered picks option 1, legacy confirms default. 
diff --git a/src/agent/providers/claude-code-sdk.ts b/src/agent/providers/claude-code-sdk.ts index 2ebc30cb1..8fb1a2f73 100644 --- a/src/agent/providers/claude-code-sdk.ts +++ b/src/agent/providers/claude-code-sdk.ts @@ -42,6 +42,7 @@ interface ClaudeSdkSessionState { cancelled: boolean; finalMetadata?: Record; pendingComplete?: AgentMessage; + pendingError?: ProviderError; toolCalls: Map; emittedToolStates: Map; } @@ -209,12 +210,35 @@ export class ClaudeCodeSdkProvider implements TransportProvider { throw this.makeError(PROVIDER_ERROR_CODES.PROVIDER_ERROR, 'Claude SDK session is already busy', true); } + await this.startQuery(sessionId, state, message, extraSystemPrompt, true); + } + + async cancel(sessionId: string): Promise { + const state = this.sessions.get(sessionId); + if (!state?.currentQuery) return; + state.cancelled = true; + try { + await state.currentQuery.interrupt(); + } catch {} + try { + state.currentQuery.close(); + } catch {} + } + + private async startQuery( + sessionId: string, + state: ClaudeSdkSessionState, + message: string, + extraSystemPrompt: string | undefined, + allowResumeFallback: boolean, + ): Promise { state.currentText = ''; state.currentMessageId = null; state.completed = false; state.cancelled = false; state.finalMetadata = undefined; state.pendingComplete = undefined; + state.pendingError = undefined; state.toolCalls.clear(); state.emittedToolStates.clear(); @@ -257,27 +281,25 @@ export class ClaudeCodeSdkProvider implements TransportProvider { const q = query({ prompt: message, options: options as any }); state.currentQuery = q; - void this.consumeQuery(sessionId, state, q); + void this.consumeQuery(sessionId, state, q, message, extraSystemPrompt, allowResumeFallback); } - async cancel(sessionId: string): Promise { - const state = this.sessions.get(sessionId); - if (!state?.currentQuery) return; - state.cancelled = true; - try { - await state.currentQuery.interrupt(); - } catch {} - try { - state.currentQuery.close(); - } 
catch {} - } - - private async consumeQuery(sessionId: string, state: ClaudeSdkSessionState, q: ReturnType): Promise { + private async consumeQuery( + sessionId: string, + state: ClaudeSdkSessionState, + q: ReturnType, + message: string, + extraSystemPrompt: string | undefined, + allowResumeFallback: boolean, + ): Promise { let pendingError: ProviderError | null = null; try { for await (const msg of q) { this.handleMessage(sessionId, state, msg); } + if (!pendingError && state.pendingError) { + pendingError = state.pendingError; + } if (!state.completed && state.cancelled) { pendingError = this.makeError(PROVIDER_ERROR_CODES.CANCELLED, 'Claude turn cancelled', true); } @@ -289,8 +311,15 @@ export class ClaudeCodeSdkProvider implements TransportProvider { state.currentQuery = null; const pendingComplete = state.pendingComplete; state.pendingComplete = undefined; + state.pendingError = undefined; state.currentMessageId = null; state.currentText = ''; + if (!pendingComplete && pendingError && allowResumeFallback && state.started && this.isMissingResumeError(pendingError.message)) { + state.started = false; + logger.info({ provider: this.id, sessionId, resumeId: state.resumeId }, 'Claude SDK resume failed; retrying with sessionId'); + await this.startQuery(sessionId, state, message, extraSystemPrompt, false); + return; + } if (pendingComplete) { for (const cb of this.completeCallbacks) cb(sessionId, pendingComplete); } else if (pendingError) { @@ -437,7 +466,7 @@ export class ClaudeCodeSdkProvider implements TransportProvider { if (msg.type === 'result') { if (msg.is_error) { const details = Array.isArray((msg as any).errors) ? 
(msg as any).errors.join('; ') : 'Claude execution failed'; - this.emitError(sessionId, this.makeError(PROVIDER_ERROR_CODES.PROVIDER_ERROR, details, false, msg)); + state.pendingError = this.makeError(PROVIDER_ERROR_CODES.PROVIDER_ERROR, details, false, msg); return; } state.started = true; @@ -549,6 +578,10 @@ export class ClaudeCodeSdkProvider implements TransportProvider { return this.makeError(PROVIDER_ERROR_CODES.PROVIDER_ERROR, message, false, err); } + private isMissingResumeError(message: string): boolean { + return /no conversation found|session .* not found|unknown session|invalid session/i.test(message); + } + private makeError(code: string, message: string, recoverable: boolean, details?: unknown): ProviderError { return { code, message, recoverable, ...(details !== undefined ? { details } : {}) }; } diff --git a/src/agent/session-close.ts b/src/agent/session-close.ts new file mode 100644 index 000000000..02737f56b --- /dev/null +++ b/src/agent/session-close.ts @@ -0,0 +1,160 @@ +import { execFile } from 'node:child_process'; +import { promisify } from 'node:util'; + +import { getPanePids } from './tmux.js'; +import type { SessionRecord } from '../store/session-store.js'; +import logger from '../util/logger.js'; + +const execFileAsync = promisify(execFile); +const SAFE_IMCODES_SESSION_RE = /^deck_[a-zA-Z0-9_-]+$/; + +export type CloseStage = + | 'collect' + | 'watchers' + | 'runtime' + | 'processes' + | 'tmux' + | 'verify' + | 'persist' + | 'events'; + +export interface CloseFailure { + sessionName: string; + stage: CloseStage; + message: string; +} + +export interface CloseTreeResult { + ok: boolean; + closed: string[]; + failed: CloseFailure[]; +} + +interface CloseSingleHooks { + emitStopping(record: SessionRecord): Promise | void; + stopWatchers(record: SessionRecord): Promise | void; + stopTransportRuntime(record: SessionRecord): Promise | void; + killProcessRuntime(record: SessionRecord): Promise | void; + verifyClosed(record: SessionRecord): 
Promise | void; + emitSuccess(record: SessionRecord): Promise | void; + persistSuccess(record: SessionRecord): Promise | void; + emitFailure(record: SessionRecord, failure: CloseFailure): Promise | void; + persistFailure(record: SessionRecord, failure: CloseFailure): Promise | void; +} + +async function recordStageFailure( + failures: CloseFailure[], + record: SessionRecord, + stage: CloseStage, + action: () => Promise | void, +): Promise { + try { + await action(); + } catch (err) { + const message = err instanceof Error ? err.message : String(err); + failures.push({ sessionName: record.name, stage, message }); + } +} + +function shouldKillChildProcesses(record: SessionRecord): boolean { + return record.runtimeType !== 'transport'; +} + +export async function killSessionProcesses(sessionName: string): Promise { + if (!SAFE_IMCODES_SESSION_RE.test(sessionName)) { + logger.warn({ sessionName }, 'Rejected invalid session name in killSessionProcesses'); + return; + } + try { + const pids = await getPanePids(sessionName); + for (const pid of pids) { + if (!/^\d+$/.test(pid)) continue; + if (process.platform === 'win32') { + await execFileAsync('taskkill', ['/F', '/T', '/PID', pid], { windowsHide: true }).catch(() => {}); + continue; + } + await execFileAsync('pkill', ['-9', '-P', pid]).catch(() => {}); + await execFileAsync('kill', ['-9', pid]).catch(() => {}); + } + } catch { + // Session may not exist or backend may not expose pane PIDs. 
+ } +} + +export async function closeSingleSession(record: SessionRecord, hooks: CloseSingleHooks): Promise { + const failures: CloseFailure[] = []; + + await recordStageFailure(failures, record, 'events', () => hooks.emitStopping(record)); + await recordStageFailure(failures, record, 'watchers', () => hooks.stopWatchers(record)); + + if (record.runtimeType === 'transport') { + await recordStageFailure(failures, record, 'runtime', () => hooks.stopTransportRuntime(record)); + } else { + if (shouldKillChildProcesses(record)) { + await recordStageFailure(failures, record, 'processes', () => killSessionProcesses(record.name)); + } + await recordStageFailure(failures, record, 'tmux', () => hooks.killProcessRuntime(record)); + } + + await recordStageFailure(failures, record, 'verify', () => hooks.verifyClosed(record)); + + if (failures.length === 0) { + await recordStageFailure(failures, record, 'persist', () => hooks.persistSuccess(record)); + if (failures.length === 0) { + await recordStageFailure(failures, record, 'events', () => hooks.emitSuccess(record)); + } + } + + if (failures.length > 0) { + const primaryFailure = failures[0]; + await recordStageFailure(failures, record, 'events', () => hooks.emitFailure(record, primaryFailure)); + await recordStageFailure(failures, record, 'persist', () => hooks.persistFailure(record, primaryFailure)); + } + + return { + ok: failures.length === 0, + closed: failures.length === 0 ? 
[record.name] : [], + failed: failures, + }; +} + +export function collectProjectCloseTargets(projectName: string, sessions: SessionRecord[]): SessionRecord[] { + const toStop = new Map(); + + for (const session of sessions) { + if (session.projectName === projectName) toStop.set(session.name, session); + } + + let changed = true; + while (changed) { + changed = false; + for (const session of sessions) { + if (!session.name.startsWith('deck_sub_')) continue; + if (!session.parentSession) continue; + if (toStop.has(session.name)) continue; + if (!toStop.has(session.parentSession)) continue; + toStop.set(session.name, session); + changed = true; + } + } + + const depthCache = new Map(); + const depthOf = (session: SessionRecord): number => { + const cached = depthCache.get(session.name); + if (cached !== undefined) return cached; + if (!session.parentSession || !toStop.has(session.parentSession)) { + depthCache.set(session.name, 0); + return 0; + } + const parent = toStop.get(session.parentSession); + if (!parent) { + depthCache.set(session.name, 0); + return 0; + } + const depth = depthOf(parent) + 1; + depthCache.set(session.name, depth); + return depth; + }; + + return [...toStop.values()].sort((a, b) => depthOf(b) - depthOf(a)); +} diff --git a/src/agent/session-manager.ts b/src/agent/session-manager.ts index dffe61a5a..b43354435 100644 --- a/src/agent/session-manager.ts +++ b/src/agent/session-manager.ts @@ -23,6 +23,7 @@ import { } from '../store/session-store.js'; import logger from '../util/logger.js'; import { timelineEmitter } from '../daemon/timeline-emitter.js'; +import { emitSessionInlineError } from '../daemon/session-error.js'; import { startWatching, startWatchingFile, stopWatching, isWatching, findJsonlPathBySessionId } from '../daemon/jsonl-watcher.js'; import { startWatching as startCodexWatching, startWatchingSpecificFile as startCodexWatchingFile, startWatchingById as startCodexWatchingById, stopWatching as stopCodexWatching, isWatching as 
isCodexWatching, findRolloutPathByUuid } from '../daemon/codex-watcher.js'; import { startWatching as startGeminiWatching, startWatchingLatest as startGeminiWatchingLatest, stopWatching as stopGeminiWatching, isWatching as isGeminiWatching } from '../daemon/gemini-watcher.js'; @@ -34,9 +35,11 @@ import { getQwenOAuthQuotaUsageLabel } from './provider-quota.js'; import { getClaudeSdkRuntimeConfig } from './sdk-runtime-config.js'; import { getCodexRuntimeConfig } from './codex-runtime-config.js'; import type { TransportEffortLevel } from '../../shared/effort-levels.js'; +import { isClaudeCodeFamily, isCodexFamily } from '../../shared/agent-types.js'; import { getAgentVersion } from './agent-version.js'; import { repoCache } from '../repo/cache.js'; +import { closeSingleSession, collectProjectCloseTargets, type CloseFailure, type CloseTreeResult } from './session-close.js'; /** Start JSONL watcher for a CC session — uses specific file if ccSessionId known, else directory scan. */ function startCCWatcher(sessionName: string, projectDir: string, ccSessionId?: string): void { @@ -112,6 +115,11 @@ export function setSessionEventCallback(cb: SessionEventCallback): void { function emitSessionEvent(event: 'started' | 'stopped' | 'error', session: string, state: string): void { try { _onSessionEvent?.(event, session, state); } catch { /* ignore */ } + if (event === 'error') { + emitSessionInlineError(session, state); + timelineEmitter.emit(session, 'session.state', { state: event, error: state }); + return; + } timelineEmitter.emit(session, 'session.state', { state: event }); } @@ -177,50 +185,79 @@ export async function startProject(config: ProjectConfig): Promise { } } -/** Stop all sessions for a project and remove them from the store. 
*/ -export async function stopProject(projectName: string): Promise { - const allSessions = storeSessions(); - const toStop = new Map(); +function buildCloseFailureMessage(record: SessionRecord, failure: CloseFailure): string { + const prefix = record.name.startsWith('deck_sub_') ? 'Sub-session' : 'Session'; + return `${prefix} close failed during ${failure.stage}: ${failure.message}`; +} - for (const session of allSessions) { - if (session.projectName === projectName) toStop.set(session.name, session); - } +/** Stop all sessions for a project and remove them from the store on confirmed success. */ +export async function stopProject( + projectName: string, + serverLink?: { send(msg: object): void } | null, +): Promise { + const targets = collectProjectCloseTargets(projectName, storeSessions()); + const invalidatedDirs = new Set(); + const result: CloseTreeResult = { ok: true, closed: [], failed: [] }; + + for (const record of targets) { + const closeResult = await closeSingleSession(record, { + emitStopping: () => { + timelineEmitter.emit(record.name, 'session.state', { state: 'stopping' }); + }, + stopWatchers: () => { + stopStructuredWatchers(record.name); + }, + stopTransportRuntime: async () => { + await stopTransportRuntimeSession(record.name); + }, + killProcessRuntime: async () => { + await killSession(record.name); + }, + verifyClosed: async () => { + if (record.runtimeType === RUNTIME_TYPES.TRANSPORT) { + if (transportRuntimes.has(record.name)) throw new Error('transport runtime still registered'); + return; + } + if (await sessionExists(record.name)) throw new Error('session still exists after kill'); + }, + emitSuccess: async () => { + if (record.name.startsWith('deck_sub_')) { + timelineEmitter.emit(record.name, 'session.state', { state: 'stopped' }); + return; + } + emitSessionEvent('stopped', record.name, 'stopped'); + }, + persistSuccess: async () => { + if (record.name.startsWith('deck_sub_')) { + const id = record.name.replace(/^deck_sub_/, ''); + 
if (serverLink && id !== record.name) { + serverLink.send({ type: 'subsession.closed', id, sessionName: record.name }); + } + } + removeSession(record.name); + emitSessionPersist(null, record.name); + if (record.projectDir && !invalidatedDirs.has(record.projectDir)) { + invalidatedDirs.add(record.projectDir); + repoCache.invalidate(record.projectDir); + } + }, + emitFailure: async (_record, failure) => { + emitSessionEvent('error', record.name, buildCloseFailureMessage(record, failure)); + }, + persistFailure: async (_record, failure) => { + const next: SessionRecord = { ...record, state: 'error', updatedAt: Date.now() }; + upsertSession(next); + emitSessionPersist(next, record.name); + logger.warn({ session: record.name, stage: failure.stage, message: failure.message }, 'Project shutdown failed'); + }, + }); - let changed = true; - while (changed) { - changed = false; - for (const session of allSessions) { - if (!session.name.startsWith('deck_sub_')) continue; - if (!session.parentSession) continue; - if (toStop.has(session.name)) continue; - if (!toStop.has(session.parentSession)) continue; - toStop.set(session.name, session); - changed = true; - } + result.closed.push(...closeResult.closed); + result.failed.push(...closeResult.failed); } - const invalidatedDirs = new Set(); - for (const s of toStop.values()) { - stopWatching(s.name); - stopCodexWatching(s.name); - stopGeminiWatching(s.name); - stopOpenCodeWatching(s.name); - const transportRuntime = transportRuntimes.get(s.name); - if (transportRuntime) { - if (transportRuntime.providerSessionId) unregisterProviderRoute(transportRuntime.providerSessionId); - await transportRuntime.kill().catch(() => {}); - transportRuntimes.delete(s.name); - } else { - await killSession(s.name).catch(() => {}); - } - removeSession(s.name); - emitSessionPersist(null, s.name); - emitSessionEvent('stopped', s.name, 'stopped'); - if (s.projectDir && !invalidatedDirs.has(s.projectDir)) { - invalidatedDirs.add(s.projectDir); - 
repoCache.invalidate(s.projectDir); - } - } + result.ok = result.failed.length === 0; + return result; } /** Kill tmux sessions and watchers for a project but keep store records (for restart). */ @@ -413,6 +450,7 @@ export async function restoreFromStore(): Promise { try { await restartSession(hydrated); } catch (err) { logger.error({ err, session: hydrated.name }, 'Failed to restart session on restore — skipping (tmux may be unavailable)'); updateSessionState(hydrated.name, 'error'); + emitSessionEvent('error', hydrated.name, err instanceof Error ? err.message : String(err)); } } else if (isLiveSession && !paneAlive) { // Session exists (remain-on-exit) but process is dead — respawn instead of creating a new session @@ -420,6 +458,7 @@ export async function restoreFromStore(): Promise { try { await respawnSession(hydrated); } catch (err) { logger.error({ err, session: hydrated.name }, 'Failed to respawn session on restore — skipping'); updateSessionState(hydrated.name, 'error'); + emitSessionEvent('error', hydrated.name, err instanceof Error ? 
err.message : String(err)); } } else if (hydrated.agentType === 'claude-code' && hydrated.projectDir && !isWatching(hydrated.name)) { if (hydrated.ccSessionId) { @@ -513,7 +552,7 @@ export async function restoreFromStore(): Promise { agentType, agentVersion: await getAgentVersion(agentType), projectDir, - state: 'running', + state: 'idle', restarts: 0, restartTimestamps: [], createdAt: Date.now(), @@ -525,7 +564,7 @@ export async function restoreFromStore(): Promise { upsertSession(record); emitSessionPersist(record, name); - emitSessionEvent('started', name, 'running'); + emitSessionEvent('started', name, 'idle'); if (agentType === 'claude-code' && projectDir) { startStructuredWatcher(name, agentType, projectDir, { ccSessionId }); } else if (agentType === 'opencode' && projectDir) { @@ -553,9 +592,10 @@ export async function restartSession(record: SessionRecord): Promise { const recentRestarts = record.restartTimestamps.filter((t) => t > windowStart); if (recentRestarts.length >= MAX_RESTARTS) { + const message = `Restart loop detected: more than ${MAX_RESTARTS} restarts within 5 minutes`; logger.error({ session: record.name }, 'Restart loop detected — marking as error'); updateSessionState(record.name, 'error'); - emitSessionEvent('error', record.name, 'error'); + emitSessionEvent('error', record.name, message); return false; } @@ -571,7 +611,7 @@ export async function restartSession(record: SessionRecord): Promise { ...effectiveRecord, restarts: record.restarts + 1, restartTimestamps: [...recentRestarts, now], - state: 'running', + state: 'idle', updatedAt: now, }; upsertSession(updated); @@ -608,9 +648,10 @@ export async function respawnSession(record: SessionRecord): Promise { const recentRestarts = record.restartTimestamps.filter((t) => t > windowStart); if (recentRestarts.length >= MAX_RESTARTS) { + const message = `Restart loop detected: more than ${MAX_RESTARTS} restarts within 5 minutes`; logger.error({ session: record.name }, 'Restart loop detected — 
marking as error'); updateSessionState(record.name, 'error'); - emitSessionEvent('error', record.name, 'error'); + emitSessionEvent('error', record.name, message); return false; } @@ -662,7 +703,7 @@ export async function respawnSession(record: SessionRecord): Promise { ...effectiveRecord, restarts: record.restarts + 1, restartTimestamps: [...recentRestarts, now], - state: 'running', + state: 'idle', updatedAt: now, }; upsertSession(updated); @@ -742,6 +783,96 @@ export interface LaunchOpts { userCreated?: boolean; } +export interface SessionRelaunchOverrides { + agentType?: AgentType; + projectDir?: string; + label?: string | null; + description?: string | null; + requestedModel?: string | null; + effort?: TransportEffortLevel | null; + transportConfig?: Record | null; + ccPreset?: string | null; +} + +export function getCompatibleSessionIds( + record: Pick, + agentType: AgentType, +): Pick { + return { + ...(isClaudeCodeFamily(agentType) && record.ccSessionId ? { ccSessionId: record.ccSessionId } : {}), + ...(isCodexFamily(agentType) && record.codexSessionId ? { codexSessionId: record.codexSessionId } : {}), + ...(agentType === 'gemini' && record.geminiSessionId ? { geminiSessionId: record.geminiSessionId } : {}), + ...(agentType === 'opencode' && record.opencodeSessionId ? 
{ opencodeSessionId: record.opencodeSessionId } : {}), + }; +} + +function stopStructuredWatchers(sessionName: string): void { + stopWatching(sessionName); + stopCodexWatching(sessionName); + stopGeminiWatching(sessionName); + stopOpenCodeWatching(sessionName); +} + +export async function stopTransportRuntimeSession(sessionName: string): Promise { + const transportRuntime = transportRuntimes.get(sessionName); + if (!transportRuntime) return; + if (transportRuntime.providerSessionId) unregisterProviderRoute(transportRuntime.providerSessionId); + await transportRuntime.kill(); + transportRuntimes.delete(sessionName); +} + +async function teardownSessionRuntime(record: SessionRecord): Promise { + stopStructuredWatchers(record.name); + const transportRuntime = transportRuntimes.get(record.name); + if (transportRuntime) { + await stopTransportRuntimeSession(record.name).catch(() => {}); + return; + } + await killSession(record.name).catch(() => {}); +} + +export async function relaunchSessionWithSettings( + record: SessionRecord, + overrides: SessionRelaunchOverrides = {}, +): Promise { + const targetAgentType = (overrides.agentType ?? record.agentType) as AgentType; + const targetProjectDir = overrides.projectDir ?? record.projectDir; + const targetLabel = overrides.label !== undefined ? overrides.label : (record.label ?? null); + const targetDescription = overrides.description !== undefined ? overrides.description : (record.description ?? null); + const targetRequestedModel = overrides.requestedModel !== undefined ? overrides.requestedModel : (record.requestedModel ?? null); + const targetEffort = overrides.effort !== undefined ? overrides.effort : (record.effort ?? null); + const targetTransportConfig = overrides.transportConfig !== undefined ? overrides.transportConfig : (record.transportConfig ?? null); + const targetCcPreset = overrides.ccPreset !== undefined ? overrides.ccPreset : (record.ccPreset ?? 
null); + const compatibleIds = getCompatibleSessionIds(record, targetAgentType); + const preserveTransportBinding = record.runtimeType === RUNTIME_TYPES.TRANSPORT + && record.agentType === targetAgentType + && typeof record.providerSessionId === 'string' + && record.providerSessionId.length > 0; + + await teardownSessionRuntime(record); + + await launchSession({ + name: record.name, + projectName: record.projectName, + role: record.role, + agentType: targetAgentType, + projectDir: targetProjectDir, + label: targetLabel ?? undefined, + description: targetDescription ?? undefined, + requestedModel: targetRequestedModel ?? undefined, + effort: targetEffort ?? undefined, + transportConfig: targetTransportConfig ?? undefined, + ccPreset: targetAgentType === 'claude-code' ? (targetCcPreset ?? undefined) : undefined, + ...(preserveTransportBinding ? { + bindExistingKey: record.providerSessionId, + skipCreate: true, + } : {}), + ...compatibleIds, + ...(record.parentSession ? { parentSession: record.parentSession } : {}), + ...(record.userCreated ? { userCreated: true } : {}), + }); +} + /** In-memory map of active transport session runtimes */ const transportRuntimes = new Map(); @@ -753,11 +884,19 @@ function wireTransportCallbacks(runtime: TransportSessionRuntime, sessionName: s timelineEmitter.emit(sessionName, 'assistant.thinking', { text: '' }, { source: 'daemon', confidence: 'high' }); } const mapped = (status === 'streaming' || status === 'thinking') ? 
'running' : status; - timelineEmitter.emit(sessionName, 'session.state', { state: mapped }, { source: 'daemon', confidence: 'high' }); + timelineEmitter.emit(sessionName, 'session.state', { + state: mapped, + pendingCount: runtime.pendingCount, + pendingMessages: runtime.pendingMessages, + }, { source: 'daemon', confidence: 'high' }); }; runtime.onDrain = (merged, count) => { - timelineEmitter.emit(sessionName, 'user.message', { text: merged, batchedCount: count }); - timelineEmitter.emit(sessionName, 'session.state', { state: 'running' }, { source: 'daemon', confidence: 'high' }); + timelineEmitter.emit(sessionName, 'user.message', { text: merged, batchedCount: count, allowDuplicate: true }); + timelineEmitter.emit(sessionName, 'session.state', { + state: 'running', + pendingCount: runtime.pendingCount, + pendingMessages: runtime.pendingMessages, + }, { source: 'daemon', confidence: 'high' }); }; } @@ -918,7 +1057,7 @@ export async function restoreTransportSessions(providerId: string): Promise { const { name, projectName, role, agentType, projectDir, skipStore, label, description, bindExistingKey, skipCreate, parentSession } = opts; const existing = getSession(name); + const inheritedClaudeResumeId = opts.ccSessionId ?? (!opts.fresh ? existing?.ccSessionId : undefined); + const shouldResumeClaudeCliConversation = agentType === 'claude-code-sdk' + && existing?.agentType === 'claude-code' + && existing?.runtimeType !== RUNTIME_TYPES.TRANSPORT + && typeof inheritedClaudeResumeId === 'string' + && inheritedClaudeResumeId.length > 0; if (opts.fresh) { const existingRuntime = transportRuntimes.get(name); @@ -998,6 +1143,12 @@ export async function launchTransportSession(opts: LaunchOpts): Promise { } } else if (agentType === 'claude-code-sdk') { transportResumeId = opts.ccSessionId ?? (!opts.fresh ? getSession(name)?.ccSessionId : undefined) ?? randomUUID(); + // Switching from Claude CLI -> SDK must resume the inherited conversation. 
+ // Re-creating with the same sessionId makes Claude reject the turn with + // "Session ID ... is already in use", which is what users were seeing. + if (shouldResumeClaudeCliConversation) { + effectiveSkipCreate = true; + } if (opts.ccPreset) { const { resolvePresetEnv } = await import('../daemon/cc-presets.js'); transportEnv = { ...(transportEnv ?? {}), ...(await resolvePresetEnv(opts.ccPreset, transportResumeId)) }; @@ -1036,7 +1187,7 @@ export async function launchTransportSession(opts: LaunchOpts): Promise { role, agentType, projectDir, - state: 'running', + state: 'idle', restarts: 0, restartTimestamps: [], createdAt: Date.now(), @@ -1071,7 +1222,7 @@ export async function launchTransportSession(opts: LaunchOpts): Promise { emitSessionPersist(record, name); } - emitSessionEvent('started', name, 'running'); + emitSessionEvent('started', name, 'idle'); logger.info({ session: name, agentType, providerId: provider.id }, 'Launched transport session'); } catch (err) { // Rollback runtime + route on persistence failure @@ -1187,7 +1338,7 @@ export async function launchSession(opts: LaunchOpts): Promise { agentType, agentVersion, projectDir, - state: 'running', + state: 'idle', restarts: existing?.restarts ?? 0, restartTimestamps: existing?.restartTimestamps ?? [], createdAt: existing?.createdAt ?? Date.now(), @@ -1199,6 +1350,9 @@ export async function launchSession(opts: LaunchOpts): Promise { ...(opencodeSessionId ? { opencodeSessionId } : {}), ...(opts.ccPreset ? { ccPreset: opts.ccPreset } : {}), ...(label ? { label } : {}), + ...(opts.description ? { description: opts.description } : {}), + ...(opts.parentSession ? { parentSession: opts.parentSession } : {}), + ...(opts.userCreated ? { userCreated: true } : {}), ...(familyDisplay ?? {}), }; upsertSession(record); @@ -1214,6 +1368,9 @@ export async function launchSession(opts: LaunchOpts): Promise { ...(geminiSessionId ? { geminiSessionId } : {}), ...(opencodeSessionId ? 
{ opencodeSessionId } : {}), ...(opts.qwenModel ? { qwenModel: opts.qwenModel } : {}), + ...(opts.description ? { description: opts.description } : {}), + ...(opts.parentSession ? { parentSession: opts.parentSession } : {}), + ...(opts.userCreated ? { userCreated: true } : {}), updatedAt: Date.now(), }; upsertSession(merged); @@ -1221,7 +1378,7 @@ export async function launchSession(opts: LaunchOpts): Promise { } } - emitSessionEvent('started', name, 'running'); + emitSessionEvent('started', name, 'idle'); // Start structured-event watchers for supported agent types startStructuredWatcher(name, agentType, projectDir, { ccSessionId, codexSessionId, geminiSessionId, opencodeSessionId }); diff --git a/src/agent/tmux.ts b/src/agent/tmux.ts index 8ea1f86e3..8358dc216 100644 --- a/src/agent/tmux.ts +++ b/src/agent/tmux.ts @@ -762,7 +762,18 @@ export async function startPipePaneStream(session: string, paneId: string): Prom // well-tested libuv pipe path. fd = fs.openSync(fifoPath, fs.constants.O_RDWR | fs.constants.O_NONBLOCK); const cat = spawn('cat', [fifoPath], { stdio: ['ignore', 'pipe', 'ignore'] }); - stream = cat.stdout!; + const catReady = new Promise((resolve, reject) => { + cat.once('spawn', () => resolve()); + cat.once('error', (err) => reject(err)); + }); + cat.on('error', (err) => { + if (stream && !stream.destroyed) stream.destroy(err); + }); + await catReady; + if (!cat.stdout) { + throw new Error('pipe-pane cat reader missing stdout pipe'); + } + stream = cat.stdout; needsManualClose = true; catProc = cat; @@ -804,7 +815,12 @@ export async function startPipePaneStream(session: string, paneId: string): Prom } catch (err) { // Rollback: destroy stream + close fd if needed, clean up files if (stream) { destroyPipeStream(stream, fd, needsManualClose, catProc); } - else if (fd >= 0) { try { fs.closeSync(fd); } catch { /* ignore */ } } + else { + if (catProc) { + try { catProc.kill('SIGTERM'); } catch { /* ignore */ } + } + if (fd >= 0) { try { fs.closeSync(fd); 
} catch { /* ignore */ } } + } await execFile('tmux', ['pipe-pane', '-t', paneId]).catch(() => {}); await fsp.unlink(fifoPath).catch(() => {}); await fsp.rmdir(dir).catch(() => {}); diff --git a/src/agent/transport-session-runtime.ts b/src/agent/transport-session-runtime.ts index 0130fddb5..404a6e8f9 100644 --- a/src/agent/transport-session-runtime.ts +++ b/src/agent/transport-session-runtime.ts @@ -120,6 +120,8 @@ export class TransportSessionRuntime implements SessionRuntime { get sending(): boolean { return this._sending; } /** Number of messages waiting in the queue. */ get pendingCount(): number { return this._pendingMessages.length; } + /** Snapshot of queued messages waiting to be drained. */ + get pendingMessages(): string[] { return [...this._pendingMessages]; } async initialize(config: SessionConfig): Promise { this._providerSessionId = await this.provider.createSession(config); @@ -155,8 +157,6 @@ export class TransportSessionRuntime implements SessionRuntime { if (!this._providerSessionId) { throw new Error('TransportSessionRuntime not initialized — call initialize() first'); } - // Clear pending — user cancelled, they don't want queued messages either - this._pendingMessages = []; if (!this.provider.cancel) return; await this.provider.cancel(this._providerSessionId); } diff --git a/src/daemon/cc-presets.ts b/src/daemon/cc-presets.ts index 484620886..3ccf67cb4 100644 --- a/src/daemon/cc-presets.ts +++ b/src/daemon/cc-presets.ts @@ -53,9 +53,14 @@ export async function savePresets(presets: CcPreset[]): Promise { await fs.writeFile(PRESETS_PATH, JSON.stringify(presets, null, 2), 'utf8'); } +function normalizePresetName(name: string): string { + return name.trim().toLowerCase(); +} + export async function getPreset(name: string): Promise { const presets = await loadPresets(); - return presets.find((p) => p.name === name); + const normalized = normalizePresetName(name); + return presets.find((p) => normalizePresetName(p.name) === normalized); } /** diff 
--git a/src/daemon/codex-watcher.ts b/src/daemon/codex-watcher.ts index 58b05c1d0..a6b29bb4d 100644 --- a/src/daemon/codex-watcher.ts +++ b/src/daemon/codex-watcher.ts @@ -476,20 +476,24 @@ export async function startWatching(sessionName: string, workDir: string, model? watchers.set(sessionName, state); const control = watcherControl(sessionName); registerWatcherControl(sessionName, control); - - for (const dir of recentSessionDirs()) { - const found = await findLatestRollout(dir, workDir); - if (found) { - const s = await stat(found); + startPoll(sessionName, state); + void watchDir(sessionName, state, state.workDir || codexSessionDir(new Date())); + void (async () => { + for (const dir of recentSessionDirs()) { + if (state.stopped) return; + const found = await findLatestRollout(dir, workDir); + if (!found || state.stopped) continue; + const s = await stat(found).catch(() => null); + if (!s || state.stopped) continue; state.activeFile = found; state.fileOffset = s.size; claimedFiles.set(found, sessionName); await emitRecentHistory(sessionName, found, model); - break; + return; } - } - startPoll(sessionName, state); - void watchDir(sessionName, state, state.workDir || codexSessionDir(new Date())); + })().catch((err) => { + logger.debug({ err, sessionName, workDir }, 'codex-watcher: initial rollout scan failed'); + }); return control; } diff --git a/src/daemon/command-handler.ts b/src/daemon/command-handler.ts index 64ff6c77a..f5aa13919 100644 --- a/src/daemon/command-handler.ts +++ b/src/daemon/command-handler.ts @@ -2,7 +2,7 @@ * Handle commands from the web UI and inbound chat messages via ServerLink. * Commands arrive as JSON objects with a `type` field. 
*/ -import { startProject, stopProject, teardownProject, getTransportRuntime, launchTransportSession, isProviderSessionBound, persistSessionRecord, type ProjectConfig } from '../agent/session-manager.js'; +import { startProject, stopProject, teardownProject, getTransportRuntime, launchTransportSession, isProviderSessionBound, persistSessionRecord, relaunchSessionWithSettings, type ProjectConfig } from '../agent/session-manager.js'; import { isTransportAgent } from '../agent/detect.js'; import { sendKeys, sendKeysDelayedEnter, sendRawInput, resizeSession, sendKey, getPaneStartCommand } from '../agent/tmux.js'; import { listSessions, getSession, upsertSession, removeSession, type SessionRecord } from '../store/session-store.js'; @@ -11,6 +11,7 @@ import { terminalStreamer, type StreamSubscriber } from './terminal-streamer.js' import type { ServerLink } from './server-link.js'; import { timelineEmitter } from './timeline-emitter.js'; import { timelineStore } from './timeline-store.js'; +import { emitSessionInlineError } from './session-error.js'; import { startSubSession, stopSubSession, @@ -32,7 +33,6 @@ import { getComboRoundCount, parseModePipeline, P2P_CONFIG_MODE, type P2pSession import { CRON_MSG } from '../../shared/cron-types.js'; import { executeCronJob } from './cron-executor.js'; import { TRANSPORT_MSG } from '../../shared/transport-events.js'; -import { getProvider } from '../agent/provider-registry.js'; import { copyFile } from 'node:fs/promises'; import { randomUUID } from 'node:crypto'; import { ensureImcDir, imcSubDir } from '../util/imc-dir.js'; @@ -44,6 +44,7 @@ import { CODEX_MODEL_IDS, normalizeClaudeCodeModelId } from '../shared/models/op import { getClaudeSdkRuntimeConfig, normalizeClaudeSdkModelForProvider } from '../agent/sdk-runtime-config.js'; import { getCodexRuntimeConfig } from '../agent/codex-runtime-config.js'; import { P2P_TERMINAL_RUN_STATUSES } from '../../shared/p2p-status.js'; +import { DAEMON_MSG } from 
'../../shared/daemon-events.js'; import { CLAUDE_SDK_EFFORT_LEVELS, CODEX_SDK_EFFORT_LEVELS, @@ -202,6 +203,7 @@ import { getQwenRuntimeConfig } from '../agent/qwen-runtime-config.js'; import { getQwenDisplayMetadata } from '../agent/provider-display.js'; import { buildSessionList } from './session-list.js'; import { getQwenOAuthQuotaUsageLabel, recordQwenOAuthRequest } from '../agent/provider-quota.js'; +import { listProviderSessions as listProviderSessionsImpl } from './provider-sessions.js'; function describeTransportSendError(err: unknown): string { if (err && typeof err === 'object') { @@ -211,6 +213,28 @@ function describeTransportSendError(err: unknown): string { return err instanceof Error ? err.message : String(err); } +const pendingSessionRelaunches = new Map>(); + +function trackPendingSessionRelaunch(sessionName: string, pending: Promise): Promise { + pendingSessionRelaunches.set(sessionName, pending); + void pending.then(() => { + if (pendingSessionRelaunches.get(sessionName) === pending) pendingSessionRelaunches.delete(sessionName); + }, () => { + if (pendingSessionRelaunches.get(sessionName) === pending) pendingSessionRelaunches.delete(sessionName); + }); + return pending; +} + +async function waitForPendingSessionRelaunch(sessionName: string): Promise { + const pending = pendingSessionRelaunches.get(sessionName); + if (!pending) return; + try { + await pending; + } catch { + // Restart path already emitted its own error and corrective session sync. 
+ } +} + function refreshQwenQuotaUsageLabels(serverLink?: ServerLink): void { const usageLabel = getQwenOAuthQuotaUsageLabel(); for (const session of listSessions()) { @@ -308,11 +332,11 @@ function expandAllTargets(initiatorName: string, mode: string, excludeSameType = if (!inDomain) continue; - if (mode === P2P_CONFIG_MODE && sessionConfig) { + if (sessionConfig) { const entry = sessionConfig[s.name]; if (!entry || !entry.enabled) continue; // strict: missing = excluded if (entry.mode === 'skip') continue; - targets.push({ session: s.name, mode: entry.mode }); + targets.push({ session: s.name, mode: mode === P2P_CONFIG_MODE ? entry.mode : mode }); } else { targets.push({ session: s.name, mode }); } @@ -527,7 +551,7 @@ export function handleWebCommand(msg: unknown, serverLink: ServerLink): void { void handleStart(cmd, serverLink); break; case 'session.stop': - void handleStop(cmd); + void handleStop(cmd, serverLink); break; case 'session.restart': void handleRestart(cmd, serverLink); @@ -620,7 +644,7 @@ export function handleWebCommand(msg: unknown, serverLink: ServerLink): void { void handleServerDelete(); break; case 'daemon.upgrade': - void handleDaemonUpgrade(cmd.targetVersion as string | undefined); + void handleDaemonUpgrade(cmd.targetVersion as string | undefined, serverLink); break; case 'file.search': void handleFileSearch(cmd, serverLink); @@ -838,6 +862,42 @@ async function handleStart(cmd: Record, serverLink: ServerLink) } async function handleRestart(cmd: Record, serverLink: ServerLink): Promise { + const sessionName = cmd.sessionName as string | undefined; + if (sessionName) { + const record = getSession(sessionName); + if (!record) { + logger.warn({ sessionName }, 'session.restart: session not found in store'); + return; + } + try { + await trackPendingSessionRelaunch(sessionName, (async () => { + try { + await relaunchSessionWithSettings(record, { + agentType: (cmd.agentType as any) ?? undefined, + projectDir: ('cwd' in cmd ? 
(cmd.cwd as string | undefined) : undefined), + label: ('label' in cmd ? (cmd.label as string | null) : undefined), + description: ('description' in cmd ? (cmd.description as string | null) : undefined), + requestedModel: ('requestedModel' in cmd ? (cmd.requestedModel as string | null) : undefined), + effort: ('effort' in cmd ? (cmd.effort as any) : undefined), + transportConfig: ('transportConfig' in cmd ? (cmd.transportConfig as Record | null) : undefined), + }); + await handleGetSessions(serverLink); + logger.info({ sessionName, agentType: cmd.agentType ?? record.agentType }, 'Session relaunched via settings'); + } catch (err) { + logger.error({ sessionName, err }, 'session.restart(sessionName) failed'); + const message = err instanceof Error ? err.message : String(err); + emitSessionInlineError(sessionName, message); + try { serverLink.send({ type: 'session.error', project: record.projectName, message }); } catch { /* ignore */ } + await handleGetSessions(serverLink); + throw err; + } + })()); + } catch { + // Failure already surfaced via session.error + corrective session_list. + } + return; + } + const project = cmd.project as string | undefined; const fresh = cmd.fresh === true; if (!project) { @@ -874,23 +934,38 @@ async function handleRestart(cmd: Record, serverLink: ServerLin } catch (err) { logger.error({ project, err }, 'session.restart failed'); const message = err instanceof Error ? 
err.message : String(err); + emitSessionInlineError(brain.name, message); try { serverLink.send({ type: 'session.error', project, message }); } catch { /* ignore */ } } } -async function handleStop(cmd: Record): Promise { +async function handleStop(cmd: Record, serverLink: ServerLink): Promise { const project = cmd.project as string | undefined; if (!project) { logger.warn('session.stop: missing project name'); return; } + let result; try { - await stopProject(project); - logger.info({ project }, 'Session stopped via web'); + result = await stopProject(project, serverLink); } catch (err) { logger.error({ project, err }, 'session.stop failed'); + const message = err instanceof Error ? err.message : String(err); + try { serverLink.send({ type: 'session.error', project, message: `Shutdown failed: ${message}` }); } catch { /* ignore */ } + return; + } + + if (result.ok) { + logger.info({ project }, 'Session stopped via web'); + return; } + + const message = result.failed + .map((failure) => `${failure.sessionName}:${failure.stage}`) + .join(', '); + logger.warn({ project, failed: result.failed }, 'session.stop completed with shutdown failures'); + try { serverLink.send({ type: 'session.error', project, message: `Shutdown failed: ${message}` }); } catch { /* ignore */ } } /** @@ -944,6 +1019,8 @@ async function handleSend(cmd: Record, serverLink: ServerLink): return; } + await waitForPendingSessionRelaunch(sessionName); + // Fallback: legacy clients that don't send commandId get a server-generated one const isLegacy = !commandId; const effectiveId = commandId ?? crypto.randomUUID(); @@ -1126,11 +1203,14 @@ async function handleSend(cmd: Record, serverLink: ServerLink): // Transport sessions — route directly to the provider runtime, bypassing tmux. 
const transportRuntime = getTransportRuntime(sessionName); const record = (await import('../store/session-store.js')).getSession(sessionName); + const emitTransportUserMessage = (payloadText: string, extra?: Record) => { + timelineEmitter.emit(sessionName, 'user.message', { text: payloadText, allowDuplicate: true, ...(extra ?? {}) }); + }; if (!transportRuntime && record?.runtimeType === 'transport') { // No runtime — provider not connected. Show error in chat. const errMsg = `Provider ${record.providerId ?? 'unknown'} not connected. Reconnecting...`; logger.warn({ sessionName, providerId: record.providerId }, 'session.send: transport session has no runtime'); - timelineEmitter.emit(sessionName, 'user.message', { text }); + emitTransportUserMessage(text); timelineEmitter.emit(sessionName, 'assistant.text', { text: `⚠️ ${errMsg}`, streaming: false }, { source: 'daemon', confidence: 'high' }); timelineEmitter.emit(sessionName, 'session.state', { state: 'idle', error: errMsg }, { source: 'daemon', confidence: 'high' }); const errStatus = 'error'; @@ -1142,7 +1222,7 @@ async function handleSend(cmd: Record, serverLink: ServerLink): const release = await getMutex(sessionName).acquire(); try { if (text.trim() === '/stop') { - timelineEmitter.emit(sessionName, 'user.message', { text }); + emitTransportUserMessage(text); await transportRuntime.cancel(); // Mark session for fresh start so daemon restart doesn't resume the stuck conversation if (record?.agentType === 'qwen') { @@ -1168,7 +1248,7 @@ async function handleSend(cmd: Record, serverLink: ServerLink): const authHint = qwenAuthType === 'qwen-oauth' ? 
' (current tier only allows coder-model)' : ''; - timelineEmitter.emit(sessionName, 'user.message', { text }); + emitTransportUserMessage(text); timelineEmitter.emit(sessionName, 'assistant.text', { text: `⚠️ Unknown Qwen model: ${nextModel}${authHint}`, streaming: false, @@ -1200,7 +1280,7 @@ async function handleSend(cmd: Record, serverLink: ServerLink): persistSessionRecord(nextRecord, sessionName); await handleGetSessions(serverLink); syncSubSessionIfNeeded(sessionName, serverLink); - timelineEmitter.emit(sessionName, 'user.message', { text }); + emitTransportUserMessage(text); timelineEmitter.emit(sessionName, 'usage.update', { model: nextModel, contextWindow: resolveContextWindow(undefined, nextModel), @@ -1217,7 +1297,7 @@ async function handleSend(cmd: Record, serverLink: ServerLink): const requestedModel = modelMatch[1]; const selectedModel = normalizeClaudeCodeModelId(requestedModel); if (!selectedModel) { - timelineEmitter.emit(sessionName, 'user.message', { text }); + emitTransportUserMessage(text); timelineEmitter.emit(sessionName, 'assistant.text', { text: `⚠️ Unknown Claude model: ${requestedModel}`, streaming: false }, { source: 'daemon', confidence: 'high' }); timelineEmitter.emit(sessionName, 'command.ack', { commandId: effectiveId, status: 'error', error: `Unknown Claude model: ${requestedModel}` }); try { serverLink.send({ type: 'command.ack', commandId: effectiveId, status: 'error', session: sessionName, error: `Unknown Claude model: ${requestedModel}` }); } catch {} @@ -1237,7 +1317,7 @@ async function handleSend(cmd: Record, serverLink: ServerLink): persistSessionRecord(nextRecord, sessionName); await handleGetSessions(serverLink); syncSubSessionIfNeeded(sessionName, serverLink); - timelineEmitter.emit(sessionName, 'user.message', { text }); + emitTransportUserMessage(text); timelineEmitter.emit(sessionName, 'usage.update', { model: selectedModel, contextWindow: resolveContextWindow(undefined, selectedModel) }, { source: 'daemon', confidence: 
'high' }); timelineEmitter.emit(sessionName, 'assistant.text', { text: `Switched model to ${selectedModel}`, streaming: false }, { source: 'daemon', confidence: 'high' }); timelineEmitter.emit(sessionName, 'command.ack', { commandId: effectiveId, status: isLegacy ? 'accepted_legacy' : 'accepted' }); @@ -1247,7 +1327,7 @@ async function handleSend(cmd: Record, serverLink: ServerLink): if (record?.agentType === 'codex-sdk' && modelMatch) { const nextModel = modelMatch[1]; if (!CODEX_MODEL_IDS.includes(nextModel as any)) { - timelineEmitter.emit(sessionName, 'user.message', { text }); + emitTransportUserMessage(text); timelineEmitter.emit(sessionName, 'assistant.text', { text: `⚠️ Unknown Codex model: ${nextModel}`, streaming: false }, { source: 'daemon', confidence: 'high' }); timelineEmitter.emit(sessionName, 'command.ack', { commandId: effectiveId, status: 'error', error: `Unknown Codex model: ${nextModel}` }); try { serverLink.send({ type: 'command.ack', commandId: effectiveId, status: 'error', session: sessionName, error: `Unknown Codex model: ${nextModel}` }); } catch {} @@ -1269,7 +1349,7 @@ async function handleSend(cmd: Record, serverLink: ServerLink): persistSessionRecord(nextRecord, sessionName); await handleGetSessions(serverLink); syncSubSessionIfNeeded(sessionName, serverLink); - timelineEmitter.emit(sessionName, 'user.message', { text }); + emitTransportUserMessage(text); timelineEmitter.emit(sessionName, 'usage.update', { model: nextModel, contextWindow: resolveContextWindow(undefined, nextModel) }, { source: 'daemon', confidence: 'high' }); timelineEmitter.emit(sessionName, 'assistant.text', { text: `Switched model to ${nextModel}`, streaming: false }, { source: 'daemon', confidence: 'high' }); timelineEmitter.emit(sessionName, 'command.ack', { commandId: effectiveId, status: isLegacy ? 
'accepted_legacy' : 'accepted' }); @@ -1281,7 +1361,7 @@ async function handleSend(cmd: Record, serverLink: ServerLink): const allowed = getSupportedEffortLevels(record?.agentType); if (!isTransportEffortLevel(nextEffort) || !allowed.includes(nextEffort)) { const supported = allowed.join(', '); - timelineEmitter.emit(sessionName, 'user.message', { text }); + emitTransportUserMessage(text); timelineEmitter.emit(sessionName, 'assistant.text', { text: `⚠️ Unsupported thinking level: ${nextEffort}. Supported: ${supported}`, streaming: false, @@ -1300,7 +1380,7 @@ async function handleSend(cmd: Record, serverLink: ServerLink): persistSessionRecord(nextRecord, sessionName); await handleGetSessions(serverLink); syncSubSessionIfNeeded(sessionName, serverLink); - timelineEmitter.emit(sessionName, 'user.message', { text }); + emitTransportUserMessage(text); timelineEmitter.emit(sessionName, 'assistant.text', { text: `Switched thinking level to ${nextEffort}`, streaming: false, @@ -1309,7 +1389,6 @@ async function handleSend(cmd: Record, serverLink: ServerLink): try { serverLink.send({ type: 'command.ack', commandId: effectiveId, status: isLegacy ? 'accepted_legacy' : 'accepted', session: sessionName }); } catch {} return; } - timelineEmitter.emit(sessionName, 'user.message', { text }); if (record?.agentType === 'qwen' && record.qwenAuthType === 'qwen-oauth') { recordQwenOAuthRequest(); refreshQwenQuotaUsageLabels(serverLink); @@ -1317,8 +1396,15 @@ async function handleSend(cmd: Record, serverLink: ServerLink): // send() is synchronous: dispatches immediately if idle, queues if busy. // Status changes come from transport runtime's onStatusChange callback. 
const result = transportRuntime.send(text); + if (result === 'sent') { + emitTransportUserMessage(text); + } if (result === 'queued') { - timelineEmitter.emit(sessionName, 'session.state', { state: 'queued', pendingCount: transportRuntime.pendingCount }, { source: 'daemon', confidence: 'high' }); + timelineEmitter.emit(sessionName, 'session.state', { + state: 'queued', + pendingCount: transportRuntime.pendingCount, + pendingMessages: transportRuntime.pendingMessages, + }, { source: 'daemon', confidence: 'high' }); } // Clear fresh-start flag — the new conversation is now active if (record?.qwenFreshOnResume) { @@ -1835,7 +1921,12 @@ async function handleSubSessionStop(cmd: Record, serverLink: Se logger.warn('subsession.stop: missing sessionName'); return; } - await stopSubSession(sName, serverLink).catch((e: unknown) => logger.error({ err: e, sName }, 'subsession.stop failed')); + const result = await stopSubSession(sName, serverLink).catch((e: unknown) => { + logger.error({ err: e, sName }, 'subsession.stop failed'); + return null; + }); + if (!result || result.ok) return; + logger.warn({ sessionName: sName, failed: result.failed }, 'subsession.stop completed with shutdown failures'); } async function handleSubSessionRestart(cmd: Record, serverLink: ServerLink): Promise { @@ -1851,51 +1942,28 @@ async function handleSubSessionRestart(cmd: Record, serverLink: } const id = sName.replace(/^deck_sub_/, ''); try { - const effectiveRecord = (await recoverOpenCodeSessionRecord(record)) ?? record; - // Stop without notifying server (preserve PG record) - await stopSubSession(sName, null); - if (effectiveRecord.runtimeType === 'transport') { - await launchTransportSession({ - name: sName, - projectName: sName, - role: 'w1', - agentType: effectiveRecord.agentType as any, - projectDir: effectiveRecord.projectDir || process.cwd(), - description: effectiveRecord.description ?? undefined, - label: effectiveRecord.label ?? 
undefined, - bindExistingKey: effectiveRecord.providerSessionId ?? undefined, - skipCreate: !!effectiveRecord.providerSessionId, - parentSession: effectiveRecord.parentSession ?? undefined, - requestedModel: effectiveRecord.requestedModel ?? undefined, - effort: effectiveRecord.effort ?? undefined, - transportConfig: effectiveRecord.transportConfig ?? undefined, - userCreated: effectiveRecord.userCreated, - }); + await trackPendingSessionRelaunch(sName, (async () => { try { - serverLink.send(await buildSubSessionSync(id)); - } catch { /* not connected */ } - return; - } - // Recreate with same ID — pass existing session IDs so agents resume - // their conversation and ensureSessionFile skips if file already exists - await startSubSession({ - id, - type: effectiveRecord.agentType, - cwd: effectiveRecord.projectDir || null, - parentSession: effectiveRecord.parentSession || null, - ccSessionId: effectiveRecord.ccSessionId ?? null, - codexSessionId: effectiveRecord.codexSessionId ?? null, - geminiSessionId: effectiveRecord.geminiSessionId ?? null, - opencodeSessionId: effectiveRecord.opencodeSessionId ?? null, - ccPreset: effectiveRecord.ccPreset ?? null, - description: effectiveRecord.description ?? null, - }); - // Sync updated state to server - try { - serverLink.send(await buildSubSessionSync(id)); - } catch { /* not connected */ } - } catch (e: unknown) { - logger.error({ err: e, sessionName: sName }, 'subsession.restart failed'); + const effectiveRecord = (await recoverOpenCodeSessionRecord(record)) ?? record; + await relaunchSessionWithSettings(effectiveRecord, { + agentType: (cmd.agentType as any) ?? undefined, + projectDir: ('cwd' in cmd ? (cmd.cwd as string | undefined) : undefined), + label: ('label' in cmd ? (cmd.label as string | null) : undefined), + description: ('description' in cmd ? (cmd.description as string | null) : undefined), + requestedModel: ('requestedModel' in cmd ? 
(cmd.requestedModel as string | null) : undefined), + effort: ('effort' in cmd ? (cmd.effort as any) : undefined), + transportConfig: ('transportConfig' in cmd ? (cmd.transportConfig as Record | null) : undefined), + }); + try { + serverLink.send(await buildSubSessionSync(id)); + } catch { /* not connected */ } + } catch (e: unknown) { + logger.error({ err: e, sessionName: sName }, 'subsession.restart failed'); + throw e; + } + })()); + } catch { + // Failure already logged; keep command handler alive for future sends. } } @@ -1980,7 +2048,7 @@ async function handleP2pListDiscussions(_cmd: Record, serverLin for (const s of listSessions()) { if (s.projectDir) projectDirs.add(s.projectDir); } - const discussions: Array<{ id: string; fileName: string; preview: string; mtime: number }> = []; + const discussions: Array<{ id: string; fileName: string; path: string; preview: string; mtime: number }> = []; for (const projectDir of projectDirs) { const dir = imcSubDir(projectDir, 'discussions'); try { @@ -1993,7 +2061,7 @@ async function handleP2pListDiscussions(_cmd: Record, serverLin const content = await fsReadFileRaw(fullPath, 'utf8'); const reqMatch = content.match(/## User Request\s*\n+(.+)/); const preview = reqMatch?.[1]?.trim().slice(0, 120) || f; - discussions.push({ id: f.replace('.md', ''), fileName: f, preview, mtime: s.mtimeMs }); + discussions.push({ id: f.replace('.md', ''), fileName: f, path: fullPath, preview, mtime: s.mtimeMs }); } catch { /* skip unreadable */ } } } catch { /* dir may not exist */ } @@ -2005,14 +2073,15 @@ async function handleP2pListDiscussions(_cmd: Record, serverLin async function handleP2pReadDiscussion(cmd: Record, serverLink: ServerLink): Promise { const id = cmd.id as string | undefined; - if (!id) { serverLink.send({ type: 'p2p.read_discussion_response', error: 'missing_id' }); return; } + const requestId = cmd.requestId as string | undefined; + if (!id) { serverLink.send({ type: 'p2p.read_discussion_response', requestId, error: 
'missing_id' }); return; } // 1. Check active P2P runs first (in-memory, always fresh) for (const run of listP2pRuns()) { if (run.id === id || run.discussionId === id) { try { const content = await fsReadFileRaw(run.contextFilePath, 'utf8'); - serverLink.send({ type: 'p2p.read_discussion_response', id, content }); + serverLink.send({ type: 'p2p.read_discussion_response', id, requestId, content }); return; } catch { /* file may not exist yet */ } } @@ -2027,11 +2096,11 @@ async function handleP2pReadDiscussion(cmd: Record, serverLink: const filePath = nodePath.join(imcSubDir(projectDir, 'discussions'), `${id}.md`); try { const content = await fsReadFileRaw(filePath, 'utf8'); - serverLink.send({ type: 'p2p.read_discussion_response', id, content }); + serverLink.send({ type: 'p2p.read_discussion_response', id, requestId, content }); return; } catch { /* try next project */ } } - serverLink.send({ type: 'p2p.read_discussion_response', id, error: 'not_found' }); + serverLink.send({ type: 'p2p.read_discussion_response', id, requestId, error: 'not_found' }); } // ── Discussion handlers ──────────────────────────────────────────────────── @@ -2153,7 +2222,24 @@ async function handleDiscussionStop(cmd: Record): Promise * 3. A short sleep before the restart gives the current daemon time to finish * sending any in-flight messages. 
*/ -async function handleDaemonUpgrade(targetVersion?: string): Promise { +async function handleDaemonUpgrade(targetVersion?: string, serverLink?: ServerLink): Promise { + const activeRuns = getActiveP2pRunsBlockingDaemonUpgrade(); + if (activeRuns.length > 0) { + logger.warn({ + targetVersion, + activeRunIds: activeRuns.map((run) => run.id), + activeRunStatuses: activeRuns.map((run) => run.status), + }, 'daemon.upgrade: blocked because P2P runs are active'); + try { + serverLink?.send({ + type: DAEMON_MSG.UPGRADE_BLOCKED, + reason: 'p2p_active', + activeRunIds: activeRuns.map((run) => run.id), + }); + } catch { /* ignore */ } + return; + } + const { spawn } = await import('child_process'); const { writeFileSync, mkdtempSync, existsSync } = await import('fs'); const { join, dirname } = await import('path'); @@ -2202,7 +2288,8 @@ launchctl load -w "${plist}"`; const cleanupPath = join(scriptDir, 'cleanup.cmd'); const cleanupVbsPath = join(scriptDir, 'cleanup.vbs'); const targetVer = targetVersion ?? 'latest'; - // .cmd files: UTF-8 + BOM so cmd.exe handles non-ASCII paths. + // .cmd files: UTF-8 + BOM, and the script itself switches to UTF-8 with + // `chcp 65001` before touching any non-ASCII paths. // .vbs files: UTF-16 LE + BOM so wscript handles non-ASCII paths. writeFileSync(cleanupPath, encodeCmdAsUtf8Bom(buildWindowsCleanupScript(scriptDir))); writeFileSync(cleanupVbsPath, encodeVbsAsUtf16(buildWindowsCleanupVbs(cleanupPath))); @@ -2353,6 +2440,10 @@ const FILE_SEARCH_EXCLUDES = new Set([ const FILE_SEARCH_MAX = 20; +export function getActiveP2pRunsBlockingDaemonUpgrade(runs = listP2pRuns()) { + return runs.filter((run) => !P2P_TERMINAL_RUN_STATUSES.has(run.status)); +} + async function handleFileSearch(cmd: Record, serverLink: ServerLink): Promise { const query = (cmd.query as string ?? 
'').trim(); const projectDir = cmd.projectDir as string | undefined; @@ -2880,10 +2971,7 @@ export function fileSearchByLengthAsc(a: FzfEntry, b: FzfEntry): number { /** Reusable: fetch remote sessions from a provider. */ export async function listProviderSessions(providerId: string): Promise> { - const provider = getProvider(providerId); - if (!provider) return []; - if (!provider.capabilities.sessionRestore || !provider.listSessions) return []; - return provider.listSessions(); + return listProviderSessionsImpl(providerId); } // ── CC env presets ──────────────────────────────────────────────────────── diff --git a/src/daemon/lifecycle.ts b/src/daemon/lifecycle.ts index aa6e4239f..d05057076 100644 --- a/src/daemon/lifecycle.ts +++ b/src/daemon/lifecycle.ts @@ -26,6 +26,7 @@ import * as fs from 'node:fs'; import * as path from 'node:path'; import * as os from 'node:os'; import { P2P_TERMINAL_RUN_STATUSES } from '../../shared/p2p-status.js'; +import { pickReadableSessionDisplay } from '../../shared/session-display.js'; /** Get the last assistant.text from a session's timeline (for push notification context). 
*/ function getLastAssistantText(sessionName: string): string | undefined { @@ -41,6 +42,42 @@ function getLastAssistantText(sessionName: string): string | undefined { return undefined; } +function resolvePushDisplayContext(sessionName: string, sessions: SessionRecord[]): { + project: string; + label?: string; + parentLabel?: string; +} { + const byName = new Map(sessions.map((session) => [session.name, session] as const)); + const session = byName.get(sessionName); + const label = pickReadableSessionDisplay([session?.label], sessionName); + const visited = new Set(); + let cursor = session; + let parentLabel: string | undefined; + + while (cursor?.parentSession && !visited.has(cursor.parentSession)) { + visited.add(cursor.parentSession); + const parent = byName.get(cursor.parentSession); + if (!parent) break; + const readable = pickReadableSessionDisplay([parent.label, parent.projectName], parent.name); + if (readable) { + parentLabel = readable; + break; + } + cursor = parent; + } + + const project = pickReadableSessionDisplay( + [label, parentLabel, session?.projectName], + sessionName, + ) ?? session?.projectName ?? sessionName; + + return { + project, + ...(label ? { label } : {}), + ...(parentLabel ? { parentLabel } : {}), + }; +} + export interface DaemonContext { config: Config; memory: MemoryBackend | null; @@ -580,21 +617,35 @@ export async function startup(): Promise { const hookResult = await startHookServer((payload) => { if (!serverLink) return; try { - const record = listSessions().find((s) => s.name === payload.session); - const projectName = record?.projectName ?? payload.session; - // Build human-readable display label: prefer label, then project name, then session ID - const sessionLabel = record?.label || undefined; - const parentRecord = record?.parentSession ? 
listSessions().find((s) => s.name === record.parentSession) : undefined; - const parentLabel = parentRecord?.label || parentRecord?.projectName || undefined; + const sessions = listSessions(); + const record = sessions.find((s) => s.name === payload.session); + const display = resolvePushDisplayContext(payload.session, sessions); if (payload.event === 'idle') { // Shell/script sessions are always "idle" — skip to avoid noise if (record?.agentType === 'shell' || record?.agentType === 'script') return; // notifySessionIdle is handled by the unified timeline listener below // Include last assistant text for push notification context const lastText = getLastAssistantText(payload.session); - serverLink.send({ type: 'session.idle', session: payload.session, project: projectName, agentType: payload.agentType, ...(lastText ? { lastText } : {}), ...(sessionLabel ? { label: sessionLabel } : {}), ...(parentLabel ? { parentLabel } : {}) }); + serverLink.send({ + type: 'session.idle', + session: payload.session, + project: display.project, + agentType: payload.agentType, + ...(lastText ? { lastText } : {}), + ...(display.label ? { label: display.label } : {}), + ...(display.parentLabel ? { parentLabel: display.parentLabel } : {}), + }); } else if (payload.event === 'notification') { - serverLink.send({ type: 'session.notification', session: payload.session, project: projectName, agentType: record?.agentType ?? '', title: payload.title, message: payload.message, ...(sessionLabel ? { label: sessionLabel } : {}), ...(parentLabel ? { parentLabel } : {}) }); + serverLink.send({ + type: 'session.notification', + session: payload.session, + project: display.project, + agentType: record?.agentType ?? '', + title: payload.title, + message: payload.message, + ...(display.label ? { label: display.label } : {}), + ...(display.parentLabel ? 
{ parentLabel: display.parentLabel } : {}), + }); } else if (payload.event === 'tool_start') { serverLink.send({ type: 'session.tool', session: payload.session, tool: payload.tool }); } else if (payload.event === 'tool_end') { diff --git a/src/daemon/oc-session-sync.ts b/src/daemon/oc-session-sync.ts index 892c07402..439f07cc9 100644 --- a/src/daemon/oc-session-sync.ts +++ b/src/daemon/oc-session-sync.ts @@ -164,7 +164,7 @@ export async function syncOcSessions(serverLink: ServerLink): Promise { projectDir: mainSessionProjectDir(group.agentName), bindExistingKey: group.mainSession.key, skipCreate: true, skipStore: true, }); - upsertSession({ ...mainRecord!, state: 'running', label: mainLabel, updatedAt: Date.now() }); + upsertSession({ ...mainRecord!, state: 'idle', label: mainLabel, updatedAt: Date.now() }); logger.info({ session: mName, ocKey: group.mainSession.key }, 'oc-sync: reconnected main session runtime'); } catch (err) { registerProviderRoute(group.mainSession.key, mName); @@ -217,7 +217,7 @@ export async function syncOcSessions(serverLink: ServerLink): Promise { parentSession: mName, }); const newLabel = preferredOpenClawLabel(storeEntry.label, ch.displayName, ch.key); - upsertSession({ ...storeEntry, state: 'running', parentSession: mName, label: newLabel, updatedAt: Date.now() }); + upsertSession({ ...storeEntry, state: 'idle', parentSession: mName, label: newLabel, updatedAt: Date.now() }); // Update server DB label (may have been stored with sanitized key before displayName fix) const subId = storeEntry.name.replace('deck_sub_', ''); try { @@ -253,7 +253,7 @@ export async function syncOcSessions(serverLink: ServerLink): Promise { const reconnLabel = preferredOpenClawLabel(existingInStore.label, ch.displayName, ch.key); upsertSession({ ...existingInStore, - state: 'running', + state: 'idle', parentSession: mName, label: reconnLabel, updatedAt: Date.now(), diff --git a/src/daemon/p2p-orchestrator.ts b/src/daemon/p2p-orchestrator.ts index 
51365bd52..4c3356485 100644 --- a/src/daemon/p2p-orchestrator.ts +++ b/src/daemon/p2p-orchestrator.ts @@ -106,14 +106,12 @@ export function serializeP2pRun(run: P2pRun): P2pRunUpdatePayload { const currentRoundCompletedHopCount = run.hopStates.filter( (hop) => hop.round_index === run.currentRound && hop.status === 'completed', ).length; - const currentHop = run.activeTargetSessions[0] ?? run.currentTargetSession; - const currentHopState = currentHop - ? run.hopStates.find((hop) => - hop.session === currentHop && - hop.round_index === run.currentRound && - (hop.status === 'running' || hop.status === 'dispatched'), - ) ?? null - : null; + const activeHopStates = run.hopStates.filter((hop) => + hop.round_index === run.currentRound && + (hop.status === 'running' || hop.status === 'dispatched'), + ); + const currentHopState = activeHopStates[0] ?? null; + const currentHop = currentHopState?.session ?? run.activeTargetSessions[0] ?? run.currentTargetSession; const hopCounts = countHopStates(run.hopStates); return { @@ -244,17 +242,18 @@ export function serializeP2pRun(run: P2pRun): P2pRunUpdatePayload { const status = hop.status === 'completed' ? 'done' : 'skipped'; nodes.push({ session: t.session, ...info, status }); } - if (currentHopState) { + const activeSessions = new Set(activeHopStates.map((hop) => hop.session)); + for (const activeHop of activeHopStates) { const curMode = combo ? resolveMode(run.currentRound) : ( - run.allTargets.find((t) => t.session === currentHopState.session)?.mode - ?? run.remainingTargets.find((t) => t.session === currentHopState.session)?.mode + run.allTargets.find((t) => t.session === activeHop.session)?.mode + ?? run.remainingTargets.find((t) => t.session === activeHop.session)?.mode ?? 
run.mode ); - const info = getInfo(currentHopState.session, curMode, 'hop'); - nodes.push({ session: currentHopState.session, ...info, status: 'active' }); + const info = getInfo(activeHop.session, curMode, 'hop'); + nodes.push({ session: activeHop.session, ...info, status: 'active' }); } for (const t of run.remainingTargets) { - if (t.session === currentHop) continue; + if (activeSessions.has(t.session)) continue; const pendingMode = combo ? resolveMode(run.currentRound) : t.mode; const info = getInfo(t.session, pendingMode, 'hop'); nodes.push({ session: t.session, ...info, status: 'pending' }); @@ -473,7 +472,8 @@ export async function cancelP2pRun(runId: string, serverLink: ServerLink | null) return true; } - return false; + activeRuns.delete(runId); + return true; } // ── Resume after daemon restart ─────────────────────────────────────────── @@ -734,12 +734,12 @@ async function executeChain(run: P2pRun, modeConfig: P2pMode | undefined, server ? `${discussionParticipantNameWithMode(run.initiatorSession, roundModeKey)} — Final Summary` : `${discussionParticipantNameWithMode(run.initiatorSession, roundModeKey)} — Round ${run.currentRound}/${run.rounds} Summary`; const roundSummaryInstruction = isLastRound - ? `${summaryModeConfig?.summaryPrompt ?? 'Synthesize a final summary that captures the consensus, key decisions, and any remaining disagreements across all rounds.'}\nBefore writing the summary, use the hop evidence already appended into the discussion file for this round. Append only the new summary section.` + ? `${summaryModeConfig?.summaryPrompt ?? 'Synthesize a final summary that captures the consensus, key decisions, and any remaining disagreements across all rounds.'}\nBefore writing the summary, use the hop evidence already appended into the discussion file for this round. If the user context clearly specifies a destination file for the final plan, write the complete plan there. 
Otherwise, write the complete plan at the end of the discussion file.` : `Synthesize the key points, areas of agreement, and open questions from this round. Then assign specific focus areas or questions for each participant in the next round (round ${run.currentRound + 1}). Append to the file.\nIMPORTANT: This is ANALYSIS ONLY. Do NOT implement fixes, do NOT edit code files, do NOT run commands. Only write your analysis into this discussion file.`; const roundSummaryPrompt = buildHopPrompt(run, summaryModeConfig, { session: run.initiatorSession, sectionHeader: roundSummaryHeader, - instruction: `${roundSummaryInstruction}\nThe orchestrator has already appended each completed hop's evidence into the discussion file. Do not re-copy or restructure prior sections; append only your round-summary section.`, + instruction: `${roundSummaryInstruction}\nThe orchestrator has already appended each completed hop's evidence into the discussion file. If you write the final plan to another file, still append a short completion note under the new final-summary heading in the discussion file that records the chosen output file path.`, isInitial: false, }, rp); logger.info({ runId: run.id, round: run.currentRound, isLastRound, roundMode: roundModeKey }, isLastRound ? 'P2P: Final summary — initiator' : 'P2P: Round summary — initiator'); @@ -1071,12 +1071,17 @@ export function buildHopPrompt(run: P2pRun, mode: P2pMode | undefined, opts: Hop parts.push(`[P2P Discussion Task — run ${run.id}]`); parts.push(``); if (isFinalSummary) { - parts.push(`This is the FINAL round of a multi-agent discussion. Your discussion file is: ${filePath}`); + parts.push(`This is the FINAL round of a multi-agent discussion.`); + parts.push(`Discussion file: ${run.contextFilePath}`); parts.push(``); parts.push(`Steps:`); - parts.push(`1. Read the discussion file`); - parts.push(`2. Add a new heading "## ${opts.sectionHeader}" at the end and write your final synthesis`); - parts.push(`3. 
Base the synthesis on the collected hop evidence already appended into the discussion file for this round`); + parts.push(`1. Read the discussion file and use both the user's original request and the final discussion evidence as source context`); + parts.push(`2. Infer whether the user context specifies a concrete destination file for the final plan`); + parts.push(`3. If a concrete destination file is clear from the user context, write the complete plan there. Otherwise, write the complete plan at the end of the discussion file under a new heading "## ${opts.sectionHeader}"`); + parts.push(`4. If you wrote the plan to another file, still append a short note under "## ${opts.sectionHeader}" in the discussion file that records the destination path and confirms the plan was written`); + parts.push(``); + parts.push(`Final summary instructions:`); + parts.push(opts.instruction); parts.push(``); parts.push(`User's original request: "${run.userText}"`); } else { @@ -1124,6 +1129,9 @@ function transition(run: P2pRun, status: P2pRunStatus, serverLink: ServerLink | } else if (status === 'failed' || status === 'timed_out') { run.runPhase = 'failed'; } + if (P2P_TERMINAL_RUN_STATUSES.has(status)) { + run.completedAt = run.completedAt ?? new Date().toISOString(); + } run.updatedAt = new Date().toISOString(); logger.info({ runId: run.id, status }, 'P2P run state transition'); pushState(run, serverLink); @@ -1131,6 +1139,7 @@ function transition(run: P2pRun, status: P2pRunStatus, serverLink: ServerLink | function failRun(run: P2pRun, errorType: string, message: string, serverLink: ServerLink | null): void { run.error = `${errorType}: ${message}`; + run.completedAt = run.completedAt ?? new Date().toISOString(); run.updatedAt = new Date().toISOString(); const status: P2pRunStatus = errorType === 'timed_out' ? 
'timed_out' : 'failed'; run.status = status; diff --git a/src/daemon/provider-sessions.ts b/src/daemon/provider-sessions.ts new file mode 100644 index 000000000..297599286 --- /dev/null +++ b/src/daemon/provider-sessions.ts @@ -0,0 +1,8 @@ +import { getProvider } from '../agent/provider-registry.js'; + +export async function listProviderSessions(providerId: string): Promise> { + const provider = getProvider(providerId); + if (!provider) return []; + if (!provider.capabilities.sessionRestore || !provider.listSessions) return []; + return provider.listSessions(); +} diff --git a/src/daemon/session-error.ts b/src/daemon/session-error.ts new file mode 100644 index 000000000..f2c4c2ed5 --- /dev/null +++ b/src/daemon/session-error.ts @@ -0,0 +1,17 @@ +import type { TimelineSource } from './timeline-event.js'; +import { timelineEmitter } from './timeline-emitter.js'; + +export function formatSessionErrorMessage(message: string): string { + return message.startsWith('⚠️') ? message : `⚠️ Error: ${message}`; +} + +export function emitSessionInlineError( + sessionId: string, + message: string, + source: TimelineSource = 'daemon', +): void { + timelineEmitter.emit(sessionId, 'assistant.text', { + text: formatSessionErrorMessage(message), + streaming: false, + }, { source, confidence: 'high' }); +} diff --git a/src/daemon/subsession-manager.ts b/src/daemon/subsession-manager.ts index f9770811a..18ccd89db 100644 --- a/src/daemon/subsession-manager.ts +++ b/src/daemon/subsession-manager.ts @@ -2,11 +2,8 @@ * Sub-session manager — creates/stops/rebuilds tmux sessions for sub-sessions. 
*/ -import { newSession, killSession, sessionExists, getPanePids } from '../agent/tmux.js'; -import { execFile } from 'node:child_process'; -import { promisify } from 'node:util'; -const execFileAsync = promisify(execFile); -import { getDriver, getTransportRuntime, launchTransportSession } from '../agent/session-manager.js'; +import { newSession, killSession, sessionExists } from '../agent/tmux.js'; +import { getDriver, getTransportRuntime, launchTransportSession, stopTransportRuntimeSession } from '../agent/session-manager.js'; import type { AgentType } from '../agent/detect.js'; import { isTransportAgent } from '../agent/detect.js'; import { timelineStore } from './timeline-store.js'; @@ -19,6 +16,8 @@ import type { TransportEffortLevel } from '../../shared/effort-levels.js'; import logger from '../util/logger.js'; import { getAgentVersion } from '../agent/agent-version.js'; +import { closeSingleSession, type CloseFailure, type CloseTreeResult } from '../agent/session-close.js'; +import { emitSessionInlineError } from './session-error.js'; export interface SubSessionRecord { id: string; @@ -210,7 +209,7 @@ export async function startSubSession(sub: SubSessionRecord): Promise { timelineEmitter.emit(sessionName, 'session.state', { state: 'started' }); upsertSession({ - name: sessionName, projectName: sessionName, agentType: sub.type, agentVersion, role: 'w1', state: 'running', + name: sessionName, projectName: sessionName, agentType: sub.type, agentVersion, role: 'w1', state: 'idle', projectDir: sub.cwd ?? '', label: sub.label ?? undefined, ccSessionId: sub.ccSessionId ?? undefined, codexSessionId: sub.codexSessionId ?? undefined, @@ -243,43 +242,62 @@ export async function startSubSession(sub: SubSessionRecord): Promise { } } -/** Validate that a session name matches the expected pattern to prevent injection. */ -const SAFE_SESSION_NAME_RE = /^deck_sub_[a-zA-Z0-9_-]+$/; - -/** Kill all processes running inside a session's panes before killing the session itself. 
- * This prevents orphan agent processes that hold session UUIDs after the session is gone. - * Uses the backend-aware getPanePids() export from tmux.ts. */ -async function killSessionProcesses(sessionName: string): Promise { - if (!SAFE_SESSION_NAME_RE.test(sessionName)) { - logger.warn({ sessionName }, 'Rejected invalid session name in killSessionProcesses'); - return; - } - try { - const pids = await getPanePids(sessionName); - for (const pid of pids) { - if (!/^\d+$/.test(pid)) continue; // only allow numeric PIDs - // Kill all children of the shell (the actual agent process), then the shell itself - await execFileAsync('pkill', ['-9', '-P', pid]).catch(() => {}); - await execFileAsync('kill', ['-9', pid]).catch(() => {}); - } - } catch { /* session may not exist or have no panes */ } +function buildSubSessionCloseFailureMessage(failure: CloseFailure): string { + return `Sub-session close failed during ${failure.stage}: ${failure.message}`; } -export async function stopSubSession(sessionName: string, serverLink?: { send(msg: object): void } | null): Promise { - timelineEmitter.emit(sessionName, 'session.state', { state: 'stopped' }); - await killSessionProcesses(sessionName); - await killSession(sessionName).catch(() => {}); - (await import('./jsonl-watcher.js')).stopWatching(sessionName); - (await import('./codex-watcher.js')).stopWatching(sessionName); - (await import('./gemini-watcher.js')).stopWatching(sessionName); - (await import('./opencode-watcher.js')).stopWatching(sessionName); - removeSession(sessionName); - - // Notify server so DB is updated (sub-session ID = session name without 'deck_sub_' prefix) - const id = sessionName.replace(/^deck_sub_/, ''); - if (serverLink && id !== sessionName) { - try { serverLink.send({ type: 'subsession.closed', id, sessionName }); } catch { /* not connected */ } +export async function stopSubSession( + sessionName: string, + serverLink?: { send(msg: object): void } | null, +): Promise { + const record = 
getSession(sessionName); + if (!record) { + return { ok: true, closed: [], failed: [] }; } + + return closeSingleSession(record, { + emitStopping: () => { + timelineEmitter.emit(sessionName, 'session.state', { state: 'stopping' }); + }, + stopWatchers: async () => { + (await import('./jsonl-watcher.js')).stopWatching(sessionName); + (await import('./codex-watcher.js')).stopWatching(sessionName); + (await import('./gemini-watcher.js')).stopWatching(sessionName); + (await import('./opencode-watcher.js')).stopWatching(sessionName); + }, + stopTransportRuntime: async () => { + await stopTransportRuntimeSession(sessionName); + }, + killProcessRuntime: async () => { + await killSession(sessionName); + }, + verifyClosed: async () => { + const runtime = getTransportRuntime(sessionName); + if (runtime) throw new Error('transport runtime still active'); + if (record.runtimeType !== 'transport' && await sessionExists(sessionName)) { + throw new Error('session still exists after kill'); + } + }, + emitSuccess: async () => { + timelineEmitter.emit(sessionName, 'session.state', { state: 'stopped' }); + }, + persistSuccess: async () => { + const id = sessionName.replace(/^deck_sub_/, ''); + if (serverLink && id !== sessionName) { + serverLink.send({ type: 'subsession.closed', id, sessionName }); + } + removeSession(sessionName); + }, + emitFailure: async (_record, failure) => { + const message = buildSubSessionCloseFailureMessage(failure); + emitSessionInlineError(sessionName, message); + timelineEmitter.emit(sessionName, 'session.state', { state: 'error', error: message }); + }, + persistFailure: async (_record, failure) => { + upsertSession({ ...record, state: 'error', updatedAt: Date.now() }); + logger.warn({ sessionName, stage: failure.stage, message: failure.message }, 'Sub-session shutdown failed'); + }, + }); } export async function rebuildSubSessions(subSessions: SubSessionRecord[]): Promise { @@ -347,7 +365,7 @@ export async function rebuildSubSessions(subSessions: 
SubSessionRecord[]): Promi const effectiveGeminiSessionId = sub.geminiSessionId ?? stored?.geminiSessionId; const effectiveOpenCodeSessionId = sub.opencodeSessionId ?? stored?.opencodeSessionId; upsertSession({ - name: sessionName, projectName: sessionName, agentType: sub.type, agentVersion: stored?.agentVersion ?? await getAgentVersion(sub.type as AgentType, sub.shellBin ?? undefined), role: 'w1', state: 'running', + name: sessionName, projectName: sessionName, agentType: sub.type, agentVersion: stored?.agentVersion ?? await getAgentVersion(sub.type as AgentType, sub.shellBin ?? undefined), role: 'w1', state: 'idle', projectDir: sub.cwd ?? '', label: sub.label ?? stored?.label ?? undefined, ccSessionId: effectiveCcSessionId ?? undefined, codexSessionId: effectiveCodexSessionId ?? undefined, diff --git a/src/daemon/terminal-streamer.ts b/src/daemon/terminal-streamer.ts index d1c054ba4..5c3f6da5a 100644 --- a/src/daemon/terminal-streamer.ts +++ b/src/daemon/terminal-streamer.ts @@ -17,6 +17,7 @@ import type { Readable } from 'stream'; import { BACKEND, capturePaneVisible, capturePaneHistory, getPaneId, getPaneSize, sessionExists, startPipePaneStream, stopPipePaneStream } from '../agent/tmux.js'; +import { isTransportAgent } from '../agent/detect.js'; import { getSession, upsertSession } from '../store/session-store.js'; import { processRawPtyData, resetParser } from './terminal-parser.js'; import { isWatching } from './jsonl-watcher.js'; @@ -24,6 +25,7 @@ import { isWatching as isCodexWatching } from './codex-watcher.js'; import { isWatching as isGeminiWatching } from './gemini-watcher.js'; import logger from '../util/logger.js'; import { timelineEmitter } from './timeline-emitter.js'; +import { emitSessionInlineError } from './session-error.js'; import type { TerminalDiff, TerminalHistory } from '../shared/transport/terminal.js'; const IDLE_THRESHOLD_MS = 5_000; // 5s without raw bytes → idle (Stop hook fires immediately; this is fallback) @@ -31,6 +33,12 @@ const 
MAX_RAW_BUFFER = 256 * 1024; // 256KB per-subscriber snapshot-pending buff const REBIND_DELAYS_MS = [1000, 2000, 4000, 8000, 16000, 30000]; const MAX_REBIND_ATTEMPTS = 5; +function shouldSuppressPaneIdInlineError(sessionName: string): boolean { + const session = getSession(sessionName); + return session?.runtimeType === 'transport' + || (typeof session?.agentType === 'string' && isTransportAgent(session.agentType)); +} + export type { TerminalDiff, TerminalHistory } from '../shared/transport/terminal.js'; export interface StreamSubscriber { @@ -293,6 +301,9 @@ export class TerminalStreamer { } if (!paneId) { logger.error({ sessionName }, 'Cannot start pipe-pane: paneId not available — restart session to fix'); + if (!shouldSuppressPaneIdInlineError(sessionName)) { + this.emitSessionStreamError(sessionName, 'Terminal stream unavailable: pane id not available. Restart the session to fix.'); + } // Do not remove subscribers: they can still receive on-demand snapshots return; } @@ -481,6 +492,7 @@ export class TerminalStreamer { private errorAllSubscribers(sessionName: string, err: Error): void { const subs = this.subscribers.get(sessionName); if (!subs) return; + this.emitSessionStreamError(sessionName, err.message); for (const [sub] of subs) { try { sub.onError?.(err); } catch { /* ignore */ } } @@ -489,6 +501,10 @@ export class TerminalStreamer { this.clearIdleTimer(sessionName); } + private emitSessionStreamError(sessionName: string, message: string): void { + emitSessionInlineError(sessionName, message); + } + // ── Idle detection ────────────────────────────────────────────────────────── private resetIdleTimer(sessionName: string): void { diff --git a/src/daemon/timeline-emitter.ts b/src/daemon/timeline-emitter.ts index cab68bd0b..8faae7f13 100644 --- a/src/daemon/timeline-emitter.ts +++ b/src/daemon/timeline-emitter.ts @@ -67,6 +67,7 @@ export class TimelineEmitter { // Deduplicate user.message — skip if same session + same text within 5s if (type === 
'user.message') { const text = String(payload.text ?? ''); + const allowDuplicate = payload.allowDuplicate === true; // Resolve temp file references: replace instruction with actual file content const tempMatch = text.match(TEMP_FILE_RE); @@ -79,10 +80,12 @@ export class TimelineEmitter { const key = sessionId; const resolvedText = String(payload.text ?? ''); - const prev = this.recentUserMsg.get(key); - const now = Date.now(); - if (prev && prev.text === resolvedText && now - prev.ts < 5_000) return null; - this.recentUserMsg.set(key, { text: resolvedText, ts: now }); + if (!allowDuplicate) { + const prev = this.recentUserMsg.get(key); + const now = Date.now(); + if (prev && prev.text === resolvedText && now - prev.ts < 5_000) return null; + this.recentUserMsg.set(key, { text: resolvedText, ts: now }); + } } const seq = (this.seqMap.get(sessionId) ?? 0) + 1; diff --git a/src/daemon/transport-relay.ts b/src/daemon/transport-relay.ts index 3d5a767b1..73cb58897 100644 --- a/src/daemon/transport-relay.ts +++ b/src/daemon/transport-relay.ts @@ -179,7 +179,7 @@ export function wireProviderToRelay(provider: TransportProvider): void { /** Emit user.message through timeline when user sends to a transport session. */ export function emitTransportUserMessage(sessionId: string, text: string): void { - timelineEmitter.emit(sessionId, 'user.message', { text }, { source: 'daemon', confidence: 'high' }); + timelineEmitter.emit(sessionId, 'user.message', { text, allowDuplicate: true }, { source: 'daemon', confidence: 'high' }); void appendTransportEvent(sessionId, { type: 'user.message', sessionId, @@ -209,7 +209,7 @@ export function broadcastProviderStatus(providerId: string, connected: boolean): /** Fetch remote sessions from a provider and broadcast to browsers + sync to server DB. 
*/ async function pushProviderSessions(providerId: string): Promise { try { - const { listProviderSessions } = await import('./command-handler.js'); + const { listProviderSessions } = await import('./provider-sessions.js'); const sessions = await listProviderSessions(providerId); if (!sendToServer) return; // Send via sync_sessions — bridge handles this: caches, persists to DB, broadcasts to browsers diff --git a/src/shared/models/context.ts b/src/shared/models/context.ts index a4f942e64..b13b0a543 100644 --- a/src/shared/models/context.ts +++ b/src/shared/models/context.ts @@ -6,6 +6,7 @@ export const OPENAI_CONTEXT_WINDOWS = { export const CLAUDE_CONTEXT_WINDOWS = { OPUS_1M_ALIAS: 1_000_000, + OPUS_4_FAMILY: 1_000_000, CLAUDE_4_FAMILY: 200_000, CLAUDE_3_FAMILY: 200_000, } as const; @@ -39,8 +40,9 @@ export function inferContextWindow(model?: string | null): number | undefined { if (/^gpt-4\.1(?:$|[-_.])/.test(m)) return OPENAI_CONTEXT_WINDOWS.GPT_41_FAMILY; if (m == 'opus[1m]') return CLAUDE_CONTEXT_WINDOWS.OPUS_1M_ALIAS; + if (/^claude-opus-4(?:$|[-_.])/.test(m)) return CLAUDE_CONTEXT_WINDOWS.OPUS_4_FAMILY; if (m == 'sonnet' || m == 'haiku') return CLAUDE_CONTEXT_WINDOWS.CLAUDE_4_FAMILY; - if (/^claude-(?:opus|sonnet|haiku)-4(?:$|[-_.])/.test(m)) return CLAUDE_CONTEXT_WINDOWS.CLAUDE_4_FAMILY; + if (/^claude-(?:sonnet|haiku)-4(?:$|[-_.])/.test(m)) return CLAUDE_CONTEXT_WINDOWS.CLAUDE_4_FAMILY; if (/^claude-3(?:[.-]|$)/.test(m)) return CLAUDE_CONTEXT_WINDOWS.CLAUDE_3_FAMILY; if (/^coder-model$/.test(m)) return QWEN_CONTEXT_WINDOWS.CODER_MODEL; diff --git a/src/store/session-store.ts b/src/store/session-store.ts index 74939562e..2982b77aa 100644 --- a/src/store/session-store.ts +++ b/src/store/session-store.ts @@ -141,7 +141,13 @@ async function probeSessionStates(): Promise { function scheduleWrite(): void { if (writeTimer) clearTimeout(writeTimer); writeTimer = setTimeout(async () => { - await writeFile(STORE_PATH, JSON.stringify(store, null, 2), 'utf8'); + try { 
+ await mkdir(STORE_DIR, { recursive: true }); + await writeFile(STORE_PATH, JSON.stringify(store, null, 2), 'utf8'); + } catch { + // Tests may tear down temp HOME dirs while a debounced write is pending. + // Losing that best-effort write is fine; a later flush/load will recreate it. + } writeTimer = null; }, DEBOUNCE_MS); } @@ -183,5 +189,6 @@ export async function flushStore(): Promise { clearTimeout(writeTimer); writeTimer = null; } + await mkdir(STORE_DIR, { recursive: true }); await writeFile(STORE_PATH, JSON.stringify(store, null, 2), 'utf8'); } diff --git a/src/util/windows-upgrade-script.ts b/src/util/windows-upgrade-script.ts index 9d5247ff6..e5104ed0a 100644 --- a/src/util/windows-upgrade-script.ts +++ b/src/util/windows-upgrade-script.ts @@ -14,9 +14,13 @@ export interface WindowsUpgradeScriptInput { } export function buildWindowsCleanupScript(scriptDir: string): string { + void scriptDir; return `@echo off\r +chcp 65001 >nul 2>&1\r +setlocal\r timeout /t 120 /nobreak >nul\r -rmdir /s /q "${scriptDir}"\r +for %%I in ("%~dp0.") do set "SCRIPT_DIR=%%~fI"\r +rmdir /s /q "%SCRIPT_DIR%"\r `; } @@ -36,14 +40,24 @@ export function buildWindowsUpgradeVbs(batchPath: string): string { export function buildWindowsUpgradeBatch(input: WindowsUpgradeScriptInput): string { const { logFile, cleanupVbsPath, npmCmd, pkgSpec, targetVer, vbsLauncherPath, upgradeLockFile } = input; + void logFile; + void cleanupVbsPath; + void vbsLauncherPath; + void upgradeLockFile; return `@echo off\r +chcp 65001 >nul 2>&1\r setlocal EnableDelayedExpansion\r -echo === imcodes upgrade started at %date% %time% === >> "${logFile}"\r +for %%I in ("%~dp0.") do set "SCRIPT_DIR=%%~fI"\r +set "LOG_FILE=%SCRIPT_DIR%\\upgrade.log"\r +set "CLEANUP_VBS=%SCRIPT_DIR%\\cleanup.vbs"\r +set "VBS_LAUNCHER=%USERPROFILE%\\.imcodes\\daemon-launcher.vbs"\r +set "UPGRADE_LOCK=%USERPROFILE%\\.imcodes\\upgrade.lock"\r +echo === imcodes upgrade started at %date% %time% === >> "%LOG_FILE%"\r timeout /t 2 /nobreak 
> nul\r \r rem ── Create upgrade lock — watchdog will pause while this file exists ──\r -echo upgrade > "${upgradeLockFile}"\r -echo Upgrade lock created >> "${logFile}"\r +echo upgrade > "%UPGRADE_LOCK%"\r +echo Upgrade lock created >> "%LOG_FILE%"\r \r rem ── Kill daemon + old watchdog so npm can overwrite files cleanly ─────\r rem Old watchdog versions don't know about the lock file, so we must kill\r @@ -52,7 +66,7 @@ rem that respects the lock.\r set "PIDFILE=%USERPROFILE%\\.imcodes\\daemon.pid"\r if exist "%PIDFILE%" (\r set /p OLD_PID=<"%PIDFILE%"\r - echo Stopping daemon PID !OLD_PID! and old watchdog... >> "${logFile}"\r + echo Stopping daemon PID !OLD_PID! and old watchdog... >> "%LOG_FILE%"\r rem Find watchdog (parent of daemon) and tree-kill it\r for /f "tokens=2 delims==" %%a in ('wmic process where "ProcessId=!OLD_PID!" get ParentProcessId /format:list 2^>nul ^| find "="') do (\r set "WD_PID=%%a"\r @@ -64,72 +78,72 @@ if exist "%PIDFILE%" (\r timeout /t 2 /nobreak >nul\r )\r \r -echo Installing ${pkgSpec}... >> "${logFile}"\r -call "${npmCmd}" install -g ${pkgSpec} >> "${logFile}" 2>&1\r +echo Installing ${pkgSpec}... >> "%LOG_FILE%"\r +call "${npmCmd}" install -g ${pkgSpec} >> "%LOG_FILE%" 2>&1\r if %errorlevel% neq 0 (\r - echo Install FAILED — removing lock, watchdog will restart current version. >> "${logFile}"\r - echo === upgrade aborted at %date% %time% === >> "${logFile}"\r - del "${upgradeLockFile}" >nul 2>&1\r - if exist "${vbsLauncherPath}" wscript "${vbsLauncherPath}"\r - wscript "${cleanupVbsPath}" >nul 2>&1\r + echo Install FAILED — removing lock, watchdog will restart current version. 
>> "%LOG_FILE%"\r + echo === upgrade aborted at %date% %time% === >> "%LOG_FILE%"\r + del "%UPGRADE_LOCK%" >nul 2>&1\r + if exist "%VBS_LAUNCHER%" wscript "%VBS_LAUNCHER%"\r + wscript "%CLEANUP_VBS%" >nul 2>&1\r goto :done\r )\r \r set "NPM_PREFIX="\r for /f "usebackq delims=" %%p in (\`call "${npmCmd}" prefix -g 2^>nul\`) do if not defined NPM_PREFIX set "NPM_PREFIX=%%p"\r if not defined NPM_PREFIX (\r - echo Could not resolve npm global prefix after install. >> "${logFile}"\r - echo === upgrade aborted at %date% %time% === >> "${logFile}"\r - del "${upgradeLockFile}" >nul 2>&1\r - if exist "${vbsLauncherPath}" wscript "${vbsLauncherPath}"\r - wscript "${cleanupVbsPath}" >nul 2>&1\r + echo Could not resolve npm global prefix after install. >> "%LOG_FILE%"\r + echo === upgrade aborted at %date% %time% === >> "%LOG_FILE%"\r + del "%UPGRADE_LOCK%" >nul 2>&1\r + if exist "%VBS_LAUNCHER%" wscript "%VBS_LAUNCHER%"\r + wscript "%CLEANUP_VBS%" >nul 2>&1\r goto :done\r )\r \r set "CLI_SHIM=%NPM_PREFIX%\\imcodes.cmd"\r if not exist "%CLI_SHIM%" (\r - echo imcodes shim missing after install: %CLI_SHIM% >> "${logFile}"\r - echo === upgrade aborted at %date% %time% === >> "${logFile}"\r - del "${upgradeLockFile}" >nul 2>&1\r - if exist "${vbsLauncherPath}" wscript "${vbsLauncherPath}"\r - wscript "${cleanupVbsPath}" >nul 2>&1\r + echo imcodes shim missing after install: %CLI_SHIM% >> "%LOG_FILE%"\r + echo === upgrade aborted at %date% %time% === >> "%LOG_FILE%"\r + del "%UPGRADE_LOCK%" >nul 2>&1\r + if exist "%VBS_LAUNCHER%" wscript "%VBS_LAUNCHER%"\r + wscript "%CLEANUP_VBS%" >nul 2>&1\r goto :done\r )\r \r set "INSTALLED_VER="\r for /f "usebackq delims=" %%v in (\`call "%CLI_SHIM%" --version 2^>nul\`) do if not defined INSTALLED_VER set "INSTALLED_VER=%%v"\r -echo Install succeeded. Installed version: %INSTALLED_VER%, target: ${targetVer}, shim: %CLI_SHIM% >> "${logFile}"\r +echo Install succeeded. 
Installed version: %INSTALLED_VER%, target: ${targetVer}, shim: %CLI_SHIM% >> "%LOG_FILE%"\r if not "${targetVer}"=="latest" if /I not "%INSTALLED_VER%"=="${targetVer}" (\r - echo Version mismatch after install — removing lock, watchdog will restart. >> "${logFile}"\r - echo === upgrade aborted at %date% %time% === >> "${logFile}"\r - del "${upgradeLockFile}" >nul 2>&1\r - if exist "${vbsLauncherPath}" wscript "${vbsLauncherPath}"\r - wscript "${cleanupVbsPath}" >nul 2>&1\r + echo Version mismatch after install — removing lock, watchdog will restart. >> "%LOG_FILE%"\r + echo === upgrade aborted at %date% %time% === >> "%LOG_FILE%"\r + del "%UPGRADE_LOCK%" >nul 2>&1\r + if exist "%VBS_LAUNCHER%" wscript "%VBS_LAUNCHER%"\r + wscript "%CLEANUP_VBS%" >nul 2>&1\r goto :done\r )\r where imcodes >nul 2>&1\r if %errorlevel% neq 0 (\r - echo WARNING: imcodes not found on PATH >> "${logFile}"\r - echo To fix: setx PATH "%NPM_PREFIX%;%%PATH%%" >> "${logFile}"\r + echo WARNING: imcodes not found on PATH >> "%LOG_FILE%"\r + echo To fix: setx PATH "%NPM_PREFIX%;%%PATH%%" >> "%LOG_FILE%"\r )\r -echo Regenerating daemon launch chain... >> "${logFile}"\r -call "%CLI_SHIM%" repair-watchdog >> "${logFile}" 2>&1\r +echo Regenerating daemon launch chain... >> "%LOG_FILE%"\r +call "%CLI_SHIM%" repair-watchdog >> "%LOG_FILE%" 2>&1\r if %errorlevel% neq 0 (\r - echo WARNING: Launch chain regeneration failed >> "${logFile}"\r + echo WARNING: Launch chain regeneration failed >> "%LOG_FILE%"\r )\r \r rem ── Start new watchdog (lock-aware), then remove lock ─────────────────\r rem The new watchdog (generated by repair-watchdog) checks the lock file.\r rem It will loop/wait while the lock exists, then start the daemon once\r rem we delete it below.\r -echo Starting new watchdog via VBS... >> "${logFile}"\r -if exist "${vbsLauncherPath}" (\r - wscript "${vbsLauncherPath}"\r +echo Starting new watchdog via VBS... 
>> "%LOG_FILE%"\r +if exist "%VBS_LAUNCHER%" (\r + wscript "%VBS_LAUNCHER%"\r ) else (\r - echo WARNING: VBS launcher not found at ${vbsLauncherPath} >> "${logFile}"\r + echo WARNING: VBS launcher not found at %VBS_LAUNCHER% >> "%LOG_FILE%"\r )\r -echo Removing upgrade lock... >> "${logFile}"\r -del "${upgradeLockFile}" >nul 2>&1\r +echo Removing upgrade lock... >> "%LOG_FILE%"\r +del "%UPGRADE_LOCK%" >nul 2>&1\r \r rem Wait for new watchdog to start the daemon, then health-check\r timeout /t 10 /nobreak >nul\r @@ -137,15 +151,15 @@ if exist "%PIDFILE%" (\r set /p DAEMON_PID=<"%PIDFILE%"\r tasklist /fi "PID eq !DAEMON_PID!" /nh 2^>nul | find "!DAEMON_PID!" >nul\r if !errorlevel! equ 0 (\r - echo Health check PASSED: daemon PID !DAEMON_PID! alive >> "${logFile}"\r + echo Health check PASSED: daemon PID !DAEMON_PID! alive >> "%LOG_FILE%"\r ) else (\r - echo Health check FAILED: PID !DAEMON_PID! not running >> "${logFile}"\r + echo Health check FAILED: PID !DAEMON_PID! not running >> "%LOG_FILE%"\r )\r ) else (\r - echo Health check FAILED: daemon.pid not found >> "${logFile}"\r + echo Health check FAILED: daemon.pid not found >> "%LOG_FILE%"\r )\r -wscript "${cleanupVbsPath}" >nul 2>&1\r +wscript "%CLEANUP_VBS%" >nul 2>&1\r :done\r -echo === upgrade done at %date% %time% === >> "${logFile}"\r +echo === upgrade done at %date% %time% === >> "%LOG_FILE%"\r `; } diff --git a/test/agent/claude-code-sdk-provider.test.ts b/test/agent/claude-code-sdk-provider.test.ts index 02707e3f0..cfd13f4b1 100644 --- a/test/agent/claude-code-sdk-provider.test.ts +++ b/test/agent/claude-code-sdk-provider.test.ts @@ -152,6 +152,87 @@ describe('ClaudeCodeSdkProvider', () => { expect(run.options.resume).toBeUndefined(); }); + it('uses resume mode when createSession marks an inherited session as existing', async () => { + sdkMock.setNextMessages([ + { type: 'system', subtype: 'init', session_id: 'session-existing', model: 'claude-sonnet-4-6' }, + { type: 'result', session_id: 
'session-existing', subtype: 'success', is_error: false, result: 'ACK', usage: { input_tokens: 1, output_tokens: 1, cache_read_input_tokens: 0 } }, + ]); + + const provider = new ClaudeCodeSdkProvider(); + await provider.connect({ binaryPath: 'claude' }); + await provider.createSession({ + sessionKey: 'route-existing', + cwd: '/tmp/project', + resumeId: 'session-existing', + skipCreate: true, + }); + + await provider.send('route-existing', 'hello'); + await flush(); + + const run = sdkMock.runs.at(-1)!; + expect(run.options.resume).toBe('session-existing'); + expect(run.options.sessionId).toBeUndefined(); + }); + + it('falls back to sessionId create when inherited resume id no longer exists', async () => { + const makeIterator = (messages: any[]) => { + async function* gen() { + for (const message of messages) yield message; + } + const iterator = gen() as AsyncGenerator & { close(): void; interrupt(): Promise }; + iterator.close = () => {}; + iterator.interrupt = async () => {}; + return iterator; + }; + + sdkMock.query + .mockImplementationOnce(({ prompt, options }: { prompt: string; options: Record }) => { + sdkMock.runs.push({ prompt, options, closed: false, interrupted: false }); + return makeIterator([ + { + type: 'result', + session_id: 'session-missing', + subtype: 'error', + is_error: true, + errors: ['No conversation found with session ID: session-missing'], + }, + ]); + }) + .mockImplementationOnce(({ prompt, options }: { prompt: string; options: Record }) => { + sdkMock.runs.push({ prompt, options, closed: false, interrupted: false }); + return makeIterator([ + { type: 'system', subtype: 'init', session_id: 'session-missing', model: 'claude-sonnet-4-6' }, + { type: 'result', session_id: 'session-missing', subtype: 'success', is_error: false, result: 'ACK', usage: { input_tokens: 1, output_tokens: 1, cache_read_input_tokens: 0 } }, + ]); + }); + + const provider = new ClaudeCodeSdkProvider(); + await provider.connect({ binaryPath: 'claude' }); + await 
provider.createSession({ + sessionKey: 'route-missing', + cwd: '/tmp/project', + resumeId: 'session-missing', + skipCreate: true, + }); + + const completed: string[] = []; + const errors: string[] = []; + provider.onComplete((_sid, msg) => completed.push(msg.content)); + provider.onError((_sid, err) => errors.push(err.message)); + + await provider.send('route-missing', 'hello'); + await flush(); + + expect(sdkMock.runs).toHaveLength(2); + expect(sdkMock.runs[0]?.options.resume).toBe('session-missing'); + expect(sdkMock.runs[0]?.options.sessionId).toBeUndefined(); + expect(sdkMock.runs[1]?.options.sessionId).toBe('session-missing'); + expect(sdkMock.runs[1]?.options.resume).toBeUndefined(); + expect(completed).toEqual(['ACK']); + expect(errors).toEqual([]); + }); + it('passes session env through to the Claude SDK query options', async () => { sdkMock.setNextMessages([ { type: 'system', subtype: 'init', session_id: 'session-env', model: 'claude-sonnet-4-6' }, diff --git a/test/agent/drivers/drivers.test.ts b/test/agent/drivers/drivers.test.ts index 05c25f8b6..65fe4ad94 100644 --- a/test/agent/drivers/drivers.test.ts +++ b/test/agent/drivers/drivers.test.ts @@ -1,4 +1,4 @@ -import { describe, it, expect, vi } from 'vitest'; +import { describe, it, expect, vi, afterEach } from 'vitest'; import { ClaudeCodeDriver } from '../../../src/agent/drivers/claude-code.js'; import { CodexDriver } from '../../../src/agent/drivers/codex.js'; import { OpenCodeDriver } from '../../../src/agent/drivers/opencode.js'; @@ -9,6 +9,10 @@ import { ShellDriver } from '../../../src/agent/drivers/shell.js'; describe('ClaudeCodeDriver', () => { const driver = new ClaudeCodeDriver(); + afterEach(() => { + vi.useRealTimers(); + }); + it('type is claude-code', () => { expect(driver.type).toBe('claude-code'); }); @@ -62,6 +66,30 @@ describe('ClaudeCodeDriver', () => { const result = await driver.captureLastResponse(capturePane, sendKeys, showBuffer); expect(result).toContain('fallback line'); }); + 
+ it('postLaunch confirms the summary resume chooser with Enter', async () => { + vi.useFakeTimers(); + const capturePane = vi.fn() + .mockResolvedValueOnce([ + '1. Resume from summary (recommended)', + '2. Resume full session as-is', + "3. Don't ask me again", + 'Enter to confirm · Esc to cancel', + ]) + .mockResolvedValueOnce(['❯']); + const sendKey = vi.fn().mockResolvedValue(undefined); + + const done = driver.postLaunch!(capturePane, sendKey); + await vi.advanceTimersByTimeAsync(1_500); + await Promise.resolve(); + await vi.advanceTimersByTimeAsync(700); + await Promise.resolve(); + await vi.advanceTimersByTimeAsync(1_500); + await done; + + expect(sendKey).toHaveBeenCalledTimes(1); + expect(sendKey).toHaveBeenCalledWith('Enter'); + }); }); // ── Codex ───────────────────────────────────────────────────────────────────── diff --git a/test/agent/qwen-provider.test.ts b/test/agent/qwen-provider.test.ts index 51d691b61..e7af72c42 100644 --- a/test/agent/qwen-provider.test.ts +++ b/test/agent/qwen-provider.test.ts @@ -274,7 +274,7 @@ describe('QwenProvider', () => { // First send dispatches immediately runtime.send('first'); - await new Promise((resolve) => setTimeout(resolve, 25)); + await waitForSpawnCount(1); const first = lastSpawn(); first.child.stdout.write(`${JSON.stringify({ type: 'stream_event', event: { type: 'message_start', message: { id: 'msg-queue-1' } } })}\n`); first.child.stdout.write(`${JSON.stringify({ type: 'assistant', message: { id: 'assistant-queue-1', content: [{ type: 'text', text: 'Still running' }] } })}\n`); @@ -305,7 +305,7 @@ describe('QwenProvider', () => { provider.onError((_sid, err) => errors.push(err.message)); runtime.send('first'); - await new Promise((resolve) => setTimeout(resolve, 25)); + await waitForSpawnCount(1); const first = lastSpawn(); first.child.stdout.write(`${JSON.stringify({ type: 'stream_event', event: { type: 'message_start', message: { id: 'msg-queue-close-1' } } })}\n`); 
first.child.stdout.write(`${JSON.stringify({ type: 'result', is_error: false, result: 'done' })}\n`); diff --git a/test/daemon/cc-presets.test.ts b/test/daemon/cc-presets.test.ts new file mode 100644 index 000000000..3ad82ceaa --- /dev/null +++ b/test/daemon/cc-presets.test.ts @@ -0,0 +1,66 @@ +import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'; +import { mkdtemp, mkdir, rm, writeFile } from 'node:fs/promises'; +import { tmpdir } from 'node:os'; +import { join } from 'node:path'; + +const state = vi.hoisted(() => ({ + home: '', +})); + +vi.mock('node:os', async (importOriginal) => { + const actual = await importOriginal(); + return { + ...actual, + homedir: () => state.home, + }; +}); + +describe('cc presets', () => { + beforeEach(async () => { + state.home = await mkdtemp(join(tmpdir(), 'imcodes-cc-presets-')); + await mkdir(join(state.home, '.imcodes'), { recursive: true }); + await writeFile( + join(state.home, '.imcodes', 'cc-presets.json'), + JSON.stringify([ + { + name: 'minimax', + env: { + ANTHROPIC_BASE_URL: 'https://api.minimax.io/anthropic', + ANTHROPIC_AUTH_TOKEN: 'test-token', + ANTHROPIC_MODEL: 'MiniMax-M2.7', + }, + contextWindow: 200000, + }, + ]), + 'utf8', + ); + }); + + afterEach(async () => { + vi.resetModules(); + if (state.home) await rm(state.home, { recursive: true, force: true }); + state.home = ''; + }); + + it('matches preset names case-insensitively', async () => { + const { getPreset } = await import('../../src/daemon/cc-presets.js'); + + await expect(getPreset('minimax')).resolves.toMatchObject({ name: 'minimax' }); + await expect(getPreset('MiniMax')).resolves.toMatchObject({ name: 'minimax' }); + }); + + it('resolves env and context hints for mixed-case preset names', async () => { + const { resolvePresetEnv } = await import('../../src/daemon/cc-presets.js'); + + await expect(resolvePresetEnv('MiniMax')).resolves.toMatchObject({ + ANTHROPIC_BASE_URL: 'https://api.minimax.io/anthropic', + ANTHROPIC_AUTH_TOKEN: 
'test-token', + ANTHROPIC_MODEL: 'MiniMax-M2.7', + ANTHROPIC_SMALL_FAST_MODEL: 'MiniMax-M2.7', + ANTHROPIC_DEFAULT_SONNET_MODEL: 'MiniMax-M2.7', + ANTHROPIC_DEFAULT_OPUS_MODEL: 'MiniMax-M2.7', + ANTHROPIC_DEFAULT_HAIKU_MODEL: 'MiniMax-M2.7', + IMCODES_CONTEXT_WINDOW: '200000', + }); + }); +}); diff --git a/test/daemon/codex-watcher.test.ts b/test/daemon/codex-watcher.test.ts index 02aa18825..4b16897c9 100644 --- a/test/daemon/codex-watcher.test.ts +++ b/test/daemon/codex-watcher.test.ts @@ -314,8 +314,16 @@ describe('readCwd', () => { // ── startWatching / stopWatching / isWatching ───────────────────────────────── describe('isWatching / stopWatching', () => { + const originalHome = process.env.HOME; + + beforeEach(async () => { + process.env.HOME = await mkdtemp(join(tmpdir(), 'codex-watcher-home-')); + }); + afterEach(() => { stopWatching('session-x'); + if (originalHome === undefined) delete process.env.HOME; + else process.env.HOME = originalHome; }); it('isWatching returns false before startWatching', () => { @@ -323,7 +331,7 @@ describe('isWatching / stopWatching', () => { }); it('isWatching returns true after startWatching', async () => { - // Use a workDir that won't match any real file so watcher just idles + // Use an isolated HOME so the watcher does not scan the developer's real Codex history. 
await startWatching('session-x', '/tmp/__nonexistent_codex_dir__'); expect(isWatching('session-x')).toBe(true); }); diff --git a/test/daemon/command-handler-stop.test.ts b/test/daemon/command-handler-stop.test.ts new file mode 100644 index 000000000..f203427ee --- /dev/null +++ b/test/daemon/command-handler-stop.test.ts @@ -0,0 +1,167 @@ +import { describe, it, expect, vi, beforeEach } from 'vitest'; + +const { stopProjectMock, stopSubSessionMock, loggerErrorMock, loggerWarnMock } = vi.hoisted(() => ({ + stopProjectMock: vi.fn(), + stopSubSessionMock: vi.fn().mockResolvedValue({ ok: true, closed: ['deck_sub_worker'], failed: [] }), + loggerErrorMock: vi.fn(), + loggerWarnMock: vi.fn(), +})); + +vi.mock('../../src/store/session-store.js', () => ({ + listSessions: vi.fn(() => []), + getSession: vi.fn(() => null), + upsertSession: vi.fn(), + removeSession: vi.fn(), +})); + +vi.mock('../../src/agent/session-manager.js', () => ({ + startProject: vi.fn(), + stopProject: stopProjectMock, + teardownProject: vi.fn(), + getTransportRuntime: vi.fn(() => undefined), + launchTransportSession: vi.fn(), + isProviderSessionBound: vi.fn(() => false), + persistSessionRecord: vi.fn(), + relaunchSessionWithSettings: vi.fn(), +})); + +vi.mock('../../src/agent/tmux.js', () => ({ + sendKeys: vi.fn(), + sendKeysDelayedEnter: vi.fn(), + sendRawInput: vi.fn(), + resizeSession: vi.fn(), + sendKey: vi.fn(), + getPaneStartCommand: vi.fn(), +})); + +vi.mock('../../src/router/message-router.js', () => ({ + routeMessage: vi.fn(), +})); + +vi.mock('../../src/daemon/terminal-streamer.js', () => ({ + terminalStreamer: { + subscribe: vi.fn(), + unsubscribe: vi.fn(), + start: vi.fn(), + stop: vi.fn(), + }, +})); + +vi.mock('../../src/daemon/timeline-emitter.js', () => ({ + timelineEmitter: { + emit: vi.fn(), + on: vi.fn(() => () => {}), + off: vi.fn(), + }, +})); + +vi.mock('../../src/daemon/timeline-store.js', () => ({ + timelineStore: { + append: vi.fn(), + read: vi.fn(() => []), + clear: vi.fn(), + 
}, +})); + +vi.mock('../../src/daemon/subsession-manager.js', () => ({ + startSubSession: vi.fn(), + stopSubSession: stopSubSessionMock, + rebuildSubSessions: vi.fn(), + detectShells: vi.fn().mockResolvedValue([]), + readSubSessionResponse: vi.fn(), + subSessionName: (id: string) => `deck_sub_${id}`, +})); + +vi.mock('../../src/daemon/p2p-orchestrator.js', () => ({ + startP2pRun: vi.fn(), + cancelP2pRun: vi.fn(), + getP2pRun: vi.fn(() => undefined), + listP2pRuns: vi.fn(() => []), + serializeP2pRun: vi.fn(), +})); + +vi.mock('../../src/daemon/repo-handler.js', () => ({ + handleRepoCommand: vi.fn(), +})); + +vi.mock('../../src/daemon/file-transfer-handler.js', () => ({ + handleFileUpload: vi.fn(), + handleFileDownload: vi.fn(), + createProjectFileHandle: vi.fn(), + lookupAttachment: vi.fn(() => undefined), +})); + +vi.mock('../../src/daemon/preview-relay.js', () => ({ + handlePreviewCommand: vi.fn(), +})); + +vi.mock('../../src/daemon/provider-sessions.js', () => ({ + listProviderSessions: vi.fn(() => []), +})); + +vi.mock('../../src/util/logger.js', () => ({ + default: { + info: vi.fn(), + warn: loggerWarnMock, + error: loggerErrorMock, + debug: vi.fn(), + }, +})); + +vi.mock('../../src/util/imc-dir.js', () => ({ + ensureImcDir: vi.fn().mockResolvedValue('/tmp/imc'), + imcSubDir: vi.fn((dir: string, sub: string) => `${dir}/.imc/${sub}`), +})); + +import { handleWebCommand } from '../../src/daemon/command-handler.js'; + +const flushAsync = () => new Promise((resolve) => setTimeout(resolve, 0)); + +describe('handleWebCommand shutdown failure paths', () => { + const serverLink = { + send: vi.fn(), + sendBinary: vi.fn(), + sendTimelineEvent: vi.fn(), + daemonVersion: '0.1.0', + }; + + beforeEach(() => { + vi.clearAllMocks(); + }); + + it('reports structured session.stop failures without losing later command handling', async () => { + stopProjectMock.mockResolvedValueOnce({ + ok: false, + closed: [], + failed: [{ sessionName: 'deck_proj_brain', stage: 'verify', message: 
'session still exists after kill' }], + }); + + handleWebCommand({ type: 'session.stop', project: 'proj' }, serverLink as any); + await flushAsync(); + + expect(serverLink.send).toHaveBeenCalledWith({ + type: 'session.error', + project: 'proj', + message: 'Shutdown failed: deck_proj_brain:verify', + }); + + handleWebCommand({ type: 'subsession.stop', sessionName: 'deck_sub_worker' }, serverLink as any); + await flushAsync(); + + expect(stopSubSessionMock).toHaveBeenCalledWith('deck_sub_worker', serverLink); + }); + + it('reports thrown session.stop failures instead of only logging them', async () => { + stopProjectMock.mockRejectedValueOnce(new Error('backend unavailable')); + + handleWebCommand({ type: 'session.stop', project: 'proj' }, serverLink as any); + await flushAsync(); + + expect(loggerErrorMock).toHaveBeenCalled(); + expect(serverLink.send).toHaveBeenCalledWith({ + type: 'session.error', + project: 'proj', + message: 'Shutdown failed: backend unavailable', + }); + }); +}); diff --git a/test/daemon/command-handler-transport-queue.test.ts b/test/daemon/command-handler-transport-queue.test.ts new file mode 100644 index 000000000..53d0d07b5 --- /dev/null +++ b/test/daemon/command-handler-transport-queue.test.ts @@ -0,0 +1,220 @@ +import { beforeEach, describe, expect, it, vi } from 'vitest'; + +const { getSessionMock, getTransportRuntimeMock, emitMock, relaunchSessionWithSettingsMock } = vi.hoisted(() => ({ + getSessionMock: vi.fn(), + getTransportRuntimeMock: vi.fn(), + emitMock: vi.fn(), + relaunchSessionWithSettingsMock: vi.fn(), +})); + +vi.mock('../../src/store/session-store.js', () => ({ + listSessions: vi.fn(() => []), + getSession: getSessionMock, + upsertSession: vi.fn(), + removeSession: vi.fn(), + updateSessionState: vi.fn(), +})); + +vi.mock('../../src/agent/session-manager.js', () => ({ + startProject: vi.fn(), + stopProject: vi.fn(), + teardownProject: vi.fn(), + getTransportRuntime: getTransportRuntimeMock, + launchTransportSession: vi.fn(), + 
isProviderSessionBound: vi.fn(() => false), + persistSessionRecord: vi.fn(), + relaunchSessionWithSettings: relaunchSessionWithSettingsMock, +})); + +vi.mock('../../src/agent/tmux.js', () => ({ + sendKeys: vi.fn(), + sendKeysDelayedEnter: vi.fn(), + sendRawInput: vi.fn(), + resizeSession: vi.fn(), + sendKey: vi.fn(), + getPaneStartCommand: vi.fn(), +})); + +vi.mock('../../src/router/message-router.js', () => ({ + routeMessage: vi.fn(), +})); + +vi.mock('../../src/daemon/terminal-streamer.js', () => ({ + terminalStreamer: { + subscribe: vi.fn(), + unsubscribe: vi.fn(), + start: vi.fn(), + stop: vi.fn(), + }, +})); + +vi.mock('../../src/daemon/timeline-emitter.js', () => ({ + timelineEmitter: { + emit: emitMock, + on: vi.fn(() => () => {}), + off: vi.fn(), + epoch: 0, + replay: vi.fn(() => ({ events: [], truncated: false })), + }, +})); + +vi.mock('../../src/daemon/timeline-store.js', () => ({ + timelineStore: { + append: vi.fn(), + read: vi.fn(() => []), + clear: vi.fn(), + }, +})); + +vi.mock('../../src/daemon/subsession-manager.js', () => ({ + startSubSession: vi.fn(), + stopSubSession: vi.fn(), + rebuildSubSessions: vi.fn(), + detectShells: vi.fn().mockResolvedValue([]), + readSubSessionResponse: vi.fn(), + subSessionName: (id: string) => `deck_sub_${id}`, +})); + +vi.mock('../../src/daemon/p2p-orchestrator.js', () => ({ + startP2pRun: vi.fn(), + cancelP2pRun: vi.fn(), + getP2pRun: vi.fn(() => undefined), + listP2pRuns: vi.fn(() => []), + serializeP2pRun: vi.fn(), +})); + +vi.mock('../../src/daemon/repo-handler.js', () => ({ + handleRepoCommand: vi.fn(), +})); + +vi.mock('../../src/daemon/file-transfer-handler.js', () => ({ + handleFileUpload: vi.fn(), + handleFileDownload: vi.fn(), + createProjectFileHandle: vi.fn(), + lookupAttachment: vi.fn(() => undefined), +})); + +vi.mock('../../src/daemon/preview-relay.js', () => ({ + handlePreviewCommand: vi.fn(), +})); + +vi.mock('../../src/daemon/provider-sessions.js', () => ({ + listProviderSessions: vi.fn(() => []), 
+})); + +vi.mock('../../src/util/logger.js', () => ({ + default: { + info: vi.fn(), + warn: vi.fn(), + error: vi.fn(), + debug: vi.fn(), + }, +})); + +vi.mock('../../src/util/imc-dir.js', () => ({ + ensureImcDir: vi.fn().mockResolvedValue('/tmp/imc'), + imcSubDir: vi.fn((dir: string, sub: string) => `${dir}/.imc/${sub}`), +})); + +import { handleWebCommand } from '../../src/daemon/command-handler.js'; + +const flushAsync = () => new Promise((resolve) => setTimeout(resolve, 0)); + +describe('handleWebCommand transport queue behavior', () => { + const serverLink = { + send: vi.fn(), + sendBinary: vi.fn(), + sendTimelineEvent: vi.fn(), + daemonVersion: '0.1.0', + }; + + beforeEach(() => { + vi.clearAllMocks(); + getSessionMock.mockReturnValue({ + name: 'deck_transport_brain', + projectName: 'transport', + role: 'brain', + agentType: 'claude-code-sdk', + runtimeType: 'transport', + state: 'running', + }); + }); + + it('does not emit a user.message for queued transport sends', async () => { + getTransportRuntimeMock.mockReturnValue({ + send: vi.fn(() => 'queued'), + pendingCount: 2, + pendingMessages: ['queued msg', 'queued msg 2'], + }); + + handleWebCommand({ type: 'session.send', session: 'deck_transport_brain', text: 'queued msg', commandId: 'cmd-queued' }, serverLink as any); + await flushAsync(); + + expect(emitMock).not.toHaveBeenCalledWith('deck_transport_brain', 'user.message', { text: 'queued msg' }); + expect(emitMock).toHaveBeenCalledWith( + 'deck_transport_brain', + 'session.state', + { state: 'queued', pendingCount: 2, pendingMessages: ['queued msg', 'queued msg 2'] }, + expect.any(Object), + ); + expect(emitMock).toHaveBeenCalledWith('deck_transport_brain', 'command.ack', { commandId: 'cmd-queued', status: 'accepted' }); + }); + + it('emits a user.message immediately for dispatched transport sends', async () => { + getTransportRuntimeMock.mockReturnValue({ + send: vi.fn(() => 'sent'), + pendingCount: 0, + }); + + handleWebCommand({ type: 'session.send', 
session: 'deck_transport_brain', text: 'sent msg', commandId: 'cmd-sent' }, serverLink as any); + await flushAsync(); + + expect(emitMock).toHaveBeenCalledWith('deck_transport_brain', 'user.message', { text: 'sent msg', allowDuplicate: true }); + expect(emitMock).not.toHaveBeenCalledWith( + 'deck_transport_brain', + 'session.state', + expect.objectContaining({ state: 'queued' }), + expect.anything(), + ); + }); + + it('waits for an in-flight settings restart before sending the first transport message', async () => { + let restartResolved = false; + let resolveRestart: (() => void) | null = null; + relaunchSessionWithSettingsMock.mockImplementation( + () => new Promise((resolve) => { + resolveRestart = () => { + restartResolved = true; + resolve(); + }; + }), + ); + getSessionMock.mockImplementation(() => ({ + name: 'deck_transport_brain', + projectName: 'transport', + role: 'brain', + agentType: restartResolved ? 'claude-code-sdk' : 'claude-code', + runtimeType: restartResolved ? 'transport' : 'process', + state: 'idle', + })); + const transportSend = vi.fn(() => 'sent'); + getTransportRuntimeMock.mockImplementation(() => ( + restartResolved ? 
{ send: transportSend, pendingCount: 0 } : undefined + )); + + handleWebCommand({ type: 'session.restart', sessionName: 'deck_transport_brain', agentType: 'claude-code-sdk' }, serverLink as any); + handleWebCommand({ type: 'session.send', session: 'deck_transport_brain', text: 'after restart', commandId: 'cmd-after-restart' }, serverLink as any); + + await flushAsync(); + expect(transportSend).not.toHaveBeenCalled(); + expect(emitMock).not.toHaveBeenCalledWith('deck_transport_brain', 'command.ack', { commandId: 'cmd-after-restart', status: 'accepted' }); + + resolveRestart?.(); + await flushAsync(); + await flushAsync(); + + expect(transportSend).toHaveBeenCalledWith('after restart'); + expect(emitMock).toHaveBeenCalledWith('deck_transport_brain', 'user.message', { text: 'after restart', allowDuplicate: true }); + expect(emitMock).toHaveBeenCalledWith('deck_transport_brain', 'command.ack', { commandId: 'cmd-after-restart', status: 'accepted' }); + }); +}); diff --git a/test/daemon/daemon-upgrade-guard.test.ts b/test/daemon/daemon-upgrade-guard.test.ts new file mode 100644 index 000000000..b1a46c1b0 --- /dev/null +++ b/test/daemon/daemon-upgrade-guard.test.ts @@ -0,0 +1,27 @@ +import { describe, expect, it } from 'vitest'; + +import { getActiveP2pRunsBlockingDaemonUpgrade } from '../../src/daemon/command-handler.js'; + +describe('getActiveP2pRunsBlockingDaemonUpgrade', () => { + it('returns active runs that should block daemon upgrades', () => { + const blocked = getActiveP2pRunsBlockingDaemonUpgrade([ + { id: 'run_running', status: 'running' }, + { id: 'run_dispatched', status: 'dispatched' }, + { id: 'run_completed', status: 'completed' }, + { id: 'run_cancelled', status: 'cancelled' }, + ] as any); + + expect(blocked.map((run) => run.id)).toEqual(['run_running', 'run_dispatched']); + }); + + it('returns an empty list when all runs are terminal', () => { + const blocked = getActiveP2pRunsBlockingDaemonUpgrade([ + { id: 'run_completed', status: 'completed' }, + { 
id: 'run_failed', status: 'failed' }, + { id: 'run_cancelled', status: 'cancelled' }, + { id: 'run_timed_out', status: 'timed_out' }, + ] as any); + + expect(blocked).toEqual([]); + }); +}); diff --git a/test/daemon/launch-session-codex.test.ts b/test/daemon/launch-session-codex.test.ts index a3f0075db..a99be6db7 100644 --- a/test/daemon/launch-session-codex.test.ts +++ b/test/daemon/launch-session-codex.test.ts @@ -70,16 +70,20 @@ vi.mock('../../src/agent/codex-runtime-config.js', () => ({ }), })); -import { launchSession } from '../../src/agent/session-manager.js'; +import { launchSession, setSessionEventCallback } from '../../src/agent/session-manager.js'; // ── Tests ───────────────────────────────────────────────────────────────────── describe('launchSession — Codex ID handling', () => { beforeEach(() => { vi.clearAllMocks(); + setSessionEventCallback(() => {}); }); it('assigns an explicit codexSessionId before first launch and persists it', async () => { + const onSessionEvent = vi.fn(); + setSessionEventCallback(onSessionEvent); + await launchSession({ name: 'deck_codex_brain', projectName: 'test', @@ -97,5 +101,7 @@ describe('launchSession — Codex ID handling', () => { const upsertCalls = mocks.upsertSession.mock.calls; const lastRecord = upsertCalls[upsertCalls.length - 1][0]; expect(lastRecord.codexSessionId).toBe('new-codex-uuid'); + expect(lastRecord.state).toBe('idle'); + expect(onSessionEvent).toHaveBeenCalledWith('started', 'deck_codex_brain', 'idle'); }); }); diff --git a/test/daemon/launch-session-opencode.test.ts b/test/daemon/launch-session-opencode.test.ts index df2a6d833..7eceb9f44 100644 --- a/test/daemon/launch-session-opencode.test.ts +++ b/test/daemon/launch-session-opencode.test.ts @@ -96,6 +96,7 @@ describe('launchSession — OpenCode ID handling', () => { const upsertCalls = mocks.upsertSession.mock.calls; const lastRecord = upsertCalls[upsertCalls.length - 1][0]; expect(lastRecord.opencodeSessionId).toBe('oc-main-uuid'); + 
expect(lastRecord.state).toBe('idle'); expect(mocks.discoverOpenCodeSessionId).toHaveBeenCalledWith('/proj', expect.objectContaining({ exactDirectory: '/proj', knownSessionIds: ['old-session'], diff --git a/test/daemon/p2p-behavioral.test.ts b/test/daemon/p2p-behavioral.test.ts index 5fab4930c..cd17e69ae 100644 --- a/test/daemon/p2p-behavioral.test.ts +++ b/test/daemon/p2p-behavioral.test.ts @@ -20,6 +20,9 @@ function makeRun(overrides: Partial = {}): P2pRun { totalTargets: 2, mode: 'audit', status: 'running', + runPhase: 'running', + summaryPhase: null, + activePhase: 'hop', contextFilePath: '/tmp/test-discussion.md', userText: 'review this code', timeoutMs: 300000, @@ -34,6 +37,9 @@ function makeRun(overrides: Partial = {}): P2pRun { currentRound: 1, allTargets: [], extraPrompt: '', + hopStartedAt: Date.now(), + hopStates: [], + activeTargetSessions: [], _cancelled: false, ...overrides, }; @@ -113,6 +119,27 @@ describe('buildHopPrompt — production function', () => { const extraIdx = prompt.indexOf('Additional instructions'); expect(roundIdx).toBeLessThan(extraIdx); }); + + it('includes plan final-summary instructions for model-side target inference', () => { + const mode = getP2pMode('plan'); + const run = makeRun({ + mode: 'plan', + userText: '根据讨论结果把完整方案写到 @docs/implementation-plan.md', + }); + const prompt = buildHopPrompt(run, mode, { + session: 'deck_proj_brain', + sectionHeader: 'brain — Final Summary', + instruction: `${mode!.summaryPrompt}\nUse the discussion evidence as source material.`, + isInitial: false, + }); + + expect(prompt).toContain('Discussion file: /tmp/test-discussion.md'); + expect(prompt).toContain('Final summary instructions:'); + expect(prompt).toContain('Acceptance and Validation'); + expect(prompt).toContain('Infer whether the user context specifies a concrete destination file for the final plan'); + expect(prompt).toContain('If a concrete destination file is clear from the user context, write the complete plan there.'); + 
expect(prompt).toContain('If you wrote the plan to another file, still append a short note under "## brain — Final Summary" in the discussion file'); + }); }); // ── Rounds clamping (via P2P_MAX_ROUNDS constant in orchestrator) ───────────── diff --git a/test/daemon/p2p-config-mode.test.ts b/test/daemon/p2p-config-mode.test.ts index 78f32e2b7..3a6cc1301 100644 --- a/test/daemon/p2p-config-mode.test.ts +++ b/test/daemon/p2p-config-mode.test.ts @@ -301,6 +301,12 @@ describe('getComboRoundCount', () => { }); describe('COMBO_PRESETS', () => { + it('omits deprecated brainstorm presets from the default combo list', () => { + const presetKeys = COMBO_PRESETS.map((preset) => preset.key); + expect(presetKeys).not.toContain('brainstorm>discuss>discuss>plan'); + expect(presetKeys).not.toContain('brainstorm>plan'); + }); + it('all presets have valid mode keys in their pipeline', () => { for (const preset of COMBO_PRESETS) { for (const modeKey of preset.pipeline) { diff --git a/test/daemon/p2p-orchestrator.test.ts b/test/daemon/p2p-orchestrator.test.ts index 07ce9aacc..3576400e5 100644 --- a/test/daemon/p2p-orchestrator.test.ts +++ b/test/daemon/p2p-orchestrator.test.ts @@ -77,9 +77,8 @@ function pathFromPrompt(prompt: string): string { } function headingFromPrompt(prompt: string): string { - const match = prompt.match(/Add a new heading "## ([^"]+)"/); - if (!match) throw new Error(`No heading found in prompt: ${prompt}`); - return match[1]; + const match = prompt.match(/Add a new heading "## ([^"]+)"/) ?? prompt.match(/under "?## ([^"\n]+)"?/); + return match?.[1] ?? 
'Automated Test Output'; } async function waitForStatus(runId: string, expected: P2pRunStatus[], maxMs = 10000): Promise { @@ -330,6 +329,39 @@ describe('P2P orchestrator — parallel rounds', () => { expect(done.summaryPhase).toBe('completed'); }); + it('completes the discussion when a single hop times out', async () => { + detectStatusAsyncMock.mockImplementation(async (session: string) => ( + session === 'deck_proj_w1' ? 'running' : 'idle' + )); + sendKeysDelayedEnterMock.mockImplementation(async (session: string, prompt: string) => { + if (session === 'deck_proj_w1') return; + const filePath = pathFromPrompt(prompt); + const heading = headingFromPrompt(prompt); + await appendFile(filePath, `\n## ${heading}\n\nBRAIN-${session}\n`, 'utf8'); + setTimeout(() => notifySessionIdle(session), 20); + }); + + const run = await startP2pRun( + 'deck_proj_brain', + [{ session: 'deck_proj_w1', mode: 'audit' }], + 'single hop timeout should not fail the run', + [], + serverLinkMock as any, + 1, + undefined, + undefined, + 120, + ); + + const done = await waitForStatus(run.id, ['completed']); + expect(done.status).toBe('completed'); + expect(done.hopStates).toHaveLength(1); + expect(done.hopStates[0].status).toBe('timed_out'); + expect(done.summaryPhase).toBe('completed'); + const content = await readFile(done.contextFilePath, 'utf8'); + expect(content).toContain('BRAIN-deck_proj_brain'); + }); + it('preserves completed evidence and still summarizes on partial hop failure', async () => { sendKeysDelayedEnterMock.mockImplementation(async (session: string, prompt: string) => { const filePath = pathFromPrompt(prompt); @@ -451,6 +483,35 @@ describe('P2P orchestrator — parallel rounds', () => { expect(sendKeyMock).toHaveBeenCalled(); }); + it('treats cancel on a terminal run as close and removes it from memory', async () => { + sendKeysDelayedEnterMock.mockImplementation(async (session: string, prompt: string) => { + if (session === 'deck_proj_w1') return; + const filePath = 
pathFromPrompt(prompt); + const heading = headingFromPrompt(prompt); + await appendFile(filePath, `\n## ${heading}\n\nBRAIN-${session}\n`, 'utf8'); + setTimeout(() => notifySessionIdle(session), 20); + }); + + const run = await startP2pRun( + 'deck_proj_brain', + [{ session: 'deck_proj_w1', mode: 'audit' }], + 'close failed/timed-out p2p', + [], + serverLinkMock as any, + 1, + undefined, + undefined, + 120, + ); + + await waitForStatus(run.id, ['completed']); + expect(getP2pRun(run.id)?.status).toBe('completed'); + + const closed = await cancelP2pRun(run.id, serverLinkMock as any); + expect(closed).toBe(true); + expect(getP2pRun(run.id)).toBeUndefined(); + }); + it('emits additive hop/run payload fields without breaking legacy fields', async () => { const run = await startP2pRun( 'deck_proj_brain', @@ -472,4 +533,88 @@ describe('P2P orchestrator — parallel rounds', () => { expect(payload.summary_phase).toBe('completed'); expect(payload.hop_counts?.completed).toBeGreaterThanOrEqual(1); }); + + it('projects all active hops into all_nodes for parallel round progress', () => { + const run: P2pRun = { + id: 'run_parallel', + discussionId: 'disc_parallel', + mainSession: 'deck_proj_brain', + initiatorSession: 'deck_proj_brain', + currentTargetSession: 'deck_proj_w1', + finalReturnSession: 'deck_proj_brain', + remainingTargets: [ + { session: 'deck_proj_w1', mode: 'audit' }, + { session: 'deck_proj_w2', mode: 'review' }, + ], + totalTargets: 2, + mode: 'discuss', + status: 'running', + runPhase: 'round_execution', + summaryPhase: null, + activePhase: 'hop', + contextFilePath: '/tmp/run_parallel.md', + userText: 'parallel progress', + timeoutMs: 120000, + resultSummary: null, + completedHops: [], + skippedHops: [], + error: null, + createdAt: new Date().toISOString(), + updatedAt: new Date().toISOString(), + completedAt: null, + rounds: 1, + currentRound: 1, + allTargets: [ + { session: 'deck_proj_w1', mode: 'audit' }, + { session: 'deck_proj_w2', mode: 'review' }, + ], + 
extraPrompt: '', + hopStartedAt: Date.now(), + hopStates: [ + { + hop_index: 1, + round_index: 1, + session: 'deck_proj_w1', + mode: 'audit', + status: 'running', + started_at: Date.now(), + completed_at: null, + error: null, + output_path: null, + section_header: 'W1', + artifact_path: '/tmp/run_parallel.round1.hop1.md', + working_path: null, + baseline_size: 0, + baseline_content: '', + }, + { + hop_index: 2, + round_index: 1, + session: 'deck_proj_w2', + mode: 'review', + status: 'dispatched', + started_at: Date.now(), + completed_at: null, + error: null, + output_path: null, + section_header: 'W2', + artifact_path: '/tmp/run_parallel.round1.hop2.md', + working_path: null, + baseline_size: 0, + baseline_content: '', + }, + ], + activeTargetSessions: ['deck_proj_w1', 'deck_proj_w2'], + _cancelled: false, + }; + + const payload = serializeP2pRun(run); + const activeNodes = payload.all_nodes?.filter((node) => node.phase === 'hop' && node.status === 'active') ?? []; + const pendingNodes = payload.all_nodes?.filter((node) => node.phase === 'hop' && node.status === 'pending') ?? 
[]; + + expect(activeNodes.map((node) => node.session)).toEqual(['deck_proj_w1', 'deck_proj_w2']); + expect(pendingNodes).toHaveLength(0); + expect(payload.current_target_session).toBe('deck_proj_w1'); + expect(payload.active_hop_number).toBe(1); + }); }); diff --git a/test/daemon/p2p-parser.test.ts b/test/daemon/p2p-parser.test.ts index ec1f6b38b..b3e71ddb8 100644 --- a/test/daemon/p2p-parser.test.ts +++ b/test/daemon/p2p-parser.test.ts @@ -248,6 +248,51 @@ describe('structured P2P routing via WS fields', () => { (startP2pRun as ReturnType).mockResolvedValue({ id: 'run-1' }); }); + it('structured combo mode with p2pSessionConfig filters __all__ expansion to enabled participants', async () => { + handleWebCommand({ + type: 'session.send', + sessionName: 'deck_proj_brain', + text: 'review this code', + commandId: 'cmd-combo-config', + p2pMode: 'brainstorm>discuss', + p2pSessionConfig: { + deck_proj_w1: { enabled: true, mode: 'audit' }, + deck_proj_w2: { enabled: false, mode: 'review' }, + }, + }, mockServerLink as any); + + await new Promise((resolve) => setTimeout(resolve, 0)); + + expect(startP2pRun).toHaveBeenCalledTimes(1); + const [_initiator, targets] = (startP2pRun as ReturnType).mock.calls[0]; + expect(targets).toEqual([ + { session: 'deck_proj_w1', mode: 'brainstorm>discuss' }, + ]); + }); + + it('config mode still uses per-session configured modes', async () => { + handleWebCommand({ + type: 'session.send', + sessionName: 'deck_proj_brain', + text: 'review this code', + commandId: 'cmd-config-mode', + p2pMode: 'config', + p2pSessionConfig: { + deck_proj_w1: { enabled: true, mode: 'audit' }, + deck_proj_w2: { enabled: true, mode: 'review' }, + }, + }, mockServerLink as any); + + await new Promise((resolve) => setTimeout(resolve, 0)); + + expect(startP2pRun).toHaveBeenCalledTimes(1); + const [_initiator, targets] = (startP2pRun as ReturnType).mock.calls[0]; + expect(targets).toEqual([ + { session: 'deck_proj_w1', mode: 'audit' }, + { session: 'deck_proj_w2', 
mode: 'review' }, + ]); + }); + it('p2pAtTargets with __all__ expands to all active sessions', async () => { handleWebCommand({ type: 'session.send', diff --git a/test/daemon/provider-sessions.test.ts b/test/daemon/provider-sessions.test.ts index 573e100e5..e0908e28b 100644 --- a/test/daemon/provider-sessions.test.ts +++ b/test/daemon/provider-sessions.test.ts @@ -3,8 +3,13 @@ */ import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest'; import { TRANSPORT_MSG } from '../../shared/transport-events.js'; +import { setTransportRelaySend, broadcastProviderStatus } from '../../src/daemon/transport-relay.js'; describe('provider session listing', () => { + beforeEach(() => { + vi.resetModules(); + }); + it('TRANSPORT_MSG has LIST_SESSIONS and SESSIONS_RESPONSE constants', () => { expect(TRANSPORT_MSG.LIST_SESSIONS).toBe('provider.list_sessions'); expect(TRANSPORT_MSG.SESSIONS_RESPONSE).toBe('provider.sessions_response'); @@ -15,7 +20,7 @@ describe('provider session listing', () => { vi.doMock('../../src/agent/provider-registry.js', () => ({ getProvider: () => undefined, })); - const { listProviderSessions } = await import('../../src/daemon/command-handler.js'); + const { listProviderSessions } = await import('../../src/daemon/provider-sessions.js'); const sessions = await listProviderSessions('openclaw'); expect(sessions).toEqual([]); vi.doUnmock('../../src/agent/provider-registry.js'); @@ -27,7 +32,7 @@ describe('provider session listing', () => { capabilities: { sessionRestore: false }, }), })); - const mod = await import('../../src/daemon/command-handler.js'); + const mod = await import('../../src/daemon/provider-sessions.js'); // Re-import to pick up new mock const sessions = await mod.listProviderSessions('openclaw'); expect(sessions).toEqual([]); @@ -36,15 +41,6 @@ describe('provider session listing', () => { }); describe('broadcastProviderStatus auto-push', () => { - let setTransportRelaySend: typeof 
import('../../src/daemon/transport-relay.js')['setTransportRelaySend']; - let broadcastProviderStatus: typeof import('../../src/daemon/transport-relay.js')['broadcastProviderStatus']; - - beforeEach(async () => { - const mod = await import('../../src/daemon/transport-relay.js'); - setTransportRelaySend = mod.setTransportRelaySend; - broadcastProviderStatus = mod.broadcastProviderStatus; - }); - it('sends provider.status message to server', () => { const sent: Record[] = []; setTransportRelaySend((msg) => sent.push(msg)); diff --git a/test/daemon/sdk-transport-restore.test.ts b/test/daemon/sdk-transport-restore.test.ts index f2e1b80ef..347554824 100644 --- a/test/daemon/sdk-transport-restore.test.ts +++ b/test/daemon/sdk-transport-restore.test.ts @@ -111,15 +111,17 @@ vi.mock('../../src/daemon/timeline-emitter.js', () => ({ vi.mock('../../src/agent/tmux.js', () => ({ listSessions: vi.fn().mockResolvedValue([]), - newSession: vi.fn(), killSession: vi.fn(), sessionExists: vi.fn(), isPaneAlive: vi.fn(), respawnPane: vi.fn(), - sendKeys: vi.fn(), sendKey: vi.fn(), capturePane: vi.fn(), showBuffer: vi.fn(), getPaneId: vi.fn(), getPaneCwd: vi.fn(), getPaneStartCommand: vi.fn(), cleanupOrphanFifos: vi.fn(), BACKEND: 'tmux', + newSession: vi.fn().mockResolvedValue(undefined), killSession: vi.fn().mockResolvedValue(undefined), sessionExists: vi.fn(), isPaneAlive: vi.fn(), respawnPane: vi.fn(), + sendKeys: vi.fn(), sendKey: vi.fn(), capturePane: vi.fn(), showBuffer: vi.fn(), getPaneId: vi.fn().mockResolvedValue(undefined), getPaneCwd: vi.fn().mockResolvedValue('/tmp'), getPaneStartCommand: vi.fn().mockResolvedValue(''), cleanupOrphanFifos: vi.fn(), BACKEND: 'tmux', })); -vi.mock('../../src/daemon/jsonl-watcher.js', () => ({ startWatching: vi.fn(), startWatchingFile: vi.fn(), stopWatching: vi.fn(), isWatching: vi.fn(() => false), findJsonlPathBySessionId: vi.fn() })); -vi.mock('../../src/daemon/codex-watcher.js', () => ({ startWatching: vi.fn(), startWatchingSpecificFile: 
vi.fn(), startWatchingById: vi.fn(), stopWatching: vi.fn(), isWatching: vi.fn(() => false), findRolloutPathByUuid: vi.fn(async () => null) })); -vi.mock('../../src/daemon/gemini-watcher.js', () => ({ startWatching: vi.fn(), startWatchingLatest: vi.fn(), stopWatching: vi.fn(), isWatching: vi.fn(() => false) })); -vi.mock('../../src/daemon/opencode-watcher.js', () => ({ startWatching: vi.fn(), stopWatching: vi.fn(), isWatching: vi.fn(() => false) })); +vi.mock('../../src/daemon/jsonl-watcher.js', () => ({ startWatching: vi.fn().mockResolvedValue(undefined), startWatchingFile: vi.fn().mockResolvedValue(undefined), stopWatching: vi.fn(), isWatching: vi.fn(() => false), findJsonlPathBySessionId: vi.fn(() => '/tmp/mock.jsonl') })); +vi.mock('../../src/daemon/codex-watcher.js', () => ({ startWatching: vi.fn().mockResolvedValue(undefined), startWatchingSpecificFile: vi.fn().mockResolvedValue(undefined), startWatchingById: vi.fn().mockResolvedValue(undefined), stopWatching: vi.fn(), isWatching: vi.fn(() => false), findRolloutPathByUuid: vi.fn(async () => null) })); +vi.mock('../../src/daemon/gemini-watcher.js', () => ({ startWatching: vi.fn().mockResolvedValue(undefined), startWatchingLatest: vi.fn().mockResolvedValue(undefined), stopWatching: vi.fn(), isWatching: vi.fn(() => false) })); +vi.mock('../../src/daemon/opencode-watcher.js', () => ({ startWatching: vi.fn().mockResolvedValue(undefined), stopWatching: vi.fn(), isWatching: vi.fn(() => false) })); vi.mock('../../src/agent/structured-session-bootstrap.js', () => ({ resolveStructuredSessionBootstrap: vi.fn(async (x) => x) })); vi.mock('../../src/agent/qwen-runtime-config.js', () => ({ getQwenRuntimeConfig: vi.fn(async () => null) })); +vi.mock('../../src/agent/sdk-runtime-config.js', () => ({ getClaudeSdkRuntimeConfig: vi.fn(async () => ({})) })); +vi.mock('../../src/agent/codex-runtime-config.js', () => ({ getCodexRuntimeConfig: vi.fn(async () => ({})) })); vi.mock('../../src/agent/provider-display.js', () => ({ 
getQwenDisplayMetadata: vi.fn(() => ({})) })); vi.mock('../../src/agent/provider-quota.js', () => ({ getQwenOAuthQuotaUsageLabel: vi.fn(() => '') })); vi.mock('../../src/agent/agent-version.js', () => ({ getAgentVersion: vi.fn(async () => 'test') })); @@ -129,7 +131,8 @@ vi.mock('../../src/repo/cache.js', () => ({ repoCache: { invalidate: vi.fn() } } vi.mock('../../src/agent/brain-dispatcher.js', () => ({ BrainDispatcher: vi.fn().mockImplementation(() => ({ start: vi.fn(), stop: vi.fn() })) })); import { connectProvider, disconnectAll } from '../../src/agent/provider-registry.js'; -import { getTransportRuntime, restoreTransportSessions } from '../../src/agent/session-manager.js'; +import { getTransportRuntime, launchTransportSession, relaunchSessionWithSettings, restoreTransportSessions, setSessionEventCallback } from '../../src/agent/session-manager.js'; +import { newSession } from '../../src/agent/tmux.js'; const flush = async () => { for (let i = 0; i < 4; i++) await new Promise((resolve) => setTimeout(resolve, 0)); @@ -140,6 +143,7 @@ describe('sdk transport session restore', () => { mocks.store.clear(); mocks.claudeRuns.length = 0; mocks.codexRuns.length = 0; + setSessionEventCallback(() => {}); }); afterEach(async () => { @@ -182,6 +186,7 @@ describe('sdk transport session restore', () => { expect(mocks.claudeRuns[0].options.resume).toBe('cc-session-restore'); expect(mocks.claudeRuns[0].options.model).toBe('sonnet'); expect(mocks.claudeRuns[0].options.effort).toBe('high'); + expect(mocks.store.get('deck_sdk_cc_brain')?.state).toBe('idle'); expect(mocks.store.get('deck_sdk_cc_brain')?.modelDisplay).toBe('claude-sonnet-4-6'); expect(mocks.store.get('deck_sdk_cc_brain')?.requestedModel).toBe('sonnet'); expect(mocks.store.get('deck_sdk_cc_brain')?.effort).toBe('high'); @@ -221,7 +226,116 @@ describe('sdk transport session restore', () => { expect(mocks.codexRuns).toHaveLength(1); expect(mocks.codexRuns[0]).toMatchObject({ mode: 'resume', id: 
'codex-thread-restore' }); + expect(mocks.store.get('deck_sdk_cx_brain')?.state).toBe('idle'); expect(mocks.store.get('deck_sdk_cx_brain')?.requestedModel).toBe('gpt-5.4'); expect(mocks.store.get('deck_sdk_cx_brain')?.effort).toBe('medium'); }); + + it('emits started idle when launching a new transport session', async () => { + const onSessionEvent = vi.fn(); + setSessionEventCallback(onSessionEvent); + + await connectProvider('claude-code-sdk', {}); + await launchTransportSession({ + name: 'deck_sdk_new_brain', + projectName: 'sdknew', + role: 'brain', + agentType: 'claude-code-sdk', + projectDir: '/tmp/sdk-new', + requestedModel: 'sonnet', + effort: 'high', + }); + + expect(mocks.store.get('deck_sdk_new_brain')?.state).toBe('idle'); + expect(onSessionEvent).toHaveBeenCalledWith('started', 'deck_sdk_new_brain', 'idle'); + }); + + it('resumes Claude conversation when switching from cli to sdk', async () => { + const name = 'deck_switch_ccsdk_brain'; + const record = { + name, + projectName: 'switchccsdk', + role: 'brain', + agentType: 'claude-code', + projectDir: '/tmp/switch-ccsdk', + state: 'idle', + restarts: 0, + restartTimestamps: [], + createdAt: Date.now(), + updatedAt: Date.now(), + runtimeType: 'process', + ccSessionId: 'cc-session-switch', + }; + mocks.store.set(name, record); + + await connectProvider('claude-code-sdk', {}); + await relaunchSessionWithSettings(record as any, { agentType: 'claude-code-sdk' }); + + const runtime = getTransportRuntime(name); + expect(runtime).toBeDefined(); + expect(mocks.store.get(name)?.agentType).toBe('claude-code-sdk'); + expect(mocks.store.get(name)?.ccSessionId).toBe('cc-session-switch'); + + runtime!.send('What token did I ask you to remember?'); + await flush(); + + expect(mocks.claudeRuns.at(-1)?.options.resume).toBe('cc-session-switch'); + expect(mocks.claudeRuns.at(-1)?.options.sessionId).toBeUndefined(); + }); + + it('preserves Claude resume id when switching from sdk to cli', async () => { + const name = 
'deck_switch_cccli_brain'; + const record = { + name, + projectName: 'switchcccli', + role: 'brain', + agentType: 'claude-code-sdk', + projectDir: '/tmp/switch-cccli', + state: 'idle', + restarts: 0, + restartTimestamps: [], + createdAt: Date.now(), + updatedAt: Date.now(), + runtimeType: 'transport', + providerId: 'claude-code-sdk', + providerSessionId: 'route-cc-switch', + ccSessionId: 'cc-session-switch', + }; + mocks.store.set(name, record); + + await connectProvider('claude-code-sdk', {}); + await relaunchSessionWithSettings(record as any, { agentType: 'claude-code' }); + + expect(mocks.store.get(name)?.agentType).toBe('claude-code'); + expect(mocks.store.get(name)?.ccSessionId).toBe('cc-session-switch'); + expect(String(vi.mocked(newSession).mock.calls.at(-1)?.[1] ?? '')).toContain('cc-session-switch'); + }); + + it('preserves Codex thread id when switching from sdk to cli', async () => { + const name = 'deck_switch_cxcli_brain'; + const record = { + name, + projectName: 'switchcxcli', + role: 'brain', + agentType: 'codex-sdk', + projectDir: '/tmp/switch-cxcli', + state: 'idle', + restarts: 0, + restartTimestamps: [], + createdAt: Date.now(), + updatedAt: Date.now(), + runtimeType: 'transport', + providerId: 'codex-sdk', + providerSessionId: 'route-cx-switch', + codexSessionId: 'codex-thread-switch', + }; + mocks.store.set(name, record); + + await connectProvider('codex-sdk', {}); + await relaunchSessionWithSettings(record as any, { agentType: 'codex' }); + + expect(mocks.store.get(name)?.agentType).toBe('codex'); + expect(mocks.store.get(name)?.codexSessionId).toBe('codex-thread-switch'); + expect(String(vi.mocked(newSession).mock.calls.at(-1)?.[1] ?? 
'')).toContain('codex-thread-switch'); + }); }); diff --git a/test/daemon/session-close.test.ts b/test/daemon/session-close.test.ts new file mode 100644 index 000000000..df67d9850 --- /dev/null +++ b/test/daemon/session-close.test.ts @@ -0,0 +1,185 @@ +import { describe, it, expect, vi, beforeEach } from 'vitest'; + +const { getPanePidsMock, execFileMock } = vi.hoisted(() => ({ + getPanePidsMock: vi.fn().mockResolvedValue([]), + execFileMock: vi.fn(), +})); + +vi.mock('../../src/agent/tmux.js', () => ({ + getPanePids: getPanePidsMock, +})); + +vi.mock('node:child_process', () => ({ + execFile: (...args: unknown[]) => execFileMock(...args), +})); + +import { closeSingleSession, collectProjectCloseTargets, killSessionProcesses } from '../../src/agent/session-close.js'; +import type { SessionRecord } from '../../src/store/session-store.js'; + +function makeRecord(overrides: Partial = {}): SessionRecord { + return { + name: 'deck_proj_brain', + projectName: 'proj', + role: 'brain', + agentType: 'claude-code', + projectDir: '/proj', + state: 'running', + restarts: 0, + restartTimestamps: [], + createdAt: 1, + updatedAt: 1, + ...overrides, + }; +} + +describe('closeSingleSession', () => { + beforeEach(() => { + vi.clearAllMocks(); + execFileMock.mockImplementation((_file: string, _args: string[], _optsOrCb?: unknown, maybeCb?: unknown) => { + const cb = typeof _optsOrCb === 'function' + ? 
_optsOrCb as (err: Error | null, stdout?: string, stderr?: string) => void + : maybeCb as ((err: Error | null, stdout?: string, stderr?: string) => void) | undefined; + cb?.(null, '', ''); + }); + }); + + it('isolates stage failures and does not emit success when verification fails', async () => { + const callOrder: string[] = []; + const result = await closeSingleSession(makeRecord(), { + emitStopping: () => { callOrder.push('emitStopping'); }, + stopWatchers: () => { + callOrder.push('stopWatchers'); + throw new Error('watcher cleanup failed'); + }, + stopTransportRuntime: () => { callOrder.push('stopTransportRuntime'); }, + killProcessRuntime: () => { callOrder.push('killProcessRuntime'); }, + verifyClosed: () => { + callOrder.push('verifyClosed'); + throw new Error('tmux still alive'); + }, + emitSuccess: () => { callOrder.push('emitSuccess'); }, + persistSuccess: () => { callOrder.push('persistSuccess'); }, + emitFailure: () => { callOrder.push('emitFailure'); }, + persistFailure: () => { callOrder.push('persistFailure'); }, + }); + + expect(result.ok).toBe(false); + expect(result.closed).toEqual([]); + expect(result.failed).toEqual([ + { sessionName: 'deck_proj_brain', stage: 'watchers', message: 'watcher cleanup failed' }, + { sessionName: 'deck_proj_brain', stage: 'verify', message: 'tmux still alive' }, + ]); + expect(callOrder).toEqual([ + 'emitStopping', + 'stopWatchers', + 'killProcessRuntime', + 'verifyClosed', + 'emitFailure', + 'persistFailure', + ]); + }); + + it('does not emit success when persistence fails after close verification', async () => { + const callOrder: string[] = []; + const result = await closeSingleSession(makeRecord(), { + emitStopping: () => { callOrder.push('emitStopping'); }, + stopWatchers: () => { callOrder.push('stopWatchers'); }, + stopTransportRuntime: () => { callOrder.push('stopTransportRuntime'); }, + killProcessRuntime: () => { callOrder.push('killProcessRuntime'); }, + verifyClosed: () => { 
callOrder.push('verifyClosed'); }, + persistSuccess: () => { + callOrder.push('persistSuccess'); + throw new Error('db update failed'); + }, + emitSuccess: () => { callOrder.push('emitSuccess'); }, + emitFailure: () => { callOrder.push('emitFailure'); }, + persistFailure: () => { callOrder.push('persistFailure'); }, + }); + + expect(result.ok).toBe(false); + expect(result.closed).toEqual([]); + expect(result.failed).toEqual([ + { sessionName: 'deck_proj_brain', stage: 'persist', message: 'db update failed' }, + ]); + expect(callOrder).toEqual([ + 'emitStopping', + 'stopWatchers', + 'killProcessRuntime', + 'verifyClosed', + 'persistSuccess', + 'emitFailure', + 'persistFailure', + ]); + }); +}); + +describe('killSessionProcesses', () => { + beforeEach(() => { + vi.clearAllMocks(); + execFileMock.mockImplementation((_file: string, _args: string[], _optsOrCb?: unknown, maybeCb?: unknown) => { + const cb = typeof _optsOrCb === 'function' + ? _optsOrCb as (err: Error | null, stdout?: string, stderr?: string) => void + : maybeCb as ((err: Error | null, stdout?: string, stderr?: string) => void) | undefined; + cb?.(null, '', ''); + }); + }); + + it('uses taskkill on Windows instead of pkill/kill', async () => { + const origPlatform = process.platform; + Object.defineProperty(process, 'platform', { value: 'win32' }); + getPanePidsMock.mockResolvedValue(['4321']); + + try { + await killSessionProcesses('deck_proj_brain'); + } finally { + Object.defineProperty(process, 'platform', { value: origPlatform }); + } + + expect(execFileMock).toHaveBeenCalledTimes(1); + expect(execFileMock).toHaveBeenCalledWith( + 'taskkill', + ['/F', '/T', '/PID', '4321'], + { windowsHide: true }, + expect.any(Function), + ); + }); + + it('uses pkill and kill on non-Windows platforms', async () => { + getPanePidsMock.mockResolvedValue(['1234']); + + await killSessionProcesses('deck_proj_brain'); + + expect(execFileMock).toHaveBeenCalledTimes(2); + expect(execFileMock).toHaveBeenNthCalledWith( + 1, + 
'pkill', + ['-9', '-P', '1234'], + expect.any(Function), + ); + expect(execFileMock).toHaveBeenNthCalledWith( + 2, + 'kill', + ['-9', '1234'], + expect.any(Function), + ); + }); +}); + +describe('collectProjectCloseTargets', () => { + it('returns descendants before parents for project shutdown', () => { + const targets = collectProjectCloseTargets('proj', [ + makeRecord({ name: 'deck_proj_brain' }), + makeRecord({ name: 'deck_proj_w1', role: 'w1' }), + makeRecord({ name: 'deck_sub_parent', projectName: 'deck_sub_parent', role: 'w1', parentSession: 'deck_proj_w1' }), + makeRecord({ name: 'deck_sub_child', projectName: 'deck_sub_child', role: 'w1', parentSession: 'deck_sub_parent' }), + makeRecord({ name: 'deck_other_brain', projectName: 'other' }), + ]); + + expect(targets.map((target) => target.name)).toEqual([ + 'deck_sub_child', + 'deck_sub_parent', + 'deck_proj_brain', + 'deck_proj_w1', + ]); + }); +}); diff --git a/test/daemon/session-manager-restore.test.ts b/test/daemon/session-manager-restore.test.ts index 8866ca28a..49a312550 100644 --- a/test/daemon/session-manager-restore.test.ts +++ b/test/daemon/session-manager-restore.test.ts @@ -7,14 +7,15 @@ * main session's JSONL file and emitting its events under the sub-session name. 
*/ -import { describe, it, expect, vi } from 'vitest'; +import { describe, it, expect, vi, beforeEach } from 'vitest'; // ── All mocks hoisted so factories can reference them ───────────────────────── const { storeMock, tmuxListMock, startWatchingMock, startWatchingFileMock, - isWatchingMock, restartSessionMock, getPaneStartCommandMock, upsertSessionMock, discoverLatestOpenCodeSessionIdMock, - opencodeStartWatchingMock, opencodeIsWatchingMock, + isWatchingMock, restartSessionMock, getPaneStartCommandMock, upsertSessionMock, updateSessionStateMock, + discoverLatestOpenCodeSessionIdMock, opencodeStartWatchingMock, opencodeIsWatchingMock, + newSessionMock, timelineEmitMock, } = vi.hoisted(() => ({ storeMock: vi.fn(), tmuxListMock: vi.fn().mockResolvedValue(['deck_Cd_brain', 'deck_sub_5907196l']), @@ -24,20 +25,26 @@ const { restartSessionMock: vi.fn().mockResolvedValue(undefined), getPaneStartCommandMock: vi.fn().mockResolvedValue('claude --dangerously-skip-permissions'), upsertSessionMock: vi.fn(), + updateSessionStateMock: vi.fn(), discoverLatestOpenCodeSessionIdMock: vi.fn().mockResolvedValue(undefined), opencodeStartWatchingMock: vi.fn().mockResolvedValue(undefined), opencodeIsWatchingMock: vi.fn().mockReturnValue(false), + newSessionMock: vi.fn().mockResolvedValue(undefined), + timelineEmitMock: vi.fn(), })); vi.mock('../../src/store/session-store.js', () => ({ listSessions: storeMock, // session-manager imports `listSessions as storeSessions` upsertSession: upsertSessionMock, + updateSessionState: updateSessionStateMock, getSession: vi.fn(() => null), + removeSession: vi.fn(), })); vi.mock('../../src/agent/tmux.js', () => ({ + BACKEND: 'tmux', listSessions: tmuxListMock, - newSession: vi.fn().mockResolvedValue(undefined), + newSession: newSessionMock, killSession: vi.fn().mockResolvedValue(undefined), sessionExists: vi.fn().mockResolvedValue(true), isPaneAlive: vi.fn().mockResolvedValue(true), @@ -77,6 +84,7 @@ vi.mock('../../src/daemon/codex-watcher.js', () => 
({ vi.mock('../../src/agent/detect.js', () => ({ detectStatus: vi.fn(() => 'idle'), + isTransportAgent: vi.fn(() => false), })); vi.mock('../../src/daemon/opencode-history.js', () => ({ @@ -90,7 +98,7 @@ vi.mock('../../src/daemon/opencode-watcher.js', () => ({ })); vi.mock('../../src/daemon/timeline-emitter.js', () => ({ - timelineEmitter: { emit: vi.fn(), on: vi.fn(() => () => {}), epoch: 0, replay: vi.fn(() => ({ events: [], truncated: false })) }, + timelineEmitter: { emit: timelineEmitMock, on: vi.fn(() => () => {}), epoch: 0, replay: vi.fn(() => ({ events: [], truncated: false })) }, })); vi.mock('../../src/daemon/timeline-store.js', () => ({ @@ -101,12 +109,24 @@ vi.mock('../../src/agent/brain-dispatcher.js', () => ({ BrainDispatcher: vi.fn().mockImplementation(() => ({ start: vi.fn(), stop: vi.fn() })), })); -import { restoreFromStore } from '../../src/agent/session-manager.js'; +import { restoreFromStore, restartSession, respawnSession, setSessionEventCallback } from '../../src/agent/session-manager.js'; import { startWatching, startWatchingFile } from '../../src/daemon/jsonl-watcher.js'; // ── Tests ───────────────────────────────────────────────────────────────────── describe('restoreFromStore — sub-session JSONL watcher regression', () => { + beforeEach(() => { + vi.clearAllMocks(); + setSessionEventCallback(() => {}); + tmuxListMock.mockResolvedValue(['deck_Cd_brain', 'deck_sub_5907196l']); + isWatchingMock.mockReturnValue(false); + getPaneStartCommandMock.mockResolvedValue('claude --dangerously-skip-permissions'); + discoverLatestOpenCodeSessionIdMock.mockResolvedValue(undefined); + opencodeStartWatchingMock.mockResolvedValue(undefined); + opencodeIsWatchingMock.mockReturnValue(false); + newSessionMock.mockResolvedValue(undefined); + }); + it('does NOT call startWatching for deck_sub_* sessions (prevents JSONL file stealing)', async () => { storeMock.mockReturnValue([ // Main brain session — has ccSessionId @@ -196,4 +216,150 @@ 
describe('restoreFromStore — sub-session JSONL watcher regression', () => { opencodeSessionId: 'oc-sub-sqlite-789', })); }); + + it('emits a session-scoped error when restartSession hits loop protection', async () => { + const now = Date.now(); + const result = await restartSession({ + name: 'deck_loop_brain', + projectName: 'loop', + role: 'brain', + agentType: 'shell', + projectDir: '/proj', + state: 'running', + restarts: 3, + restartTimestamps: [now - 60_000, now - 120_000, now - 180_000], + createdAt: now, + updatedAt: now, + }); + + expect(result).toBe(false); + expect(updateSessionStateMock).toHaveBeenCalledWith('deck_loop_brain', 'error'); + expect(timelineEmitMock).toHaveBeenCalledWith( + 'deck_loop_brain', + 'assistant.text', + expect.objectContaining({ + text: '⚠️ Error: Restart loop detected: more than 3 restarts within 5 minutes', + streaming: false, + }), + expect.any(Object), + ); + }); + + it('emits a session-scoped error when respawnSession hits loop protection', async () => { + const now = Date.now(); + const result = await respawnSession({ + name: 'deck_loop_w1', + projectName: 'loop', + role: 'w1', + agentType: 'shell', + projectDir: '/proj', + state: 'running', + restarts: 3, + restartTimestamps: [now - 60_000, now - 120_000, now - 180_000], + createdAt: now, + updatedAt: now, + }); + + expect(result).toBe(false); + expect(updateSessionStateMock).toHaveBeenCalledWith('deck_loop_w1', 'error'); + expect(timelineEmitMock).toHaveBeenCalledWith( + 'deck_loop_w1', + 'assistant.text', + expect.objectContaining({ + text: '⚠️ Error: Restart loop detected: more than 3 restarts within 5 minutes', + streaming: false, + }), + expect.any(Object), + ); + }); + + it('persists idle before restarting a missing session', async () => { + const now = Date.now(); + + await restartSession({ + name: 'deck_restart_brain', + projectName: 'restart', + role: 'brain', + agentType: 'shell', + projectDir: '/proj', + state: 'running', + restarts: 0, + restartTimestamps: [], + 
createdAt: now, + updatedAt: now, + }); + + expect(upsertSessionMock).toHaveBeenCalledWith(expect.objectContaining({ + name: 'deck_restart_brain', + state: 'idle', + })); + }); + + it('persists idle before respawning a dead pane', async () => { + const now = Date.now(); + + await respawnSession({ + name: 'deck_respawn_w1', + projectName: 'respawn', + role: 'w1', + agentType: 'shell', + projectDir: '/proj', + state: 'running', + restarts: 0, + restartTimestamps: [], + createdAt: now, + updatedAt: now, + }); + + expect(upsertSessionMock).toHaveBeenCalledWith(expect.objectContaining({ + name: 'deck_respawn_w1', + state: 'idle', + })); + }); + + it('preserves error-state failed-close records for live sessions during restore', async () => { + storeMock.mockReturnValue([ + { + name: 'deck_failedclose_brain', + projectName: 'failedclose', + role: 'brain', + agentType: 'shell', + projectDir: '/proj', + state: 'error', + restarts: 0, + restartTimestamps: [], + createdAt: 1000, + updatedAt: 2000, + }, + { + name: 'deck_sub_failedclose', + projectName: 'deck_sub_failedclose', + role: 'w1', + agentType: 'shell', + projectDir: '/proj', + state: 'error', + parentSession: 'deck_failedclose_brain', + restarts: 0, + restartTimestamps: [], + createdAt: 1000, + updatedAt: 2000, + }, + ]); + tmuxListMock.mockResolvedValue(['deck_failedclose_brain', 'deck_sub_failedclose']); + + await restoreFromStore(); + + expect(updateSessionStateMock).not.toHaveBeenCalledWith('deck_failedclose_brain', 'idle'); + expect(updateSessionStateMock).not.toHaveBeenCalledWith('deck_failedclose_brain', 'stopped'); + expect(updateSessionStateMock).not.toHaveBeenCalledWith('deck_sub_failedclose', 'idle'); + expect(updateSessionStateMock).not.toHaveBeenCalledWith('deck_sub_failedclose', 'stopped'); + expect(upsertSessionMock).not.toHaveBeenCalledWith(expect.objectContaining({ + name: 'deck_failedclose_brain', + state: 'stopped', + })); + 
expect(upsertSessionMock).not.toHaveBeenCalledWith(expect.objectContaining({ + name: 'deck_sub_failedclose', + state: 'stopped', + })); + }); }); diff --git a/test/daemon/session-manager-stop-project.test.ts b/test/daemon/session-manager-stop-project.test.ts index d78f075c0..7a9cc6c0f 100644 --- a/test/daemon/session-manager-stop-project.test.ts +++ b/test/daemon/session-manager-stop-project.test.ts @@ -3,23 +3,35 @@ import { describe, it, expect, vi, beforeEach } from 'vitest'; const { storeMock, killSessionMock, + sessionExistsMock, removeSessionMock, + upsertSessionMock, stopWatchingMock, stopCodexWatchingMock, stopGeminiWatchingMock, + stopOpenCodeWatchingMock, + repoInvalidateMock, + timelineEmitMock, + serverSendMock, } = vi.hoisted(() => ({ storeMock: vi.fn(), killSessionMock: vi.fn().mockResolvedValue(undefined), + sessionExistsMock: vi.fn().mockResolvedValue(false), removeSessionMock: vi.fn(), + upsertSessionMock: vi.fn(), stopWatchingMock: vi.fn(), stopCodexWatchingMock: vi.fn(), stopGeminiWatchingMock: vi.fn(), + stopOpenCodeWatchingMock: vi.fn(), + repoInvalidateMock: vi.fn(), + timelineEmitMock: vi.fn(), + serverSendMock: vi.fn(), })); vi.mock('../../src/agent/tmux.js', () => ({ newSession: vi.fn().mockResolvedValue(undefined), killSession: killSessionMock, - sessionExists: vi.fn().mockResolvedValue(false), + sessionExists: sessionExistsMock, isPaneAlive: vi.fn().mockResolvedValue(true), respawnPane: vi.fn().mockResolvedValue(undefined), listSessions: vi.fn().mockResolvedValue([]), @@ -35,7 +47,7 @@ vi.mock('../../src/agent/tmux.js', () => ({ vi.mock('../../src/store/session-store.js', () => ({ getSession: vi.fn(() => null), - upsertSession: vi.fn(), + upsertSession: upsertSessionMock, removeSession: removeSessionMock, listSessions: storeMock, updateSessionState: vi.fn(), @@ -67,8 +79,14 @@ vi.mock('../../src/daemon/gemini-watcher.js', () => ({ isWatching: vi.fn().mockReturnValue(false), })); +vi.mock('../../src/daemon/opencode-watcher.js', () => ({ + 
startWatching: vi.fn().mockResolvedValue(undefined), + stopWatching: stopOpenCodeWatchingMock, + isWatching: vi.fn().mockReturnValue(false), +})); + vi.mock('../../src/repo/cache.js', () => ({ - repoCache: { invalidate: vi.fn() }, + repoCache: { invalidate: repoInvalidateMock }, })); vi.mock('../../src/agent/signal.js', () => ({ @@ -85,7 +103,7 @@ vi.mock('../../src/agent/provider-registry.js', () => ({ })); vi.mock('../../src/daemon/timeline-emitter.js', () => ({ - timelineEmitter: { emit: vi.fn(), on: vi.fn(() => () => {}), epoch: 0, replay: vi.fn(() => ({ events: [], truncated: false })) }, + timelineEmitter: { emit: timelineEmitMock, on: vi.fn(() => () => {}), epoch: 0, replay: vi.fn(() => ({ events: [], truncated: false })) }, })); vi.mock('../../src/agent/transport-session-runtime.js', () => ({ @@ -106,20 +124,34 @@ import { stopProject } from '../../src/agent/session-manager.js'; describe('stopProject', () => { beforeEach(() => { vi.clearAllMocks(); + sessionExistsMock.mockResolvedValue(false); + serverSendMock.mockImplementation(() => undefined); }); it('stops project sessions and nested sub-sessions recursively', async () => { storeMock.mockReturnValue([ - { name: 'deck_recon_brain', projectName: 'recon', projectDir: '/proj', state: 'running' }, - { name: 'deck_recon_w10', projectName: 'recon', projectDir: '/proj', state: 'idle' }, - { name: 'deck_sub_root', projectName: 'deck_sub_root', projectDir: '/proj', state: 'running', parentSession: 'deck_recon_w10' }, - { name: 'deck_sub_nested', projectName: 'deck_sub_nested', projectDir: '/proj', state: 'running', parentSession: 'deck_sub_root' }, - { name: 'deck_other_brain', projectName: 'other', projectDir: '/other', state: 'running' }, - { name: 'deck_sub_other', projectName: 'deck_sub_other', projectDir: '/other', state: 'running', parentSession: 'deck_other_brain' }, + { name: 'deck_recon_brain', projectName: 'recon', projectDir: '/proj', role: 'brain', agentType: 'claude-code', state: 'running', restarts: 
0, restartTimestamps: [], createdAt: 1, updatedAt: 1 }, + { name: 'deck_recon_w10', projectName: 'recon', projectDir: '/proj', role: 'w10', agentType: 'codex', state: 'idle', restarts: 0, restartTimestamps: [], createdAt: 1, updatedAt: 1 }, + { name: 'deck_sub_root', projectName: 'deck_sub_root', projectDir: '/proj', role: 'w1', agentType: 'claude-code', state: 'running', parentSession: 'deck_recon_w10', restarts: 0, restartTimestamps: [], createdAt: 1, updatedAt: 1 }, + { name: 'deck_sub_nested', projectName: 'deck_sub_nested', projectDir: '/proj', role: 'w1', agentType: 'claude-code', state: 'running', parentSession: 'deck_sub_root', restarts: 0, restartTimestamps: [], createdAt: 1, updatedAt: 1 }, + { name: 'deck_other_brain', projectName: 'other', projectDir: '/other', role: 'brain', agentType: 'claude-code', state: 'running', restarts: 0, restartTimestamps: [], createdAt: 1, updatedAt: 1 }, + { name: 'deck_sub_other', projectName: 'deck_sub_other', projectDir: '/other', role: 'w1', agentType: 'claude-code', state: 'running', parentSession: 'deck_other_brain', restarts: 0, restartTimestamps: [], createdAt: 1, updatedAt: 1 }, ]); - await stopProject('recon'); + const result = await stopProject('recon', { send: serverSendMock }); + expect(result).toEqual({ + ok: true, + closed: ['deck_sub_nested', 'deck_sub_root', 'deck_recon_brain', 'deck_recon_w10'], + failed: [], + }); + + expect(killSessionMock.mock.calls.map((call) => call[0])).toEqual([ + 'deck_sub_nested', + 'deck_sub_root', + 'deck_recon_brain', + 'deck_recon_w10', + ]); expect(killSessionMock).toHaveBeenCalledWith('deck_recon_brain'); expect(killSessionMock).toHaveBeenCalledWith('deck_recon_w10'); expect(killSessionMock).toHaveBeenCalledWith('deck_sub_root'); @@ -133,5 +165,109 @@ describe('stopProject', () => { expect(removeSessionMock).toHaveBeenCalledWith('deck_sub_nested'); expect(removeSessionMock).not.toHaveBeenCalledWith('deck_other_brain'); 
expect(removeSessionMock).not.toHaveBeenCalledWith('deck_sub_other'); + + expect(serverSendMock).toHaveBeenCalledWith({ type: 'subsession.closed', id: 'nested', sessionName: 'deck_sub_nested' }); + expect(serverSendMock).toHaveBeenCalledWith({ type: 'subsession.closed', id: 'root', sessionName: 'deck_sub_root' }); + expect(repoInvalidateMock).toHaveBeenCalledTimes(1); + }); + + it('retains failed descendants for retry and does not emit false-success close events', async () => { + storeMock.mockReturnValue([ + { name: 'deck_recon_brain', projectName: 'recon', projectDir: '/proj', role: 'brain', agentType: 'claude-code', state: 'running', restarts: 0, restartTimestamps: [], createdAt: 1, updatedAt: 1 }, + { name: 'deck_sub_root', projectName: 'deck_sub_root', projectDir: '/proj', role: 'w1', agentType: 'claude-code', state: 'running', parentSession: 'deck_recon_brain', restarts: 0, restartTimestamps: [], createdAt: 1, updatedAt: 1 }, + ]); + sessionExistsMock.mockImplementation(async (sessionName: string) => sessionName === 'deck_sub_root'); + + const result = await stopProject('recon', { send: serverSendMock }); + + expect(result.ok).toBe(false); + expect(result.closed).toEqual(['deck_recon_brain']); + expect(result.failed).toEqual( + expect.arrayContaining([ + expect.objectContaining({ sessionName: 'deck_sub_root', stage: 'verify' }), + ]), + ); + expect(removeSessionMock).not.toHaveBeenCalledWith('deck_sub_root'); + expect(removeSessionMock).toHaveBeenCalledWith('deck_recon_brain'); + expect(upsertSessionMock).toHaveBeenCalledWith(expect.objectContaining({ + name: 'deck_sub_root', + state: 'error', + parentSession: 'deck_recon_brain', + })); + expect(serverSendMock).not.toHaveBeenCalledWith(expect.objectContaining({ + type: 'subsession.closed', + sessionName: 'deck_sub_root', + })); + expect(timelineEmitMock).not.toHaveBeenCalledWith('deck_sub_root', 'session.state', { state: 'stopped' }); + expect(serverSendMock).not.toHaveBeenCalledWith(expect.objectContaining({ 
+ type: 'subsession.closed', + id: 'root', + })); + }); + + it('does not emit stopped or remove a descendant when subsession.closed persistence fails', async () => { + storeMock.mockReturnValue([ + { name: 'deck_recon_brain', projectName: 'recon', projectDir: '/proj', role: 'brain', agentType: 'claude-code', state: 'running', restarts: 0, restartTimestamps: [], createdAt: 1, updatedAt: 1 }, + { name: 'deck_sub_root', projectName: 'deck_sub_root', projectDir: '/proj', role: 'w1', agentType: 'claude-code', state: 'running', parentSession: 'deck_recon_brain', restarts: 0, restartTimestamps: [], createdAt: 1, updatedAt: 1 }, + ]); + serverSendMock.mockImplementation((msg: { type?: string; sessionName?: string }) => { + if (msg.type === 'subsession.closed' && msg.sessionName === 'deck_sub_root') { + throw new Error('bridge offline'); + } + }); + + const result = await stopProject('recon', { send: serverSendMock }); + + expect(result.ok).toBe(false); + expect(result.failed).toEqual( + expect.arrayContaining([ + expect.objectContaining({ sessionName: 'deck_sub_root', stage: 'persist', message: 'bridge offline' }), + ]), + ); + expect(removeSessionMock).not.toHaveBeenCalledWith('deck_sub_root'); + expect(timelineEmitMock).not.toHaveBeenCalledWith('deck_sub_root', 'session.state', { state: 'stopped' }); + expect(upsertSessionMock).toHaveBeenCalledWith(expect.objectContaining({ + name: 'deck_sub_root', + state: 'error', + })); + }); + + it('retains failed descendants for a later retry that can complete successfully', async () => { + storeMock + .mockReturnValueOnce([ + { name: 'deck_recon_brain', projectName: 'recon', projectDir: '/proj', role: 'brain', agentType: 'claude-code', state: 'running', restarts: 0, restartTimestamps: [], createdAt: 1, updatedAt: 1 }, + { name: 'deck_sub_root', projectName: 'deck_sub_root', projectDir: '/proj', role: 'w1', agentType: 'claude-code', state: 'running', parentSession: 'deck_recon_brain', restarts: 0, restartTimestamps: [], createdAt: 
1, updatedAt: 1 }, + ]) + .mockReturnValueOnce([ + { name: 'deck_recon_brain', projectName: 'recon', projectDir: '/proj', role: 'brain', agentType: 'claude-code', state: 'running', restarts: 0, restartTimestamps: [], createdAt: 1, updatedAt: 1 }, + { name: 'deck_sub_root', projectName: 'deck_sub_root', projectDir: '/proj', role: 'w1', agentType: 'claude-code', state: 'error', parentSession: 'deck_recon_brain', restarts: 0, restartTimestamps: [], createdAt: 1, updatedAt: 2 }, + ]); + sessionExistsMock + .mockResolvedValueOnce(true) + .mockResolvedValueOnce(false) + .mockResolvedValueOnce(false) + .mockResolvedValueOnce(false); + + const first = await stopProject('recon', { send: serverSendMock }); + const second = await stopProject('recon', { send: serverSendMock }); + + expect(first.ok).toBe(false); + expect(first.failed).toEqual( + expect.arrayContaining([ + expect.objectContaining({ sessionName: 'deck_sub_root', stage: 'verify' }), + ]), + ); + expect(upsertSessionMock).toHaveBeenCalledWith(expect.objectContaining({ + name: 'deck_sub_root', + state: 'error', + })); + + expect(second).toEqual({ + ok: true, + closed: ['deck_sub_root', 'deck_recon_brain'], + failed: [], + }); + expect(removeSessionMock).toHaveBeenCalledWith('deck_sub_root'); + expect(serverSendMock).toHaveBeenCalledWith({ type: 'subsession.closed', id: 'root', sessionName: 'deck_sub_root' }); }); }); diff --git a/test/daemon/session-restoration.test.ts b/test/daemon/session-restoration.test.ts index 6ec1fcf17..6dceef54d 100644 --- a/test/daemon/session-restoration.test.ts +++ b/test/daemon/session-restoration.test.ts @@ -113,13 +113,14 @@ vi.mock('../../src/daemon/opencode-watcher.js', () => ({ isWatching: mocks.openCodeIsWatching, })); -import { restoreFromStore } from '../../src/agent/session-manager.js'; +import { restoreFromStore, setSessionEventCallback } from '../../src/agent/session-manager.js'; // ── Tests ───────────────────────────────────────────────────────────────────── 
describe('Session Restoration (all agents)', () => { beforeEach(() => { vi.clearAllMocks(); + setSessionEventCallback(() => {}); }); it('restores Gemini watcher for live sessions', async () => { @@ -269,4 +270,20 @@ describe('Session Restoration (all agents)', () => { opencodeSessionId: 'oc-main-sqlite-123', })); }); + + it('discovers orphan tmux sessions as idle until live state is observed', async () => { + const onSessionEvent = vi.fn(); + setSessionEventCallback(onSessionEvent); + mocks.storeListSessions.mockReturnValue([]); + mocks.tmuxListSessions.mockResolvedValue(['deck_orphan_brain']); + mocks.getPaneStartCommand.mockResolvedValueOnce('codex'); + + await restoreFromStore(); + + expect(mocks.storeUpsertSession).toHaveBeenCalledWith(expect.objectContaining({ + name: 'deck_orphan_brain', + state: 'idle', + })); + expect(onSessionEvent).toHaveBeenCalledWith('started', 'deck_orphan_brain', 'idle'); + }); }); diff --git a/test/daemon/session-type-switch.test.ts b/test/daemon/session-type-switch.test.ts new file mode 100644 index 000000000..60b7df0ee --- /dev/null +++ b/test/daemon/session-type-switch.test.ts @@ -0,0 +1,36 @@ +import { describe, expect, it } from 'vitest'; +import { getCompatibleSessionIds } from '../../src/agent/session-manager.js'; + +describe('getCompatibleSessionIds', () => { + it('keeps Claude session ids stable across cli/sdk switches', () => { + const record = { + ccSessionId: 'cc-session-123', + codexSessionId: 'codex-thread-999', + geminiSessionId: 'gem-1', + opencodeSessionId: 'oc-1', + }; + + expect(getCompatibleSessionIds(record, 'claude-code')).toEqual({ + ccSessionId: 'cc-session-123', + }); + expect(getCompatibleSessionIds(record, 'claude-code-sdk')).toEqual({ + ccSessionId: 'cc-session-123', + }); + }); + + it('keeps Codex session ids stable across cli/sdk switches', () => { + const record = { + ccSessionId: 'cc-session-123', + codexSessionId: 'codex-thread-999', + geminiSessionId: 'gem-1', + opencodeSessionId: 'oc-1', + }; + + 
expect(getCompatibleSessionIds(record, 'codex')).toEqual({ + codexSessionId: 'codex-thread-999', + }); + expect(getCompatibleSessionIds(record, 'codex-sdk')).toEqual({ + codexSessionId: 'codex-thread-999', + }); + }); +}); diff --git a/test/daemon/subsession-manager.test.ts b/test/daemon/subsession-manager.test.ts index 5b2f5590a..0835f978e 100644 --- a/test/daemon/subsession-manager.test.ts +++ b/test/daemon/subsession-manager.test.ts @@ -8,8 +8,11 @@ const { capturePaneMock, timelineReadMock, geminiStartWatchingMock, geminiIsWatchingMock, codexStartWatchingByIdMock, codexIsWatchingMock, codexIsFileClaimedMock, + jsonlStopWatchingMock, codexStopWatchingMock, geminiStopWatchingMock, opencodeStopWatchingMock, + killSessionMock, timelineEmitMock, emitSessionInlineErrorMock, removeSessionMock, resolveGeminiSessionIdMock, injectGeminiMemoryMock, launchTransportSessionMock, getTransportRuntimeMock, + getAgentVersionMock, } = vi.hoisted(() => ({ upsertSessionMock: vi.fn(), startWatchingMock: vi.fn().mockResolvedValue(undefined), @@ -26,11 +29,19 @@ const { codexStartWatchingByIdMock: vi.fn().mockResolvedValue(undefined), codexIsWatchingMock: vi.fn().mockReturnValue(false), codexIsFileClaimedMock: vi.fn().mockReturnValue(false), + jsonlStopWatchingMock: vi.fn(), + codexStopWatchingMock: vi.fn(), + geminiStopWatchingMock: vi.fn(), + opencodeStopWatchingMock: vi.fn(), + killSessionMock: vi.fn().mockResolvedValue(undefined), + timelineEmitMock: vi.fn(), + emitSessionInlineErrorMock: vi.fn(), removeSessionMock: vi.fn(), resolveGeminiSessionIdMock: vi.fn().mockResolvedValue('resolved-gemini-uuid'), injectGeminiMemoryMock: vi.fn().mockResolvedValue(undefined), launchTransportSessionMock: vi.fn().mockResolvedValue(undefined), getTransportRuntimeMock: vi.fn().mockReturnValue(null), + getAgentVersionMock: vi.fn().mockResolvedValue(undefined), })); vi.mock('../../src/store/session-store.js', () => ({ @@ -42,7 +53,7 @@ vi.mock('../../src/store/session-store.js', () => ({ 
vi.mock('../../src/daemon/jsonl-watcher.js', () => ({ startWatchingFile: startWatchingFileMock, startWatching: startWatchingMock, - stopWatching: vi.fn(), + stopWatching: jsonlStopWatchingMock, isWatching: isWatchingMock, preClaimFile: vi.fn(), claudeProjectDir: (dir: string) => `/mock-claude-projects/${dir.replace(/\//g, '-')}`, @@ -55,7 +66,7 @@ vi.mock('../../src/daemon/codex-watcher.js', () => ({ startWatching: vi.fn().mockResolvedValue(undefined), startWatchingSpecificFile: vi.fn().mockResolvedValue(undefined), startWatchingById: codexStartWatchingByIdMock, - stopWatching: vi.fn(), + stopWatching: codexStopWatchingMock, isWatching: codexIsWatchingMock, isFileClaimedByOther: codexIsFileClaimedMock, findRolloutPathByUuid: vi.fn().mockResolvedValue(null), @@ -64,16 +75,23 @@ vi.mock('../../src/daemon/codex-watcher.js', () => ({ vi.mock('../../src/daemon/gemini-watcher.js', () => ({ startWatching: geminiStartWatchingMock, startWatchingDiscovered: vi.fn().mockResolvedValue(undefined), - stopWatching: vi.fn(), + stopWatching: geminiStopWatchingMock, isWatching: geminiIsWatchingMock, })); +vi.mock('../../src/daemon/opencode-watcher.js', () => ({ + startWatching: vi.fn().mockResolvedValue(undefined), + stopWatching: opencodeStopWatchingMock, + isWatching: vi.fn().mockReturnValue(false), +})); + vi.mock('../../src/agent/tmux.js', () => ({ newSession: newSessionMock, - killSession: vi.fn().mockResolvedValue(undefined), + killSession: killSessionMock, sessionExists: sessionExistsMock, capturePane: capturePaneMock, sendKey: vi.fn().mockResolvedValue(undefined), + getPanePids: vi.fn().mockResolvedValue([]), })); vi.mock('../../src/agent/session-manager.js', () => ({ @@ -82,6 +100,10 @@ vi.mock('../../src/agent/session-manager.js', () => ({ getTransportRuntime: getTransportRuntimeMock, })); +vi.mock('../../src/agent/agent-version.js', () => ({ + getAgentVersion: getAgentVersionMock, +})); + vi.mock('../../src/agent/drivers/gemini.js', () => ({ GeminiDriver: 
vi.fn().mockImplementation(() => ({ resolveSessionId: resolveGeminiSessionIdMock, @@ -96,7 +118,15 @@ vi.mock('../../src/daemon/timeline-store.js', () => ({ timelineStore: { read: timelineReadMock, append: vi.fn() }, })); -import { subSessionName, detectShells, startSubSession, rebuildSubSessions, readSubSessionResponse, normalizeShellBinForHost } from '../../src/daemon/subsession-manager.js'; +vi.mock('../../src/daemon/timeline-emitter.js', () => ({ + timelineEmitter: { emit: timelineEmitMock, on: vi.fn(() => () => {}), epoch: 0, replay: vi.fn(() => ({ events: [], truncated: false })) }, +})); + +vi.mock('../../src/daemon/session-error.js', () => ({ + emitSessionInlineError: emitSessionInlineErrorMock, +})); + +import { subSessionName, detectShells, startSubSession, stopSubSession, rebuildSubSessions, readSubSessionResponse, normalizeShellBinForHost } from '../../src/daemon/subsession-manager.js'; import { upsertSession } from '../../src/store/session-store.js'; import { startWatchingFile, startWatching } from '../../src/daemon/jsonl-watcher.js'; @@ -375,6 +405,111 @@ describe('SAFE_SESSION_NAME_RE — session name validation', () => { }); }); +describe('stopSubSession()', () => { + beforeEach(() => { + vi.clearAllMocks(); + sessionExistsMock.mockResolvedValue(false); + getTransportRuntimeMock.mockReturnValue(null); + getSessionMock.mockReturnValue({ + name: 'deck_sub_worker', + projectName: 'deck_sub_worker', + role: 'w1', + agentType: 'claude-code', + projectDir: '/proj', + state: 'running', + restarts: 0, + restartTimestamps: [], + createdAt: 1, + updatedAt: 1, + parentSession: 'deck_proj_brain', + }); + }); + + it('emits stopping before stopped and only announces subsession.closed after verified success', async () => { + const serverLink = { send: vi.fn() }; + + const result = await stopSubSession('deck_sub_worker', serverLink); + + expect(result).toEqual({ + ok: true, + closed: ['deck_sub_worker'], + failed: [], + }); + 
expect(jsonlStopWatchingMock).toHaveBeenCalledWith('deck_sub_worker'); + expect(codexStopWatchingMock).toHaveBeenCalledWith('deck_sub_worker'); + expect(geminiStopWatchingMock).toHaveBeenCalledWith('deck_sub_worker'); + expect(opencodeStopWatchingMock).toHaveBeenCalledWith('deck_sub_worker'); + expect(killSessionMock).toHaveBeenCalledWith('deck_sub_worker'); + expect(timelineEmitMock.mock.calls[0]).toEqual(['deck_sub_worker', 'session.state', { state: 'stopping' }]); + expect(timelineEmitMock).toHaveBeenCalledWith('deck_sub_worker', 'session.state', { state: 'stopped' }); + expect(removeSessionMock).toHaveBeenCalledWith('deck_sub_worker'); + expect(serverLink.send).toHaveBeenCalledWith({ + type: 'subsession.closed', + id: 'worker', + sessionName: 'deck_sub_worker', + }); + }); + + it('keeps authoritative state and emits error instead of stopped when close verification fails', async () => { + const serverLink = { send: vi.fn() }; + sessionExistsMock.mockResolvedValue(true); + + const result = await stopSubSession('deck_sub_worker', serverLink); + + expect(result.ok).toBe(false); + expect(result.closed).toEqual([]); + expect(result.failed).toEqual( + expect.arrayContaining([ + expect.objectContaining({ sessionName: 'deck_sub_worker', stage: 'verify' }), + ]), + ); + expect(timelineEmitMock.mock.calls[0]).toEqual(['deck_sub_worker', 'session.state', { state: 'stopping' }]); + expect(timelineEmitMock).toHaveBeenCalledWith( + 'deck_sub_worker', + 'session.state', + expect.objectContaining({ state: 'error' }), + ); + expect(timelineEmitMock).not.toHaveBeenCalledWith('deck_sub_worker', 'session.state', { state: 'stopped' }); + expect(emitSessionInlineErrorMock).toHaveBeenCalledWith( + 'deck_sub_worker', + 'Sub-session close failed during verify: session still exists after kill', + ); + expect(removeSessionMock).not.toHaveBeenCalledWith('deck_sub_worker'); + expect(upsertSessionMock).toHaveBeenCalledWith(expect.objectContaining({ + name: 'deck_sub_worker', + state: 
'error', + })); + expect(serverLink.send).not.toHaveBeenCalledWith(expect.objectContaining({ type: 'subsession.closed' })); + }); + + it('does not emit stopped or remove the sub-session when subsession.closed persistence fails', async () => { + const serverLink = { + send: vi.fn((msg: { type?: string }) => { + if (msg.type === 'subsession.closed') throw new Error('bridge offline'); + }), + }; + + const result = await stopSubSession('deck_sub_worker', serverLink); + + expect(result.ok).toBe(false); + expect(result.failed).toEqual( + expect.arrayContaining([ + expect.objectContaining({ sessionName: 'deck_sub_worker', stage: 'persist', message: 'bridge offline' }), + ]), + ); + expect(removeSessionMock).not.toHaveBeenCalledWith('deck_sub_worker'); + expect(timelineEmitMock).not.toHaveBeenCalledWith('deck_sub_worker', 'session.state', { state: 'stopped' }); + expect(upsertSessionMock).toHaveBeenCalledWith(expect.objectContaining({ + name: 'deck_sub_worker', + state: 'error', + })); + expect(emitSessionInlineErrorMock).toHaveBeenCalledWith( + 'deck_sub_worker', + 'Sub-session close failed during persist: bridge offline', + ); + }); +}); + describe('readSubSessionResponse()', () => { beforeEach(() => { vi.clearAllMocks(); @@ -453,7 +588,7 @@ describe('rebuildSubSessions — geminiSessionId preserved', () => { // upsertSession must include the stored geminiSessionId expect(upsertSessionMock).toHaveBeenCalledWith( - expect.objectContaining({ geminiSessionId: 'stored-uuid-1234' }), + expect.objectContaining({ geminiSessionId: 'stored-uuid-1234', state: 'idle' }), ); }); @@ -476,7 +611,7 @@ describe('rebuildSubSessions — geminiSessionId preserved', () => { }]); expect(upsertSessionMock).toHaveBeenCalledWith( - expect.objectContaining({ geminiSessionId: 'new-uuid-from-server' }), + expect.objectContaining({ geminiSessionId: 'new-uuid-from-server', state: 'idle' }), ); }); @@ -501,6 +636,9 @@ describe('rebuildSubSessions — geminiSessionId preserved', () => { 'deck_sub_rebuild3', 
'fallback-uuid', ); + expect(upsertSessionMock).toHaveBeenCalledWith( + expect.objectContaining({ geminiSessionId: 'fallback-uuid', state: 'idle' }), + ); }); it('does NOT lose geminiSessionId when store has it (regression)', async () => { diff --git a/test/daemon/terminal-streamer-snapshot.test.ts b/test/daemon/terminal-streamer-snapshot.test.ts index b8cb57e7e..f8cc6fb5b 100644 --- a/test/daemon/terminal-streamer-snapshot.test.ts +++ b/test/daemon/terminal-streamer-snapshot.test.ts @@ -18,7 +18,8 @@ vi.mock('../../src/store/session-store.js', () => ({ upsertSession: vi.fn(), })); -import { capturePaneVisible, capturePaneHistory, getPaneSize, startPipePaneStream } from '../../src/agent/tmux.js'; +import { capturePaneVisible, capturePaneHistory, getPaneId, getPaneSize, startPipePaneStream, sessionExists } from '../../src/agent/tmux.js'; +import { getSession } from '../../src/store/session-store.js'; import { TerminalStreamer } from '../../src/daemon/terminal-streamer.js'; import { TimelineEmitter } from '../../src/daemon/timeline-emitter.js'; @@ -28,8 +29,11 @@ import { timelineEmitter } from '../../src/daemon/timeline-emitter.js'; const mockCapture = capturePaneVisible as ReturnType; const mockHistory = capturePaneHistory as ReturnType; +const mockGetPaneId = getPaneId as ReturnType; const mockSize = getPaneSize as ReturnType; const mockStartPipe = startPipePaneStream as ReturnType; +const mockSessionExists = sessionExists as ReturnType; +const mockGetSession = getSession as ReturnType; /** Flush all pending timers + microtasks so the capture loop runs. 
*/ const flush = () => vi.advanceTimersByTimeAsync(200); @@ -45,6 +49,9 @@ describe('TerminalStreamer — snapshot behavior', () => { mockSize.mockResolvedValue({ cols: 80, rows: 4 }); mockCapture.mockResolvedValue('line0\nline1\nline2\nline3'); mockHistory.mockResolvedValue(''); + mockGetPaneId.mockResolvedValue('%1'); + mockSessionExists.mockResolvedValue(true); + mockGetSession.mockReturnValue({ paneId: '%1' }); // Mock startPipePaneStream to return a no-op stream (never emits data) const noopStream = { on: vi.fn(), destroy: vi.fn() }; @@ -166,4 +173,51 @@ describe('TerminalStreamer — snapshot behavior', () => { // No new diffs after unsubscribe expect(received.length).toBe(countAfterFirst); }); + + it('emits a session-scoped error after pipe-pane rebind retries are exhausted', async () => { + const session = 'broken-stream-session'; + mockStartPipe.mockRejectedValue(new Error('cat spawn failed')); + + streamer.subscribe({ + sessionName: session, + send: () => {}, + onError: () => {}, + }); + + await flush(); + await vi.advanceTimersByTimeAsync(61_000); + + expect(emitSpy).toHaveBeenCalledWith( + session, + 'assistant.text', + expect.objectContaining({ + text: '⚠️ Error: Terminal stream unavailable after max retries', + streaming: false, + }), + expect.any(Object), + ); + }); + + it('suppresses pane-id inline errors for transport sessions', async () => { + const session = 'deck_sub_qwen'; + mockGetSession.mockReturnValue({ agentType: 'qwen', runtimeType: 'transport' }); + mockGetPaneId.mockResolvedValue(undefined); + + streamer.subscribe({ + sessionName: session, + send: () => {}, + onError: () => {}, + }); + + await flush(); + + expect(emitSpy).not.toHaveBeenCalledWith( + session, + 'assistant.text', + expect.objectContaining({ + text: '⚠️ Error: Terminal stream unavailable: pane id not available. 
Restart the session to fix.', + }), + expect.any(Object), + ); + }); }); diff --git a/test/daemon/timeline-emitter.test.ts b/test/daemon/timeline-emitter.test.ts index bc28e4eef..f60cabf4c 100644 --- a/test/daemon/timeline-emitter.test.ts +++ b/test/daemon/timeline-emitter.test.ts @@ -70,6 +70,25 @@ describe('TimelineEmitter — seq counter', () => { expect(timelineStore.append).toHaveBeenCalledTimes(2); }); + it('preserves repeated user messages when allowDuplicate is set', () => { + emitter.emit('session-a', 'user.message', { text: 'retry', allowDuplicate: true }, { ts: 10 }); + emitter.emit('session-a', 'user.message', { text: 'retry', allowDuplicate: true }, { ts: 20 }); + + const { events } = emitter.replay('session-a', 0); + expect(events).toHaveLength(2); + expect(events[0]?.payload.text).toBe('retry'); + expect(events[1]?.payload.text).toBe('retry'); + }); + + it('still suppresses duplicate user messages without allowDuplicate', () => { + emitter.emit('session-a', 'user.message', { text: 'retry' }, { ts: 10 }); + emitter.emit('session-a', 'user.message', { text: 'retry' }, { ts: 20 }); + + const { events } = emitter.replay('session-a', 0); + expect(events).toHaveLength(1); + expect(events[0]?.payload.text).toBe('retry'); + }); + it('does not let a stale streaming update overwrite a newer final event with the same eventId', () => { emitter.emit('session-a', 'assistant.text', { text: 'partial', streaming: true }, { eventId: 'transport:session-a:msg-1', ts: 10 }); emitter.emit('session-a', 'assistant.text', { text: 'final', streaming: false }, { eventId: 'transport:session-a:msg-1', ts: 20 }); diff --git a/test/daemon/transport-session-runtime.test.ts b/test/daemon/transport-session-runtime.test.ts index 1d20ed7ce..46c88d9bd 100644 --- a/test/daemon/transport-session-runtime.test.ts +++ b/test/daemon/transport-session-runtime.test.ts @@ -98,7 +98,7 @@ describe('TransportSessionRuntime', () => { expect(runtime.sending).toBe(false); }); - it('cancel() delegates to 
provider.cancel and clears pending', () => { + it('cancel() delegates to provider.cancel and preserves pending', () => { runtime.send('first'); runtime.send('queued1'); runtime.send('queued2'); @@ -106,6 +106,19 @@ describe('TransportSessionRuntime', () => { runtime.cancel(); expect(mock.provider.cancel).toHaveBeenCalledWith('sess-1'); + expect(runtime.pendingCount).toBe(2); + }); + + it('cancelled turns drain pending messages into the next turn', () => { + runtime.send('first'); + runtime.send('queued1'); + runtime.send('queued2'); + + runtime.cancel(); + mock.fireError('sess-1', { code: 'CANCELLED', message: 'cancelled', recoverable: true }); + + expect(mock.provider.send).toHaveBeenCalledTimes(2); + expect(mock.provider.send).toHaveBeenNthCalledWith(2, 'sess-1', 'queued1\n\nqueued2', undefined, undefined); expect(runtime.pendingCount).toBe(0); }); diff --git a/test/daemon/transport-status-lifecycle.test.ts b/test/daemon/transport-status-lifecycle.test.ts index 9070f1e5c..0d14de2f1 100644 --- a/test/daemon/transport-status-lifecycle.test.ts +++ b/test/daemon/transport-status-lifecycle.test.ts @@ -200,18 +200,18 @@ describe('batched queuing', () => { expect(runtime.getStatus()).toBe('thinking'); }); - it('on cancel, pending messages are cleared (not drained)', () => { + it('on cancel, pending messages are drained into the next turn', () => { runtime.send('first'); - runtime.send('will-be-dropped'); - runtime.send('also-dropped'); + runtime.send('still-queued'); + runtime.send('send-after-stop'); runtime.cancel(); mock.fireCancelled('sess-1'); - // Cancel clears pending → no drain, no second send - expect(mock.provider.send).toHaveBeenCalledTimes(1); + expect(mock.provider.send).toHaveBeenCalledTimes(2); + expect(mock.provider.send).toHaveBeenNthCalledWith(2, 'sess-1', 'still-queued\n\nsend-after-stop', undefined, undefined); expect(runtime.pendingCount).toBe(0); - expect(runtime.getStatus()).toBe('idle'); + expect(runtime.getStatus()).toBe('thinking'); }); 
it('multiple turns with queuing: correct history order', () => { diff --git a/test/e2e/daemon-reconnect.test.ts b/test/e2e/daemon-reconnect.test.ts index d458e3763..8fc0698e3 100644 --- a/test/e2e/daemon-reconnect.test.ts +++ b/test/e2e/daemon-reconnect.test.ts @@ -132,7 +132,7 @@ describe.skipIf(SKIP)('Daemon reconnect resilience (e2e)', () => { // Session should still be there with the same pane expect(await sessionExists(name)).toBe(true); expect(await isPaneAlive(name)).toBe(true); - }); + }, 45_000); // ── 2. Dead pane is detected and session marked on restore ─────────────── diff --git a/test/e2e/main-session-shutdown.test.ts b/test/e2e/main-session-shutdown.test.ts new file mode 100644 index 000000000..412dd82ad --- /dev/null +++ b/test/e2e/main-session-shutdown.test.ts @@ -0,0 +1,136 @@ +/** + * E2E for daemon-owned main-session shutdown. + * + * Uses: + * - real tmux sessions + * - real session store + * - real stopProject()/restoreFromStore() flow + * + * Verifies: + * - project stop closes the full session tree + * - the daemon process remains usable after shutdown + * - descendants are not resurrected after store reload + restore + * + * Requires tmux. Skip with SKIP_TMUX_TESTS=1. 
+ */
+import { afterAll, afterEach, beforeAll, describe, expect, it } from 'vitest';
+import { tmpdir } from 'os';
+import { join } from 'path';
+import { mkdirSync } from 'fs';
+import { newSession, killSession, sessionExists, capturePane, listSessions } from '../../src/agent/tmux.js';
+import { loadStore, flushStore, upsertSession, getSession, removeSession, listSessions as storeSessions } from '../../src/store/session-store.js';
+import { stopProject, restoreFromStore } from '../../src/agent/session-manager.js';
+
+const SKIP = process.env.SKIP_TMUX_TESTS === '1' || !!process.env.CLAUDECODE;
+const RUN_ID = Math.random().toString(36).slice(2, 8);
+const PROJECT = `shutdown${RUN_ID}`;
+const MAIN_SESSION = `deck_${PROJECT}_brain`;
+const WORKER_SESSION = `deck_${PROJECT}_w1`;
+const SUB_ROOT = `deck_sub_${RUN_ID}root`;
+const SUB_CHILD = `deck_sub_${RUN_ID}child`;
+const PROBE_SESSION = `deck_${PROJECT}_probe`;
+
+const wait = (ms: number) => new Promise((resolve) => setTimeout(resolve, ms));
+
+function makeRecord(
+  name: string,
+  role: 'brain' | `w${number}`,
+  overrides: Partial<import('../../src/store/session-store.js').SessionRecord> = {},
+): import('../../src/store/session-store.js').SessionRecord {
+  return {
+    name,
+    projectName: PROJECT,
+    role,
+    agentType: 'shell',
+    projectDir: tmpdir(),
+    state: 'running',
+    restarts: 0,
+    restartTimestamps: [],
+    createdAt: Date.now(),
+    updatedAt: Date.now(),
+    ...overrides,
+  };
+}
+
+async function createSleepSession(name: string): Promise<void> {
+  await killSession(name).catch(() => {});
+  await newSession(name, 'bash -lc "trap : TERM INT; sleep 999"', { cwd: tmpdir() });
+  await wait(250);
+  expect(await sessionExists(name)).toBe(true);
+}
+
+describe.skipIf(SKIP)('main-session shutdown e2e', () => {
+  beforeAll(async () => {
+    const testHome = join(tmpdir(), `imcodes-shutdown-${RUN_ID}`);
+    mkdirSync(testHome, { recursive: true });
+    process.env.HOME = testHome;
+    await loadStore();
+  });
+
+  afterEach(async () => {
+    for (const name of [MAIN_SESSION, WORKER_SESSION, SUB_ROOT, SUB_CHILD, PROBE_SESSION]) {
+      await killSession(name).catch(() => {});
+      removeSession(name);
+    }
+  });
+
+  afterAll(async () => {
+    const live = await listSessions();
+    for (const name of live) {
+      if (name === MAIN_SESSION || name === WORKER_SESSION || name === SUB_ROOT || name === SUB_CHILD || name === PROBE_SESSION) {
+        await killSession(name).catch(() => {});
+      }
+    }
+  });
+
+  it('closes the full tree, remains usable, and does not resurrect descendants after restore', async () => {
+    await createSleepSession(MAIN_SESSION);
+    await createSleepSession(WORKER_SESSION);
+    await createSleepSession(SUB_ROOT);
+    await createSleepSession(SUB_CHILD);
+
+    upsertSession(makeRecord(MAIN_SESSION, 'brain'));
+    upsertSession(makeRecord(WORKER_SESSION, 'w1'));
+    upsertSession(makeRecord(SUB_ROOT, 'w1', {
+      projectName: SUB_ROOT,
+      parentSession: WORKER_SESSION,
+    }));
+    upsertSession(makeRecord(SUB_CHILD, 'w1', {
+      projectName: SUB_CHILD,
+      parentSession: SUB_ROOT,
+    }));
+
+    const serverLink = { send: () => {} };
+    const result = await stopProject(PROJECT, serverLink);
+
+    expect(result).toEqual({
+      ok: true,
+      closed: [SUB_CHILD, SUB_ROOT, MAIN_SESSION, WORKER_SESSION],
+      failed: [],
+    });
+    expect(await sessionExists(MAIN_SESSION)).toBe(false);
+    expect(await sessionExists(WORKER_SESSION)).toBe(false);
+    expect(await sessionExists(SUB_ROOT)).toBe(false);
+    expect(await sessionExists(SUB_CHILD)).toBe(false);
+    expect(getSession(MAIN_SESSION)).toBeUndefined();
+    expect(getSession(SUB_ROOT)).toBeUndefined();
+    expect(storeSessions().filter((session) => session.name.startsWith(`deck_sub_${RUN_ID}`))).toHaveLength(0);
+
+    await killSession(PROBE_SESSION).catch(() => {});
+    await newSession(PROBE_SESSION, 'bash -lc "echo PROBE_ALIVE; trap : TERM INT; sleep 999"', { cwd: tmpdir() });
+    await wait(250);
+    expect(await sessionExists(PROBE_SESSION)).toBe(true);
+    const probePane = (await capturePane(PROBE_SESSION)).join('\n');
+    expect(probePane).toContain('PROBE_ALIVE');
+ + await flushStore(); + await loadStore(); + await restoreFromStore(); + + expect(getSession(SUB_ROOT)).toBeUndefined(); + expect(getSession(SUB_CHILD)).toBeUndefined(); + expect(storeSessions().filter((session) => session.name === SUB_ROOT || session.name === SUB_CHILD)).toHaveLength(0); + expect(await sessionExists(SUB_ROOT)).toBe(false); + expect(await sessionExists(SUB_CHILD)).toBe(false); + }, 30_000); +}); diff --git a/test/e2e/sdk-transport-flow.test.ts b/test/e2e/sdk-transport-flow.test.ts index a7391507a..41915b7d3 100644 --- a/test/e2e/sdk-transport-flow.test.ts +++ b/test/e2e/sdk-transport-flow.test.ts @@ -235,6 +235,7 @@ vi.mock('../../src/agent/brain-dispatcher.js', () => ({ BrainDispatcher: vi.fn() import { launchSession } from '../../src/agent/session-manager.js'; import { disconnectAll } from '../../src/agent/provider-registry.js'; import { handleWebCommand } from '../../src/daemon/command-handler.js'; +import { newSession } from '../../src/agent/tmux.js'; describe('sdk transport flow e2e', () => { @@ -312,6 +313,119 @@ describe('sdk transport flow e2e', () => { expect(mocks.store.get('deck_cxsdk_main_brain')?.codexSessionId).toBe('old-codex-thread-id'); }); + it('emits a brain-session inline error when session.restart fails', async () => { + const tmuxNewSession = newSession as ReturnType; + tmuxNewSession.mockRejectedValueOnce(new Error('tmux create failed')); + + mocks.store.set('deck_restart_fail_brain', { + name: 'deck_restart_fail_brain', + projectName: 'restart_fail', + role: 'brain', + agentType: 'shell', + projectDir: '/tmp/restart-fail', + state: 'running', + restarts: 0, + restartTimestamps: [], + createdAt: 1, + updatedAt: 1, + }); + + const serverLink = { send: vi.fn() } as any; + handleWebCommand({ + type: 'session.restart', + project: 'restart_fail', + }, serverLink); + await flushAsync(); + await new Promise((resolve) => setTimeout(resolve, 50)); + + expect(serverLink.send).toHaveBeenCalledWith(expect.objectContaining({ + type: 
'session.error', + project: 'restart_fail', + message: 'tmux create failed', + })); + expect(mocks.emitted).toContainEqual(expect.objectContaining({ + session: 'deck_restart_fail_brain', + type: 'assistant.text', + payload: { + text: '⚠️ Error: tmux create failed', + streaming: false, + }, + })); + }); + + it('pushes an updated session_list after settings restart switches a main session to claude-code-sdk', async () => { + mocks.store.set('deck_settings_switch_brain', { + name: 'deck_settings_switch_brain', + projectName: 'settings_switch', + role: 'brain', + agentType: 'claude-code', + projectDir: '/tmp/settings-switch', + state: 'idle', + restarts: 0, + restartTimestamps: [], + createdAt: 1, + updatedAt: 1, + ccSessionId: 'cc-settings-switch', + }); + + const serverLink = { send: vi.fn() } as any; + handleWebCommand({ + type: 'session.restart', + sessionName: 'deck_settings_switch_brain', + agentType: 'claude-code-sdk', + }, serverLink); + await flushAsync(); + await waitForCondition(() => serverLink.send.mock.calls.some((call) => call[0]?.type === 'session_list')); + + const sessionListPayload = serverLink.send.mock.calls + .map((call) => call[0]) + .find((msg) => msg?.type === 'session_list'); + const switched = sessionListPayload?.sessions?.find((session: any) => session.name === 'deck_settings_switch_brain'); + + expect(switched).toMatchObject({ + name: 'deck_settings_switch_brain', + agentType: 'claude-code-sdk', + runtimeType: 'transport', + }); + }); + + it('pushes a corrective session_list when settings restart fails', async () => { + const tmuxNewSession = newSession as ReturnType; + tmuxNewSession.mockRejectedValueOnce(new Error('tmux create failed')); + mocks.store.set('deck_settings_fail_brain', { + name: 'deck_settings_fail_brain', + projectName: 'settings_fail', + role: 'brain', + agentType: 'shell', + projectDir: '/tmp/settings-fail', + state: 'idle', + restarts: 0, + restartTimestamps: [], + createdAt: 1, + updatedAt: 1, + }); + + const serverLink 
= { send: vi.fn() } as any; + handleWebCommand({ + type: 'session.restart', + sessionName: 'deck_settings_fail_brain', + agentType: 'shell', + }, serverLink); + await flushAsync(); + await waitForCondition(() => serverLink.send.mock.calls.some((call) => call[0]?.type === 'session.error')); + await waitForCondition(() => serverLink.send.mock.calls.some((call) => call[0]?.type === 'session_list')); + + const sessionListPayload = serverLink.send.mock.calls + .map((call) => call[0]) + .find((msg) => msg?.type === 'session_list'); + const session = sessionListPayload?.sessions?.find((entry: any) => entry.name === 'deck_settings_fail_brain'); + + expect(session).toMatchObject({ + name: 'deck_settings_fail_brain', + agentType: 'shell', + }); + }); + it('switches claude-code-sdk model through /model and updates display metadata', async () => { await launchSession({ name: SESSION_CC, diff --git a/test/shared-agent-types.test.ts b/test/shared-agent-types.test.ts new file mode 100644 index 000000000..c6712d5be --- /dev/null +++ b/test/shared-agent-types.test.ts @@ -0,0 +1,19 @@ +import { describe, expect, it } from 'vitest'; +import { getSessionRuntimeType, isTransportSessionAgentType } from '../shared/agent-types.js'; + +describe('shared agent type helpers', () => { + it('recognizes transport-backed session agent types', () => { + expect(isTransportSessionAgentType('claude-code-sdk')).toBe(true); + expect(isTransportSessionAgentType('codex-sdk')).toBe(true); + expect(isTransportSessionAgentType('qwen')).toBe(true); + expect(isTransportSessionAgentType('openclaw')).toBe(true); + expect(isTransportSessionAgentType('claude-code')).toBe(false); + }); + + it('maps session agent types to runtime types', () => { + expect(getSessionRuntimeType('claude-code-sdk')).toBe('transport'); + expect(getSessionRuntimeType('codex-sdk')).toBe('transport'); + expect(getSessionRuntimeType('claude-code')).toBe('process'); + expect(getSessionRuntimeType('shell')).toBe('process'); + }); +}); diff 
--git a/test/shared/session-display.test.ts b/test/shared/session-display.test.ts new file mode 100644 index 000000000..592e52a3a --- /dev/null +++ b/test/shared/session-display.test.ts @@ -0,0 +1,23 @@ +import { describe, expect, it } from 'vitest'; +import { isInternalSessionDisplayValue, pickReadableSessionDisplay } from '../../shared/session-display.js'; + +describe('session display helpers', () => { + it('treats internal session identifiers as unreadable display values', () => { + expect(isInternalSessionDisplayValue('deck_cd_brain')).toBe(true); + expect(isInternalSessionDisplayValue('deck_sub_ab12cd34')).toBe(true); + expect(isInternalSessionDisplayValue('bootmainxowfy6', 'bootmainxowfy6')).toBe(true); + expect(isInternalSessionDisplayValue('bootmainxowfy6')).toBe(true); + }); + + it('keeps human labels and project names readable', () => { + expect(isInternalSessionDisplayValue('cd')).toBe(false); + expect(isInternalSessionDisplayValue('Boot Main')).toBe(false); + expect(isInternalSessionDisplayValue('Worker 1')).toBe(false); + }); + + it('picks the first readable display candidate', () => { + expect(pickReadableSessionDisplay(['deck_sub_ab12cd34', 'deck_cd_brain', 'Readable Main'])).toBe('Readable Main'); + expect(pickReadableSessionDisplay(['bootmainxowfy6', 'Boot Main'], 'bootmainxowfy6')).toBe('Boot Main'); + expect(pickReadableSessionDisplay(['bootmainxowfy6', 'Boot Main'], 'deck_sub_ab12cd34')).toBe('Boot Main'); + }); +}); diff --git a/test/util/model-context.test.ts b/test/util/model-context.test.ts index 585b9f3b7..ab67c7b17 100644 --- a/test/util/model-context.test.ts +++ b/test/util/model-context.test.ts @@ -21,7 +21,14 @@ describe('model context inference', () => { expect(inferContextWindow('gpt-4.1-mini')).toBe(1_000_000); }); + it('maps claude opus family to 1M context', () => { + expect(inferContextWindow('opus[1M]')).toBe(1_000_000); + expect(inferContextWindow('claude-opus-4-1')).toBe(1_000_000); + 
expect(inferContextWindow('claude-opus-4-6')).toBe(1_000_000); + }); + it('prefers model mapping over stale explicit fallback values', () => { expect(resolveContextWindow(400_000, 'gpt-5.4')).toBe(1_000_000); + expect(resolveContextWindow(200_000, 'claude-opus-4-1')).toBe(1_000_000); }); }); diff --git a/test/util/windows-launch-artifacts.test.ts b/test/util/windows-launch-artifacts.test.ts index 6a30fba37..791592708 100644 --- a/test/util/windows-launch-artifacts.test.ts +++ b/test/util/windows-launch-artifacts.test.ts @@ -1,4 +1,11 @@ import { describe, expect, it, vi, beforeEach } from 'vitest'; +import { + UPGRADE_LOCK_FILE, + encodeCmdAsUtf8Bom, + encodeVbsAsUtf16, + writeVbsLauncher, + writeWatchdogCmd, +} from '../../src/util/windows-launch-artifacts.js'; // ── Mock fs so writeWatchdogCmd doesn't touch disk ───────────────────────── @@ -47,7 +54,6 @@ describe('writeWatchdogCmd', () => { }); it('generates watchdog with upgrade lock check', async () => { - const { writeWatchdogCmd, UPGRADE_LOCK_FILE } = await import('../../src/util/windows-launch-artifacts.js'); const paths = { nodeExe: 'C:\\Program Files\\nodejs\\node.exe', imcodesScript: 'C:\\Users\\X\\AppData\\Roaming\\npm\\node_modules\\imcodes\\dist\\src\\index.js', @@ -77,7 +83,6 @@ describe('writeWatchdogCmd', () => { }); it('uses npm global shim instead of hard-coded node+script paths', async () => { - const { writeWatchdogCmd } = await import('../../src/util/windows-launch-artifacts.js'); const paths = { nodeExe: 'C:\\Program Files\\nodejs\\node.exe', imcodesScript: 'C:\\Users\\X\\AppData\\Roaming\\npm\\node_modules\\imcodes\\dist\\src\\index.js', @@ -99,7 +104,6 @@ describe('writeWatchdogCmd', () => { const { existsSync } = await import('fs'); (existsSync as ReturnType).mockReturnValue(false); - const { writeWatchdogCmd } = await import('../../src/util/windows-launch-artifacts.js'); const paths = { nodeExe: 'C:\\Program Files\\nodejs\\node.exe', imcodesScript: 'C:\\dev\\imcodes\\dist\\src\\index.js', 
@@ -117,7 +121,6 @@ describe('writeWatchdogCmd', () => { }); it('watchdog is an infinite loop with 5s retry', async () => { - const { writeWatchdogCmd } = await import('../../src/util/windows-launch-artifacts.js'); const paths = { nodeExe: 'node.exe', imcodesScript: 'C:\\npm\\node_modules\\imcodes\\dist\\src\\index.js', @@ -142,7 +145,6 @@ describe('writeVbsLauncher', () => { }); it('runs watchdog CMD hidden (window style 0)', async () => { - const { writeVbsLauncher } = await import('../../src/util/windows-launch-artifacts.js'); const paths = { nodeExe: '', imcodesScript: '', logPath: '', watchdogPath: 'C:\\Users\\X\\.imcodes\\daemon-watchdog.cmd', @@ -159,7 +161,6 @@ describe('writeVbsLauncher', () => { }); it('writes VBS as UTF-16 LE with BOM (required for non-ASCII paths)', async () => { - const { writeVbsLauncher } = await import('../../src/util/windows-launch-artifacts.js'); const paths = { nodeExe: '', imcodesScript: '', logPath: '', watchdogPath: 'C:\\Users\\云科I\\.imcodes\\daemon-watchdog.cmd', @@ -183,7 +184,6 @@ describe('writeVbsLauncher', () => { }); it('uses On Error Resume Next so wscript never pops up an error dialog', async () => { - const { writeVbsLauncher } = await import('../../src/util/windows-launch-artifacts.js'); const paths = { nodeExe: '', imcodesScript: '', logPath: '', watchdogPath: 'C:\\nonexistent\\path.cmd', @@ -198,7 +198,6 @@ describe('writeVbsLauncher', () => { describe('encodeCmdAsUtf8Bom', () => { it('prepends UTF-8 BOM (EF BB BF)', async () => { - const { encodeCmdAsUtf8Bom } = await import('../../src/util/windows-launch-artifacts.js'); const buf = encodeCmdAsUtf8Bom('@echo off\r\necho 云科\r\n'); expect(buf[0]).toBe(0xEF); expect(buf[1]).toBe(0xBB); @@ -211,7 +210,6 @@ describe('encodeCmdAsUtf8Bom', () => { describe('encodeVbsAsUtf16', () => { it('prepends UTF-16 LE BOM (FF FE)', async () => { - const { encodeVbsAsUtf16 } = await import('../../src/util/windows-launch-artifacts.js'); const buf = encodeVbsAsUtf16('WScript.Echo 
"云科"'); expect(buf[0]).toBe(0xFF); expect(buf[1]).toBe(0xFE); @@ -227,7 +225,6 @@ describe('writeWatchdogCmd encoding', () => { }); it('writes watchdog .cmd as UTF-8 with BOM (required for non-ASCII paths)', async () => { - const { writeWatchdogCmd } = await import('../../src/util/windows-launch-artifacts.js'); const paths = { nodeExe: 'C:\\Program Files\\nodejs\\node.exe', imcodesScript: 'C:\\Users\\云科I\\AppData\\Roaming\\npm\\node_modules\\imcodes\\dist\\src\\index.js', @@ -254,7 +251,6 @@ describe('writeWatchdogCmd encoding', () => { describe('UPGRADE_LOCK_FILE', () => { it('is under .imcodes directory', async () => { - const { UPGRADE_LOCK_FILE } = await import('../../src/util/windows-launch-artifacts.js'); expect(UPGRADE_LOCK_FILE).toContain('.imcodes'); expect(UPGRADE_LOCK_FILE).toContain('upgrade.lock'); }); diff --git a/test/util/windows-upgrade-script.test.ts b/test/util/windows-upgrade-script.test.ts index d74d0c421..3fb3ed60d 100644 --- a/test/util/windows-upgrade-script.test.ts +++ b/test/util/windows-upgrade-script.test.ts @@ -17,8 +17,10 @@ describe('buildWindowsCleanupScript', () => { it('generates a standalone cleanup cmd script', () => { const script = buildWindowsCleanupScript('C:\\Temp\\imcodes-upgrade-123'); expect(script).toContain('@echo off'); + expect(script).toContain('chcp 65001 >nul 2>&1'); expect(script).toContain('timeout /t 120 /nobreak >nul'); - expect(script).toContain('rmdir /s /q "C:\\Temp\\imcodes-upgrade-123"'); + expect(script).toContain('for %%I in ("%~dp0.") do set "SCRIPT_DIR=%%~fI"'); + expect(script).toContain('rmdir /s /q "%SCRIPT_DIR%"'); }); }); @@ -57,13 +59,21 @@ describe('buildWindowsUpgradeBatch', () => { // ── Lock file lifecycle ── it('creates upgrade lock BEFORE npm install', () => { - const lockIdx = batch.indexOf(`echo upgrade > "${INPUT.upgradeLockFile}"`); + const lockIdx = batch.indexOf('echo upgrade > "%UPGRADE_LOCK%"'); const installIdx = batch.indexOf(`call "${INPUT.npmCmd}" install`); 
expect(lockIdx).toBeGreaterThan(-1); expect(installIdx).toBeGreaterThan(-1); expect(lockIdx).toBeLessThan(installIdx); }); + it('switches cmd.exe to UTF-8 before touching path variables', () => { + expect(batch).toContain('chcp 65001 >nul 2>&1'); + expect(batch).toContain('set "LOG_FILE=%SCRIPT_DIR%\\upgrade.log"'); + expect(batch).toContain('set "CLEANUP_VBS=%SCRIPT_DIR%\\cleanup.vbs"'); + expect(batch).toContain('set "VBS_LAUNCHER=%USERPROFILE%\\.imcodes\\daemon-launcher.vbs"'); + expect(batch).toContain('set "UPGRADE_LOCK=%USERPROFILE%\\.imcodes\\upgrade.lock"'); + }); + it('every abort path deletes lock AND restarts VBS', () => { // Split on `goto :done` — each abort block must have both del lock + wscript const blocks = batch.split('goto :done'); @@ -72,7 +82,7 @@ describe('buildWindowsUpgradeBatch', () => { // At least 4 abort paths: install fail, no prefix, no shim, version mismatch expect(abortBlocks.length).toBeGreaterThanOrEqual(4); for (const block of abortBlocks) { - expect(block).toContain(`del "${INPUT.upgradeLockFile}"`); + expect(block).toContain('del "%UPGRADE_LOCK%"'); expect(block).toContain('wscript'); } }); @@ -156,9 +166,9 @@ describe('buildWindowsUpgradeBatch', () => { // No `start /min` either — flashes briefly in taskbar expect(batch).not.toContain('/min cmd /c'); // Cleanup must be invoked via wscript on the cleanup VBS - expect(batch).toContain(`wscript "${INPUT.cleanupVbsPath}"`); + expect(batch).toContain('wscript "%CLEANUP_VBS%"'); // Should be invoked at least 5 times (4 abort paths + 1 success) - const wscriptCleanupCalls = batch.match(new RegExp(`wscript "${INPUT.cleanupVbsPath.replace(/\\/g, '\\\\')}"`, 'g')) ?? []; + const wscriptCleanupCalls = batch.match(/wscript "%CLEANUP_VBS%"/g) ?? 
[]; expect(wscriptCleanupCalls.length).toBeGreaterThanOrEqual(5); }); @@ -173,12 +183,25 @@ describe('buildWindowsUpgradeBatch', () => { const abortBlocks = batch.split('goto :done').slice(0, -1); for (const block of abortBlocks) { // Every abort must restart the daemon via VBS - expect(block).toContain(`wscript "${INPUT.vbsLauncherPath}"`); + expect(block).toContain('wscript "%VBS_LAUNCHER%"'); } // Success path must start new watchdog const successPath = batch.slice(batch.indexOf('Regenerating daemon launch chain')); - expect(successPath).toContain(`wscript "${INPUT.vbsLauncherPath}"`); + expect(successPath).toContain('wscript "%VBS_LAUNCHER%"'); + }); + + it('avoids embedding non-ASCII user paths directly in the batch body', () => { + const nonAscii = buildWindowsUpgradeBatch({ + ...INPUT, + logFile: 'C:\\Users\\云科1\\AppData\\Local\\Temp\\imcodes-upgrade-123\\upgrade.log', + cleanupVbsPath: 'C:\\Users\\云科1\\AppData\\Local\\Temp\\imcodes-upgrade-123\\cleanup.vbs', + vbsLauncherPath: 'C:\\Users\\云科1\\.imcodes\\daemon-launcher.vbs', + upgradeLockFile: 'C:\\Users\\云科1\\.imcodes\\upgrade.lock', + }); + expect(nonAscii).not.toContain('云科1'); + expect(nonAscii).toContain('%USERPROFILE%\\.imcodes\\daemon-launcher.vbs'); + expect(nonAscii).toContain('%SCRIPT_DIR%\\cleanup.vbs'); }); }); diff --git a/web/src/api.ts b/web/src/api.ts index ad22f8961..379e4fcec 100644 --- a/web/src/api.ts +++ b/web/src/api.ts @@ -577,6 +577,7 @@ export interface SubSessionData { quotaMeta?: import('../../shared/provider-quota.js').ProviderQuotaMeta | null; effort?: import('../../shared/effort-levels.js').TransportEffortLevel | null; transportConfig?: Record | null; + transportPendingMessages?: string[] | null; } export async function listSubSessions(serverId: string): Promise { @@ -678,6 +679,7 @@ export async function patchSubSession( serverId: string, subId: string, body: { + type?: string | null; label?: string | null; closedAt?: number | null; description?: string | null; @@ -698,7 
+700,7 @@ export async function patchSubSession( export async function patchSession( serverId: string, sessionName: string, - body: { label?: string | null; description?: string | null; cwd?: string | null }, + body: { label?: string | null; description?: string | null; cwd?: string | null; agentType?: string | null }, ): Promise { await apiFetch(`/api/server/${serverId}/sessions/${sessionName}`, { method: 'PATCH', diff --git a/web/src/app.tsx b/web/src/app.tsx index 705065721..9eacd1244 100644 --- a/web/src/app.tsx +++ b/web/src/app.tsx @@ -1,6 +1,12 @@ import { useState, useEffect, useCallback, useRef, useMemo } from 'preact/hooks'; -import { FileBrowser } from './components/FileBrowser.js'; +import { + FileBrowser, + type FileBrowserPreviewRequest, + type FileBrowserPreviewState, + type FileBrowserPreviewUpdate, +} from './components/FileBrowser.js'; import { mapP2pStatusToUiState, type P2pActivePhase, type P2pProgressNodeStatus } from '@shared/p2p-status.js'; +import { DAEMON_MSG } from '@shared/daemon-events.js'; function mapP2pRunToDiscussion(r: Record) { const rawSnapshot = r.progress_snapshot; @@ -27,6 +33,13 @@ function mapP2pRunToDiscussion(r: Record) { phase: typeof n.phase === 'string' ? n.phase as 'initial' | 'hop' | 'summary' : undefined, status: String(n.status ?? 'pending') as P2pProgressNodeStatus, })) : undefined; + const hopStates = Array.isArray(source.hop_states) ? source.hop_states.map((hop: any) => ({ + hopIndex: Number(hop.hop_index ?? 0), + roundIndex: Number(hop.round_index ?? 0), + session: typeof hop.session === 'string' ? hop.session : undefined, + mode: typeof hop.mode === 'string' ? hop.mode : undefined, + status: String(hop.status ?? 'queued') as 'queued' | 'dispatched' | 'running' | 'completed' | 'timed_out' | 'failed' | 'cancelled', + })) : undefined; return { id, topic: `P2P ${mode} · ${initiatorLabel}`, @@ -35,6 +48,7 @@ function mapP2pRunToDiscussion(r: Record) { currentRound: source.current_round ?? 
1, maxRounds: source.total_rounds ?? 1, completedHops: source.completed_hops_count ?? 0, + completedRoundHops: typeof source.completed_round_hops_count === 'number' ? source.completed_round_hops_count : undefined, totalHops, activeHop: source.active_hop_number ?? null, activeRoundHop: source.active_round_hop_number ?? null, @@ -48,6 +62,7 @@ function mapP2pRunToDiscussion(r: Record) { startedAt: source.created_at ? new Date(source.created_at).getTime() : undefined, hopStartedAt: typeof source.hop_started_at === 'number' ? source.hop_started_at : undefined, nodes, + hopStates, }; } import { useTranslation } from 'react-i18next'; @@ -84,6 +99,7 @@ import type { PanelRenderContext } from './components/PinnedPanelRegistry.js'; import './components/pinnedPanelTypes.js'; // register all panel types import { LOCAL_WEB_PREVIEW_PANEL_TYPE } from './components/pinnedPanelTypes.js'; import { LocalWebPreviewPanel } from './components/LocalWebPreviewPanel.js'; +import { getSessionRuntimeType } from '@shared/agent-types.js'; import { useSyncedPreference } from './hooks/useSyncedPreference.js'; import { useSubSessions } from './hooks/useSubSessions.js'; import { useProviderStatus } from './hooks/useProviderStatus.js'; @@ -101,6 +117,11 @@ import { REPO_MSG } from '@shared/repo-types.js'; import { shouldSubscribeTerminalRaw, type TerminalSubscribeViewMode } from './terminal-subscribe-mode.js'; import { onWatchCommand } from './watch-bridge.js'; import { watchProjectionStore } from './watch-projection.js'; +import { isIdleSessionStateTimelineEvent, isRunningTimelineEvent } from './timeline-running.js'; +import { extractTransportPendingMessages } from './transport-queue.js'; +import { ingestTimelineEventForCache } from './hooks/useTimeline.js'; +import { getMobileKeyboardState } from './mobile-keyboard.js'; +import { pickReadableSessionDisplay } from '@shared/session-display.js'; // On web: if opened by the native app for passkey auth, render the bridge page. 
const nativeCallback = typeof window !== 'undefined' @@ -109,6 +130,30 @@ const nativeCallback = typeof window !== 'undefined' type ViewMode = TerminalSubscribeViewMode; +function buildSessionToastLabel( + sessionName: string, + options: { + label?: string | null; + parentLabel?: string | null; + project?: string | null; + agentType?: string | null; + }, +): string { + const label = pickReadableSessionDisplay([options.label], sessionName); + const parentLabel = pickReadableSessionDisplay([options.parentLabel], sessionName); + const project = pickReadableSessionDisplay([options.project], sessionName); + const agentType = options.agentType?.trim() || undefined; + const typeSuffix = agentType ? `(${agentType})` : ''; + + if (sessionName.startsWith('deck_sub_')) { + const name = label || parentLabel || project || agentType || sessionName.replace(/^deck_sub_/, ''); + return `${name}${label ? typeSuffix : ''}${parentLabel && name !== parentLabel ? `@${parentLabel}` : ''}`; + } + + const name = label || project || sessionName; + return `${name}${typeSuffix}`; +} + /** A panel pinned to the sidebar. Uses sessionName as stable identity. */ export interface PinnedPanel { /** Unique ID for this pinned panel instance */ @@ -234,14 +279,18 @@ export function App() { const vv = window.visualViewport; if (!vv) return; let inputFocused = false; + let hadKeyboardOpen = false; let scrollTimer: ReturnType | undefined; const update = () => { document.documentElement.style.setProperty('--vvh', `${vv.height}px`); // Detect keyboard open: viewport shrink + optional input-focus fallback. // Chinese IME candidate bars can be ~40px, so use low threshold when input is focused. 
const shrink = window.innerHeight - vv.height; - const kbOpen = shrink > 40 || (inputFocused && shrink > 15); + const state = getMobileKeyboardState(inputFocused, shrink, hadKeyboardOpen); + hadKeyboardOpen = state.hadKeyboardOpen; + const { kbOpen, hideInputUi } = state; document.documentElement.classList.toggle('kb-open', kbOpen); + document.documentElement.classList.toggle('input-focused', hideInputUi); // Reset any scroll/offset caused by keyboard opening on mobile. // Always reset — iOS can have vv.offsetTop > 0 even when scrollY is 0. window.scrollTo(0, 0); @@ -279,6 +328,7 @@ export function App() { }; const onFocusOut = () => { inputFocused = false; + hadKeyboardOpen = false; document.documentElement.classList.remove('input-focused'); clearTimeout(scrollTimer); update(); @@ -566,6 +616,7 @@ export function App() { const stoppedNavTimerRef = useRef | null>(null); const [latencyMs, setLatencyMs] = useState(null); const [idleAlerts, setIdleAlerts] = useState>(new Set()); + const [idleFlashTokens, setIdleFlashTokens] = useState>(() => new Map()); const [toasts, setToasts] = useState>([]); const [detectedModels, setDetectedModels] = useState>(new Map()); const [subUsages, setSubUsages] = useState>(new Map()); @@ -607,6 +658,13 @@ export function App() { } return maxId; }, [subZIndexes, openSubIds]); + const flashIdleSession = useCallback((sessionName: string) => { + setIdleFlashTokens((prev) => { + const next = new Map(prev); + next.set(sessionName, (next.get(sessionName) ?? 
0) + 1); + return next; + }); + }, []); const focusedSubIdRef = useRef(focusedSubId); focusedSubIdRef.current = focusedSubId; @@ -655,8 +713,9 @@ export function App() { const [showRepoPage, setShowRepoPage] = useState(false); const [repoFocusLatestAction, setRepoFocusLatestAction] = useState<{ token: number; failedJobName?: string; failedStepName?: string } | null>(null); const [pendingRepoToastSession, setPendingRepoToastSession] = useState<{ sessionName: string; focus: { token: number; failedJobName?: string; failedStepName?: string } } | null>(null); - /** File path for the floating file preview window (opened from pinned file browser) */ - const [previewFilePath, setPreviewFilePath] = useState(null); + /** Floating file preview request opened from pinned file browser. */ + const [previewFileRequest, setPreviewFileRequest] = useState(null); + const [previewFileCache, setPreviewFileCache] = useState>({}); const [repoContexts, setRepoContexts] = useState>(new Map()); const repoContextsRef = useRef(repoContexts); repoContextsRef.current = repoContexts; @@ -703,6 +762,7 @@ export function App() { currentRound: number; maxRounds: number; completedHops: number; + completedRoundHops?: number; totalHops: number; activeHop?: number | null; activeRoundHop?: number | null; @@ -720,6 +780,13 @@ export function App() { phase?: 'initial' | 'hop' | 'summary'; status: 'done' | 'active' | 'pending' | 'skipped'; }>; + hopStates?: Array<{ + hopIndex: number; + roundIndex: number; + session?: string; + mode?: string; + status: 'queued' | 'dispatched' | 'running' | 'completed' | 'timed_out' | 'failed' | 'cancelled'; + }>; /** Discussion file ID for navigation (P2P runs use discussion_id, not run id) */ fileId?: string; /** Epoch ms when the P2P run was created (for elapsed timer) */ @@ -860,6 +927,25 @@ export function App() { const subSessionsRef = useRef(subSessions); subSessionsRef.current = subSessions; + useEffect(() => { + const liveSessionNames = new Set([ + 
...sessions.map((session) => session.name), + ...subSessions.map((sub) => sub.sessionName), + ]); + setIdleFlashTokens((prev) => { + let changed = false; + const next = new Map(); + for (const [sessionName, token] of prev) { + if (liveSessionNames.has(sessionName)) { + next.set(sessionName, token); + continue; + } + changed = true; + } + return changed ? next : prev; + }); + }, [sessions, subSessions]); + // When sub-sessions load from API (after session_list already fired), sync them to Watch projection useEffect(() => { if (subSessions.length === 0 || !selectedServerId) return; @@ -900,6 +986,38 @@ export function App() { ))); }, [setPinnedPanels]); + const handlePreviewFileRequest = useCallback((request: FileBrowserPreviewRequest) => { + const cached = previewFileCache[request.path]; + setPreviewFileRequest({ + ...request, + preview: request.preview ?? cached?.preview, + preferDiff: request.preferDiff ?? cached?.preferDiff, + }); + }, [previewFileCache]); + + const handlePreviewStateChange = useCallback((update: FileBrowserPreviewUpdate) => { + setPreviewFileCache((prev) => { + const existing = prev[update.path]; + if (existing?.preview === update.preview && existing.preferDiff === update.preferDiff) return prev; + return { + ...prev, + [update.path]: { + preferDiff: update.preferDiff, + preview: update.preview, + }, + }; + }); + setPreviewFileRequest((prev) => ( + prev?.path === update.path + ? { + ...prev, + preferDiff: prev.preferDiff ?? update.preferDiff, + preview: update.preview, + } + : prev + )); + }, []); + /** Generic unpin: remove from pinnedPanels + reopen the source floating window. */ const unpinPanel = useCallback((panel: PinnedPanel) => { setPinnedPanels((prev) => prev.filter((p) => p.id !== panel.id)); @@ -1089,6 +1207,9 @@ export function App() { quotaUsageLabel: s.quotaUsageLabel, quotaMeta: s.quotaMeta ?? existing?.quotaMeta, effort: s.effort ?? existing?.effort, + transportPendingMessages: (s.state === 'queued' || s.state === 'running') + ? 
(existing?.transportPendingMessages ?? []) + : [], }; })); setSessionsLoaded(true); @@ -1123,7 +1244,21 @@ export function App() { // Detect model from JSONL usage.update events (authoritative, overrides terminal scan) if (msg.type === 'timeline.event') { const event = msg.event; + ingestTimelineEventForCache(event, selectedServerId); watchProjectionStore.handleTimelineEvent(event); + if (isRunningTimelineEvent(event) && !event.sessionId.startsWith('deck_sub_')) { + setSessions((prev) => prev.map((s) => + s.name === event.sessionId && s.state !== 'running' + ? { ...s, state: 'running' as SessionInfo['state'] } + : s, + )); + } + if (isIdleSessionStateTimelineEvent(event)) { + flashIdleSession(event.sessionId); + if (!event.sessionId.startsWith('deck_sub_')) { + setIdleAlerts((prev) => new Set([...prev, event.sessionId])); + } + } if (event.type === 'ask.question') { setPendingQuestion({ sessionName: event.sessionId, @@ -1134,9 +1269,32 @@ export function App() { // Sync session state from live timeline events (running/idle) if (event.type === 'session.state' && !event.sessionId.startsWith('deck_sub_')) { const liveState = String(event.payload.state ?? ''); - if (liveState === 'running' || liveState === 'idle') { + const hasPendingMessagesField = Object.prototype.hasOwnProperty.call(event.payload ?? {}, 'pendingMessages'); + if (liveState === 'queued') { + const pendingMessages = extractTransportPendingMessages(event.payload.pendingMessages); setSessions((prev) => prev.map((s) => - s.name === event.sessionId ? { ...s, state: liveState as SessionInfo['state'] } : s, + s.name === event.sessionId + ? { ...s, transportPendingMessages: pendingMessages } + : s, + )); + } else if (liveState === 'running') { + const pendingMessages = hasPendingMessagesField + ? extractTransportPendingMessages(event.payload.pendingMessages) + : null; + setSessions((prev) => prev.map((s) => + s.name === event.sessionId + ? 
{ + ...s, + state: 'running' as SessionInfo['state'], + transportPendingMessages: pendingMessages ?? (s.transportPendingMessages ?? []), + } + : s, + )); + } else if (liveState === 'idle') { + setSessions((prev) => prev.map((s) => + s.name === event.sessionId + ? { ...s, state: liveState as SessionInfo['state'], transportPendingMessages: [] } + : s, )); } } @@ -1197,21 +1355,19 @@ export function App() { const parentLabel = msg.parentLabel as string | undefined; const agentType = (msg.agentType as string | undefined) || localSub?.type || undefined; const rawProject = (msg.project as string) || sessionName; - let displayProject: string; - const typeSuffix = agentType ? `(${agentType})` : ''; - if (sessionName.startsWith('deck_sub_')) { - const name = label || agentType || sessionName.replace(/^deck_sub_/, ''); - displayProject = `${name}${label ? typeSuffix : ''}${parentLabel ? `@${parentLabel}` : ''}`; - } else { - const name = label || rawProject; - displayProject = `${name}${typeSuffix}`; - } + const displayProject = buildSessionToastLabel(sessionName, { + label, + parentLabel, + project: rawProject, + agentType, + }); if (!sessionName.startsWith('deck_sub_')) { // Main session: update state + tab alert - setSessions((prev) => prev.map((s) => s.name === sessionName ? { ...s, state: 'idle' as SessionInfo['state'] } : s)); + setSessions((prev) => prev.map((s) => s.name === sessionName ? 
{ ...s, state: 'idle' as SessionInfo['state'], transportPendingMessages: [] } : s)); // Always flash the tab — even if it's the active one setIdleAlerts((prev) => new Set([...prev, sessionName])); } + flashIdleSession(sessionName); // Always show a toast (main + sub sessions) const id = Date.now(); setToasts((prev) => [...prev, { id, sessionName, project: displayProject, kind: 'idle' }]); @@ -1234,15 +1390,12 @@ export function App() { const parentLabel = msg.parentLabel as string | undefined; const agentType = (msg.agentType as string | undefined) || localSub?.type || undefined; const rawProject = msg.project || sessionName; - const typeSuffix = agentType ? `(${agentType})` : ''; - let displayProject: string; - if (sessionName.startsWith('deck_sub_')) { - const name = label || agentType || sessionName.replace(/^deck_sub_/, ''); - displayProject = `${name}${label ? typeSuffix : ''}${parentLabel ? `@${parentLabel}` : ''}`; - } else { - const name = label || rawProject; - displayProject = `${name}${typeSuffix}`; - } + const displayProject = buildSessionToastLabel(sessionName, { + label, + parentLabel, + project: rawProject, + agentType, + }); const id = Date.now(); setToasts((prev) => [...prev, { id, sessionName, project: displayProject, kind: 'notification', title: msg.title, message: msg.message }]); setTimeout(() => setToasts((prev) => prev.filter((t) => t.id !== id)), 8000); @@ -1330,6 +1483,9 @@ export function App() { }, 120_000); } } + if (msg.type === 'p2p.cancel_response' && msg.ok && msg.runId) { + setDiscussions((prev) => prev.filter((d) => d.id !== `p2p_${msg.runId}`)); + } if (msg.type === 'p2p.status_response') { const runs = Array.isArray(msg.runs) ? 
msg.runs @@ -1341,7 +1497,6 @@ export function App() { setDiscussions((prev) => { const retained = prev.filter((d) => { if (!d.id.startsWith('p2p_')) return true; - if (d.state === 'done' || d.state === 'failed') return true; return activeIds.has(d.id); }); const merged = [...retained]; @@ -1388,7 +1543,19 @@ export function App() { } } } - if (msg.type === 'daemon.disconnected') { + if (msg.type === DAEMON_MSG.UPGRADE_BLOCKED) { + const id = Date.now() + Math.random(); + setToasts((prev) => [...prev, { + id, + sessionName: '', + project: '', + kind: 'notification', + title: trans('toast.upgrade_blocked_title'), + message: trans('toast.upgrade_blocked_p2p_active'), + }]); + setTimeout(() => setToasts((prev) => prev.filter((x) => x.id !== id)), 8000); + } + if (msg.type === DAEMON_MSG.DISCONNECTED) { // Daemon went offline — keep existing session data visible, just update status setDaemonOnline(false); watchProjectionStore.setSnapshotStatus('stale'); @@ -1409,7 +1576,7 @@ export function App() { // Auto-dismiss after 10 seconds setTimeout(() => setToasts((prev) => prev.filter((x) => x.id !== id)), 10_000); } - if (msg.type === 'daemon.reconnected') { + if (msg.type === DAEMON_MSG.RECONNECTED) { setDaemonOnline(true); // Daemon process (re)started — all its subscriptions are gone. // Re-subscribe active targets first, then stagger the rest to avoid a herd. 
@@ -1813,32 +1980,11 @@ export function App() { const handleStopProject = useCallback((project: string) => { if (!wsRef.current) return; - const parentNames = new Set(sessionsRef.current.filter((s) => s.project === project).map((s) => s.name)); - const descendants = new Set(); - - let changed = true; - while (changed) { - changed = false; - for (const sub of subSessionsRef.current) { - if (!sub.parentSession) continue; - if (descendants.has(sub.id)) continue; - if (!parentNames.has(sub.parentSession)) continue; - descendants.add(sub.id); - parentNames.add(sub.sessionName); - changed = true; - } - } - - for (const subId of descendants) { - closeSubSession(subId); - } - + setSessions((prev) => prev.map((s) => + s.project === project ? { ...s, state: 'stopping' as SessionInfo['state'] } : s, + )); wsRef.current.sendSessionCommand('stop', { project }); - setSessions((prev) => prev.filter((s) => s.project !== project)); - if (sessions.some((s) => s.project === project && s.name === activeSession)) { - setActiveSession(null); - } - }, [sessions, activeSession, setActiveSession, closeSubSession]); + }, []); const handleRestartProject = useCallback((project: string, fresh?: boolean) => { wsRef.current?.sendSessionCommand('restart', { project, ...(fresh ? { fresh: true } : {}) }); @@ -2150,6 +2296,7 @@ export function App() { subSessions={subSessions} activeSession={activeSession} unreadCounts={unreadCounts} + idleFlashTokens={idleFlashTokens} p2pSessionLabels={p2pSessionLabels} onSelectSession={(name) => { setActiveSession(name); @@ -2190,7 +2337,8 @@ export function App() { serverId: selectedServerId ?? 
'', subSessions, inputRefsMap, - onPreviewFile: (path) => setPreviewFilePath(path), + onPreviewFile: (request) => handlePreviewFileRequest({ ...request, sourcePreviewLive: true }), + onPreviewStateChange: handlePreviewStateChange, activeSession, activeProjectDir: activeSessionInfo?.projectDir, sessions, @@ -2551,6 +2699,7 @@ export function App() { { setActiveSession(name); @@ -2685,7 +2835,8 @@ export function App() { serverId: selectedServerId ?? '', subSessions, inputRefsMap, - onPreviewFile: (path) => { setPreviewFilePath(path); closeSidebar(); }, + onPreviewFile: (request) => { handlePreviewFileRequest({ ...request, sourcePreviewLive: false }); closeSidebar(); }, + onPreviewStateChange: handlePreviewStateChange, activeSession, activeProjectDir: activeSessionInfo?.projectDir, sessions, @@ -2798,26 +2949,29 @@ export function App() { )} {/* Floating file preview — one file at a time, opened from pinned file browser */} - {previewFilePath && wsRef.current && ( + {previewFileRequest && wsRef.current && ( setPreviewFilePath(null)} + title={previewFileRequest.path.split(/[/\\]/).pop() ?? 'Preview'} + onClose={() => setPreviewFileRequest(null)} defaultW={700} defaultH={500} > {}} + onClose={() => setPreviewFileRequest(null)} /> )} @@ -2951,6 +3105,7 @@ export function App() { ws={wsRef.current} connected={connected} active={isOpen} + idleFlashToken={idleFlashTokens.get(sub.sessionName) ?? 0} onDiff={registerDiffApplyer} onHistory={registerHistoryApplyer} onMinimize={() => setOpenSubIds((prev) => { const s = new Set(prev); s.delete(sub.id); return s; })} @@ -3059,6 +3214,8 @@ export function App() { if (settingsTarget.subId) { // Sub-session: update local state to reflect saved label/description/cwd updateSubLocal(settingsTarget.subId, { + type: fields.type !== undefined ? fields.type : undefined, + runtimeType: fields.type !== undefined ? getSessionRuntimeType(fields.type) : undefined, label: fields.label !== undefined ? (fields.label ?? 
null) : undefined, description: fields.description !== undefined ? (fields.description ?? null) : undefined, cwd: fields.cwd !== undefined ? (fields.cwd ?? null) : undefined, @@ -3068,8 +3225,13 @@ export function App() { setSessions((prev) => prev.map((s) => { if (s.name !== settingsTarget.sessionName) return s; const updated = { ...s }; + if (fields.type !== undefined) { + updated.agentType = fields.type; + updated.runtimeType = getSessionRuntimeType(fields.type); + } if (fields.label !== undefined) updated.label = fields.label ?? null; if (fields.description !== undefined) updated.description = fields.description ?? null; + if (fields.cwd !== undefined) updated.projectDir = fields.cwd ?? updated.projectDir; return updated; })); } diff --git a/web/src/components/AtPicker.tsx b/web/src/components/AtPicker.tsx index 1916bfed1..34360bddb 100644 --- a/web/src/components/AtPicker.tsx +++ b/web/src/components/AtPicker.tsx @@ -5,8 +5,13 @@ import { useState, useEffect, useRef, useCallback, useMemo } from 'preact/hooks'; import { useTranslation } from 'react-i18next'; import type { ServerMessage } from '../ws-client.js'; -import { COMBO_PRESETS, COMBO_SEPARATOR, type P2pSavedConfig } from '@shared/p2p-modes.js'; -import { getUserPref, saveUserPref } from '../api.js'; +import { + buildP2pConfigSelection, + COMBO_PRESETS, + type P2pSavedConfig, +} from '@shared/p2p-modes.js'; +import { P2pComboManager } from './P2pComboManager.js'; +import { useP2pCustomCombos } from './p2p-combos.js'; interface SessionEntry { name: string; @@ -154,26 +159,6 @@ const MODE_COLORS: Record = { discuss: '#22c55e', }; -function comboModeColor(key: string): string { - const last = key.split(COMBO_SEPARATOR).pop()?.trim(); - return last ? (MODE_COLORS[last] ?? 
'#94a3b8') : '#94a3b8'; -} - -function comboModeLabel(key: string, t: (k: string) => string): string { - return key.split(COMBO_SEPARATOR).map((m) => t(`p2p.mode_${m.trim()}`)).join('→'); -} - -function buildEffectiveConfig(config: P2pSavedConfig, modeOverride: string): P2pSavedConfig { - if (modeOverride === 'config') return config; - const overriddenSessions: P2pSavedConfig['sessions'] = {}; - for (const [session, entry] of Object.entries(config.sessions)) { - overriddenSessions[session] = entry.enabled && entry.mode !== 'skip' - ? { ...entry, mode: modeOverride } - : { ...entry }; - } - return { ...config, sessions: overriddenSessions }; -} - export function AtPicker({ query, sessions, @@ -200,31 +185,8 @@ export function AtPicker({ const [configModeOverride, setConfigModeOverride] = useState('config'); const [configPickerFocus, setConfigPickerFocus] = useState<'mode' | 'rounds' | 'combo'>('rounds'); const [comboHighlight, setComboHighlight] = useState(0); - const [customCombos, setCustomCombos] = useState([]); - const [buildingCombo, setBuildingCombo] = useState([]); - const CUSTOM_COMBOS_PREF_KEY = 'p2p_custom_combos'; - const BUILDER_MODES = ['audit', 'review', 'plan', 'brainstorm', 'discuss'] as const; const CONFIG_ROUNDS_OPTIONS = [1, 2, 3, 5] as const; - - // Load custom combos from server on mount - useEffect(() => { - void getUserPref(CUSTOM_COMBOS_PREF_KEY).then((raw) => { - if (raw && typeof raw === 'string') { - try { setCustomCombos(JSON.parse(raw)); } catch { /* ignore */ } - } - }); - }, []); // eslint-disable-line react-hooks/exhaustive-deps - - const saveCustomCombos = useCallback((combos: string[]) => { - setCustomCombos(combos); - void saveUserPref(CUSTOM_COMBOS_PREF_KEY, JSON.stringify(combos)).catch(() => {}); - }, []); - - const allCombos = useMemo(() => { - const presetKeys = new Set(COMBO_PRESETS.map((c) => c.key)); - const custom = customCombos.filter((k) => !presetKeys.has(k)); - return { presets: COMBO_PRESETS, custom }; - }, 
[customCombos]); + const { customCombos, saveCustomCombos, allCombos } = useP2pCustomCombos(); const debounceRef = useRef | null>(null); const requestIdRef = useRef(null); const containerRef = useRef(null); @@ -403,13 +365,12 @@ export function AtPicker({ if (configPickerFocus === 'combo') { const allKeys = [...COMBO_PRESETS.map((c) => c.key), ...allCombos.custom]; const key = allKeys[comboHighlight] ?? allKeys[0]; - const pipeline = key.split(COMBO_SEPARATOR); - const cfg = buildEffectiveConfig(p2pConfig, pipeline[0]); - onSelectAllConfig?.(cfg, pipeline.length, key); + const selection = buildP2pConfigSelection(p2pConfig, key); + onSelectAllConfig?.(selection.config, selection.rounds, selection.modeOverride); } else { const rounds = CONFIG_ROUNDS_OPTIONS[configRoundsHighlight]; - const effectiveConfig = buildEffectiveConfig(p2pConfig, configModeOverride); - onSelectAllConfig?.(effectiveConfig, rounds, configModeOverride); + const selection = buildP2pConfigSelection(p2pConfig, configModeOverride, rounds); + onSelectAllConfig?.(selection.config, selection.rounds, selection.modeOverride); } setConfigRoundsPicker(false); setConfigPickerFocus('rounds'); @@ -509,7 +470,7 @@ export function AtPicker({ // ── Config rounds sub-picker (for @all+ with custom rounds) ── if (configRoundsPicker && p2pConfig) { const ALL_MODES = ['config', 'audit', 'review', 'plan', 'brainstorm', 'discuss']; - const effectiveConfig = buildEffectiveConfig(p2pConfig, configModeOverride); + const effectiveConfig = buildP2pConfigSelection(p2pConfig, configModeOverride).config; const participants = Object.entries(effectiveConfig.sessions) .filter(([, e]) => e.enabled && e.mode !== 'skip'); return ( @@ -561,8 +522,8 @@ export function AtPicker({ : modeBtnHoverStyle.boxShadow, } : modeBtnStyle} onClick={() => { - const cfg = buildEffectiveConfig(p2pConfig, configModeOverride); - onSelectAllConfig?.(cfg, r, configModeOverride); + const selection = buildP2pConfigSelection(p2pConfig, configModeOverride, 
r); + onSelectAllConfig?.(selection.config, selection.rounds, selection.modeOverride); setConfigRoundsPicker(false); setConfigPickerFocus('rounds'); }} @@ -595,116 +556,25 @@ export function AtPicker({ marginTop: 6, color: configPickerFocus === 'combo' ? '#93c5fd' : groupLabelStyle.color, }}>{t('p2p.combo_label')} -
- {/* Built-in presets */} - {COMBO_PRESETS.map((c, idx) => { - const color = comboModeColor(c.key); - const isHl = configPickerFocus === 'combo' && comboHighlight === idx; - return ( - - ); - })} - {/* User custom combos */} - {allCombos.custom.map((key) => { - const color = comboModeColor(key); - const pipeline = key.split(COMBO_SEPARATOR); - return ( - - - - - ); - })} -
- {/* Custom combo builder — hidden when at 5 custom limit */} - {(customCombos.length < 5 || buildingCombo.length > 0) && ( - <> -
- {buildingCombo.length > 0 && ( - - {buildingCombo.map((m) => t(`p2p.mode_${m}`)).join('→')} - - )} - {buildingCombo.length > 0 && ( - - )} - {buildingCombo.length >= 2 && ( - - )} -
-
- {BUILDER_MODES.map((m) => ( - - ))} -
- - )} + combo.key), ...allCombos.custom][comboHighlight] ?? null + : null} + onHoverCombo={(key) => { + const idx = [...COMBO_PRESETS.map((combo) => combo.key), ...allCombos.custom].indexOf(key); + if (idx >= 0) setComboHighlight(idx); + setConfigPickerFocus('combo'); + }} + onSelectCombo={(key) => { + const selection = buildP2pConfigSelection(p2pConfig, key); + onSelectAllConfig?.(selection.config, selection.rounds, selection.modeOverride); + setConfigRoundsPicker(false); + setConfigPickerFocus('rounds'); + }} + /> ); } diff --git a/web/src/components/ChatMarkdown.tsx b/web/src/components/ChatMarkdown.tsx index 520b0bff6..52a2e67ce 100644 --- a/web/src/components/ChatMarkdown.tsx +++ b/web/src/components/ChatMarkdown.tsx @@ -68,19 +68,19 @@ function renderToken( case 'heading': { const t = token as Tokens.Heading; const Tag = `h${t.depth}` as keyof h.JSX.IntrinsicElements; - return {renderInlineTokens(t.tokens, onPathClick, onUrlClick, inLink)}; + return {renderInlineTokens(t.tokens, onPathClick, onUrlClick, inLink, onDownload)}; } case 'paragraph': { const t = token as Tokens.Paragraph; - return

{renderInlineTokens(t.tokens, onPathClick, onUrlClick, inLink)}

; + return

{renderInlineTokens(t.tokens, onPathClick, onUrlClick, inLink, onDownload)}

; } case 'text': { const t = token as Tokens.Text; // Text tokens may have sub-tokens (e.g. from inline parsing) if ('tokens' in t && t.tokens && t.tokens.length > 0) { - return {renderInlineTokens(t.tokens, onPathClick, onUrlClick, inLink)}; + return {renderInlineTokens(t.tokens, onPathClick, onUrlClick, inLink, onDownload)}; } // Plain text — apply path/URL detection IF NOT already inside a link if (inLink) return {t.raw}; @@ -89,17 +89,17 @@ function renderToken( case 'strong': { const t = token as Tokens.Strong; - return {renderInlineTokens(t.tokens, onPathClick, onUrlClick, inLink)}; + return {renderInlineTokens(t.tokens, onPathClick, onUrlClick, inLink, onDownload)}; } case 'em': { const t = token as Tokens.Em; - return {renderInlineTokens(t.tokens, onPathClick, onUrlClick, inLink)}; + return {renderInlineTokens(t.tokens, onPathClick, onUrlClick, inLink, onDownload)}; } case 'del': { const t = token as Tokens.Del; - return {renderInlineTokens(t.tokens, onPathClick, onUrlClick, inLink)}; + return {renderInlineTokens(t.tokens, onPathClick, onUrlClick, inLink, onDownload)}; } case 'codespan': { @@ -129,13 +129,26 @@ function renderToken( const t = token as Tokens.Link; if (isLocalPath(t.href)) { return ( - onPathClick?.(t.href)} - title={t.href} - > - {renderInlineTokens(t.tokens, onPathClick, onUrlClick, true)} + + onPathClick?.(t.href)} + title={t.href} + > + {renderInlineTokens(t.tokens, onPathClick, onUrlClick, true, onDownload)} + + {onDownload && hasFileExtension(t.href) && ( + + )} ); } @@ -150,7 +163,7 @@ function renderToken( onUrlClick?.(t.href); }} > - {renderInlineTokens(t.tokens, onPathClick, onUrlClick, true)} + {renderInlineTokens(t.tokens, onPathClick, onUrlClick, true, onDownload)} ); } @@ -168,7 +181,7 @@ function renderToken( {t.header.map((cell, ci) => ( - {renderInlineTokens(cell.tokens, onPathClick, onUrlClick)} + {renderInlineTokens(cell.tokens, onPathClick, onUrlClick, false, onDownload)} ))} @@ -178,7 +191,7 @@ function renderToken( 
{row.map((cell, ci) => ( - {renderInlineTokens(cell.tokens, onPathClick, onUrlClick)} + {renderInlineTokens(cell.tokens, onPathClick, onUrlClick, false, onDownload)} ))} @@ -196,7 +209,7 @@ function renderToken( {t.items.map((item, li) => (
  • {item.task && } - {renderTokens(item.tokens, onPathClick, onUrlClick)} + {renderTokens(item.tokens, onPathClick, onUrlClick, false, onDownload)}
  • ))} @@ -205,7 +218,7 @@ function renderToken( case 'blockquote': { const t = token as Tokens.Blockquote; - return
    {renderTokens(t.tokens, onPathClick, onUrlClick)}
    ; + return
    {renderTokens(t.tokens, onPathClick, onUrlClick, false, onDownload)}
    ; } case 'hr': diff --git a/web/src/components/ChatView.tsx b/web/src/components/ChatView.tsx index 44a16a3bd..9d9e7c8e7 100644 --- a/web/src/components/ChatView.tsx +++ b/web/src/components/ChatView.tsx @@ -11,6 +11,7 @@ import type { TimelineEvent, WsClient } from '../ws-client.js'; import { FileBrowser } from './FileBrowser.js'; import { FloatingPanel } from './FloatingPanel.js'; import { ChatMarkdown } from './ChatMarkdown.js'; +import { useNowTicker } from '../hooks/useNowTicker.js'; interface Props { events: TimelineEvent[]; @@ -329,6 +330,12 @@ export function ChatView({ events, loading, refreshing: _refreshing, loadingOlde const autoScrollRef = useRef(true); const [showScrollBtn, setShowScrollBtn] = useState(false); + const lastScrollTopRef = useRef(0); + const suppressLoadOlderUntilRef = useRef(0); + + const suppressLoadOlder = useCallback((durationMs = 1200) => { + suppressLoadOlderUntilRef.current = Date.now() + durationMs; + }, []); // Track tool.call events to trigger file panel refresh const [filePanelRefreshTrigger, setFilePanelRefreshTrigger] = useState(0); @@ -397,7 +404,9 @@ export function ChatView({ events, loading, refreshing: _refreshing, loadingOlde const el = scrollRef.current; if (!el) return; autoScrollRef.current = true; + suppressLoadOlder(); el.scrollTop = el.scrollHeight; + lastScrollTopRef.current = el.scrollTop; }; // On session change, reset scroll position to bottom @@ -411,21 +420,34 @@ export function ChatView({ events, loading, refreshing: _refreshing, loadingOlde }, [sessionId]); // On mobile: when keyboard opens, viewport shrinks and scrollTop can reset to 0. - // Save scrollTop on focusin, restore it when visualViewport height decreases (keyboard appeared). + // Save the relative bottom offset on focusin, then restore against the new layout + // when visualViewport height decreases (keyboard appeared). Using absolute scrollTop + // is brittle on iOS and can replay a stale 0 value, snapping the chat to the top. 
useEffect(() => { const vv = window.visualViewport; if (!vv) return; - let savedScrollTop = 0; + let savedBottomOffset = 0; + let savedWasNearBottom = true; let prevHeight = vv.height; const onFocusIn = () => { - savedScrollTop = scrollRef.current?.scrollTop ?? 0; + const el = scrollRef.current; + if (!el) return; + savedBottomOffset = Math.max(0, el.scrollHeight - el.scrollTop - el.clientHeight); + savedWasNearBottom = savedBottomOffset < 150; + suppressLoadOlder(); }; const onResize = () => { const el = scrollRef.current; if (!el) return; - if (vv.height < prevHeight) { - // Keyboard appeared — restore scroll position - el.scrollTop = savedScrollTop; + if (vv.height !== prevHeight) { + suppressLoadOlder(); + if (savedWasNearBottom || autoScrollRef.current) { + requestAnimationFrame(() => scrollToBottom()); + } else if (vv.height < prevHeight) { + const targetTop = Math.max(0, el.scrollHeight - el.clientHeight - savedBottomOffset); + el.scrollTop = targetTop; + lastScrollTopRef.current = el.scrollTop; + } } prevHeight = vv.height; }; @@ -474,6 +496,13 @@ export function ChatView({ events, loading, refreshing: _refreshing, loadingOlde } }, [lastVisibleTs]); + // Any visible content update should force-follow to the latest message. + // Skip while prepending older history so anchor restoration can preserve position. + useLayoutEffect(() => { + if (loadingOlder || scrollAnchorRef.current) return; + scrollToBottom(); + }, [preview, viewItems, loading, loadingOlder]); + // Restore scroll position after Load Older prepends events useLayoutEffect(() => { const anchor = scrollAnchorRef.current; @@ -485,14 +514,14 @@ export function ChatView({ events, loading, refreshing: _refreshing, loadingOlde scrollAnchorRef.current = null; }, [events]); - // Subsequent auto-scroll (new messages while at bottom) — use rAF for smooth updates. + // Fallback for timestamp-based message additions. 
The layout effect above handles + // streaming edits and other view changes that do not advance timestamps. useEffect(() => { const changed = lastVisibleTs !== prevVisibleTsRef.current; prevVisibleTsRef.current = lastVisibleTs; if (!changed && !preview) return; requestAnimationFrame(() => { - if (preview) { scrollToBottom(); return; } - if (autoScrollRef.current) scrollToBottom(); + scrollToBottom(); }); }, [lastVisibleTs, preview]); @@ -508,17 +537,31 @@ export function ChatView({ events, loading, refreshing: _refreshing, loadingOlde const handleScroll = () => { const el = scrollRef.current; if (!el) return; + const scrollTop = el.scrollTop; + const scrollHeight = el.scrollHeight; + const clientHeight = el.clientHeight; + const wasAutoFollowing = autoScrollRef.current; + const transientTopJump = wasAutoFollowing + && scrollTop < 100 + && lastScrollTopRef.current > 100 + && Date.now() < suppressLoadOlderUntilRef.current; + if (transientTopJump) { + setShowScrollBtn(false); + requestAnimationFrame(() => scrollToBottom()); + return; + } // Use generous threshold — 150px from bottom still counts as "at bottom" - const atBottom = el.scrollHeight - el.scrollTop - el.clientHeight < 150; + const atBottom = scrollHeight - scrollTop - clientHeight < 150; autoScrollRef.current = atBottom; setShowScrollBtn(!atBottom); if (!atBottom) lastScrollActivityRef.current = Date.now(); + lastScrollTopRef.current = scrollTop; // Auto-trigger load older when scrolled near top - if (el.scrollTop < 100 && onLoadOlder && hasOlderHistory && !loadingOlder && !loading) { + if (scrollTop < 100 && onLoadOlder && hasOlderHistory && !loadingOlder && !loading) { const now = Date.now(); if (now - lastLoadOlderAtRef.current >= LOAD_OLDER_COOLDOWN_MS) { lastLoadOlderAtRef.current = now; - scrollAnchorRef.current = { scrollHeight: el.scrollHeight }; + scrollAnchorRef.current = { scrollHeight }; onLoadOlder(); } } @@ -1151,11 +1194,7 @@ const ChatEvent = memo(function ChatEvent({ event, nextTs, 
onPathClick, serverId function ActiveThinkingLabel({ startTs }: { startTs: number }) { const { t } = useTranslation(); - const [now, setNow] = useState(() => Date.now()); - useEffect(() => { - const timer = setInterval(() => setNow(Date.now()), 1000); - return () => clearInterval(timer); - }, []); + const now = useNowTicker(true); const sec = Math.max(0, Math.round((now - startTs) / 1000)); return <>{t('chat.thinking_running', { sec })}; } diff --git a/web/src/components/FileBrowser.tsx b/web/src/components/FileBrowser.tsx index 96c103953..bb64a8dcf 100644 --- a/web/src/components/FileBrowser.tsx +++ b/web/src/components/FileBrowser.tsx @@ -1,3 +1,4 @@ +import { DAEMON_MSG } from '@shared/daemon-events.js'; /** * FileBrowser — universal reusable file/directory browser. * @@ -120,6 +121,8 @@ export interface FileBrowserProps { highlightPath?: string; /** When set, automatically open the file preview on mount (skips manual click) */ autoPreviewPath?: string; + /** When autoPreviewPath is set, start in diff mode instead of source mode. */ + autoPreviewPreferDiff?: boolean; /** Paths already inserted — shown with a badge to avoid duplicates */ alreadyInserted?: string[]; /** Hide the footer (select/confirm buttons) — for embedded panel views */ @@ -132,8 +135,14 @@ export interface FileBrowserProps { serverId?: string; onConfirm: (paths: string[]) => void; onClose?: () => void; + /** Seed external preview state so a new host can reuse an existing load. */ + initialPreview?: FileBrowserPreviewState; + /** Keep an external preview host in sync with this FileBrowser's preview state. */ + onPreviewStateChange?: (update: FileBrowserPreviewUpdate) => void; + /** Trust a hydrated loading preview instead of starting a second read immediately. */ + skipAutoPreviewIfLoading?: boolean; /** When set, file clicks open an external preview (e.g. 
floating window) instead of inline split */ - onPreviewFile?: (path: string) => void; + onPreviewFile?: (request: FileBrowserPreviewRequest) => void; /** Default panel tab — 'files' or 'changes'. Default: 'files' */ defaultTab?: 'files' | 'changes'; } @@ -147,7 +156,7 @@ type FsNode = { isLoading?: boolean; }; -type PreviewState = +export type FileBrowserPreviewState = | { status: 'idle' } | { status: 'loading'; path: string } | { status: 'ok'; path: string; content: string; diff?: string; diffHtml?: string; downloadId?: string } @@ -155,6 +164,19 @@ type PreviewState = | { status: 'office'; path: string; data: string; mimeType: string; downloadId?: string } | { status: 'error'; path: string; error: string; downloadId?: string }; +export interface FileBrowserPreviewRequest { + path: string; + preferDiff?: boolean; + preview?: FileBrowserPreviewState; + sourcePreviewLive?: boolean; +} + +export interface FileBrowserPreviewUpdate { + path: string; + preferDiff?: boolean; + preview: FileBrowserPreviewState; +} + /** File extensions that can be previewed with office document libraries. */ const OFFICE_EXTENSIONS: Record = { '.pdf': 'application/pdf', @@ -185,6 +207,7 @@ export function FileBrowser({ initialPath, highlightPath, autoPreviewPath, + autoPreviewPreferDiff = false, alreadyInserted = [], hideFooter = false, changesRootPath, @@ -192,6 +215,9 @@ export function FileBrowser({ serverId, onConfirm, onClose, + initialPreview, + onPreviewStateChange, + skipAutoPreviewIfLoading = false, onPreviewFile, defaultTab = 'files', }: FileBrowserProps) { @@ -209,8 +235,11 @@ export function FileBrowser({ const [currentLabel, setCurrentLabel] = useState(startPath); const [error, setError] = useState(null); const [showHidden, setShowHidden] = useState(false); - const [preview, setPreview] = useState({ status: 'idle' }); - const [showDiff, setShowDiff] = useState(false); + const [preview, setPreview] = useState(() => initialPreview ?? 
{ status: 'idle' }); + const [showDiff, setShowDiff] = useState(() => { + if (initialPreview?.status === 'ok' && initialPreview.diffHtml && autoPreviewPreferDiff) return true; + return false; + }); const [lightbox, setLightbox] = useState(null); const [downloadError, setDownloadError] = useState(null); @@ -263,6 +292,7 @@ export function FileBrowser({ const pendingGitDiffRef = useRef(new Map()); // requestId → filePath const pendingMkdirRef = useRef(new Map()); const mountedRef = useRef(true); + const dismissedAutoPreviewPathRef = useRef(null); // History navigation const historyRef = useRef([startPath]); @@ -294,7 +324,7 @@ export function FileBrowser({ if (!mountedRef.current) return; // WS reconnected — clear loaded cache so directories re-fetch on next expand/navigate - if (msg.type === 'daemon.reconnected' || (msg.type === 'session.event' && (msg as any).event === 'connected')) { + if (msg.type === DAEMON_MSG.RECONNECTED || (msg.type === 'session.event' && (msg as any).event === 'connected')) { loadedRef.current.clear(); pendingRef.current.clear(); pendingChangesRef.current.clear(); @@ -524,22 +554,24 @@ export function FileBrowser({ timersRef.current.set(requestId, timer); }, [ws, includeFiles, t]); - const fetchPreview = useCallback((filePath: string) => { - if (onPreviewFile) { onPreviewFile(filePath); return; } + const fetchPreview = useCallback((filePath: string, preferDiff = false) => { if (editDirtyRef.current) { if (!window.confirm(t('fileBrowser.unsavedChanges'))) return; } + dismissedAutoPreviewPathRef.current = null; setEditDirty(false); setEditContent(''); setOriginalMtime(undefined); setIsEditing(() => { try { return localStorage.getItem(PREF_KEY) === '1'; } catch { return false; } }); - setPreview({ status: 'loading', path: filePath }); - setShowDiff(false); + const loadingPreview: FileBrowserPreviewState = { status: 'loading', path: filePath }; + setPreview(loadingPreview); + setShowDiff(preferDiff); + if (onPreviewFile) onPreviewFile({ path: 
filePath, preferDiff, preview: loadingPreview }); const requestId = ws.fsReadFile(filePath); pendingReadRef.current.set(requestId, filePath); const diffId = ws.fsGitDiff(filePath); pendingGitDiffRef.current.set(diffId, filePath); - }, [ws, t]); + }, [ws, t, onPreviewFile]); const [expandedPaths, setExpandedPaths] = useState>(() => new Set([startPath])); @@ -611,10 +643,56 @@ export function FileBrowser({ fetchDir(startPath); }, [startPath, fetchDir]); + useEffect(() => { + if (!initialPreview || initialPreview.status === 'idle') return; + setPreview(initialPreview); + }, [initialPreview]); + + useEffect(() => { + if (!autoPreviewPath) { + dismissedAutoPreviewPathRef.current = null; + return; + } + if (dismissedAutoPreviewPathRef.current && dismissedAutoPreviewPathRef.current !== autoPreviewPath) { + dismissedAutoPreviewPathRef.current = null; + } + }, [autoPreviewPath]); + + useEffect(() => { + if (!onPreviewStateChange) return; + if (preview.status === 'idle') return; + onPreviewStateChange({ + path: (preview as { path: string }).path, + preferDiff: showDiff, + preview, + }); + }, [onPreviewStateChange, preview, showDiff]); + // Auto-preview file on open (e.g. when clicking a path link in chat) useEffect(() => { - if (autoPreviewPath) fetchPreview(autoPreviewPath); - }, [autoPreviewPath, fetchPreview]); + if (!autoPreviewPath) return; + if (dismissedAutoPreviewPathRef.current === autoPreviewPath && preview.status === 'idle') return; + const currentPreviewPath = preview.status !== 'idle' ? 
(preview as { path: string }).path : null; + if (currentPreviewPath === autoPreviewPath && preview.status !== 'idle') { + setShowDiff(autoPreviewPreferDiff); + if (preview.status === 'loading' && initialPreview?.status === 'loading' && !skipAutoPreviewIfLoading) { + fetchPreview(autoPreviewPath, autoPreviewPreferDiff); + } + return; + } + fetchPreview(autoPreviewPath, autoPreviewPreferDiff); + }, [autoPreviewPath, autoPreviewPreferDiff, fetchPreview, initialPreview, preview, skipAutoPreviewIfLoading]); + + const dismissPreview = useCallback(() => { + if (editDirty && !window.confirm(t('fileBrowser.unsavedChanges'))) return; + if (autoPreviewPath) dismissedAutoPreviewPathRef.current = autoPreviewPath; + setIsEditing(false); + setEditDirty(false); + setPreview({ status: 'idle' }); + if (autoPreviewPath && onClose) { + onClose(); + } + }, [autoPreviewPath, editDirty, onClose, t]); // Auto-refresh preview content every 5s when a file is being previewed (paused during editing) useEffect(() => { @@ -742,12 +820,14 @@ export function FileBrowser({ : t('file_browser.select'); const alreadySet = new Set(alreadyInserted); + const usesExternalPreview = !!onPreviewFile; const hasPreview = mode !== 'dir-only' && preview.status !== 'idle'; + const hasInlinePreview = hasPreview && !usesExternalPreview; const previewPath = preview.status !== 'idle' ? (preview as { path: string }).path : null; const tree = ( -
    +
    {data.map((root) => (
    {previewPath!.split(/[/\\]/).pop()} {preview.status === 'ok' && !isEditing && ( @@ -842,10 +919,7 @@ export function FileBrowser({ )}
    {/* Conflict dialog rendered inside FileEditor */} @@ -958,7 +1032,7 @@ export function FileBrowser({
    fetchPreview(f.path)} + onClick={() => fetchPreview(f.path, true)} title={f.path} > {f.code === '??' ? 'U' : f.code} @@ -1103,6 +1177,8 @@ export function FileBrowser({
    ) : null; + const showEmbeddedChangesSection = !!changesSection && !usesExternalPreview; + if (layout === 'panel') { const tabs = changesRootPath ? (
    @@ -1140,10 +1216,10 @@ export function FileBrowser({ {tabs} {breadcrumb} {newFolderDialog} -
    0 ? ' fb-body-with-changes' : ''}`}> +
    {tree} - {changesSection} + {showEmbeddedChangesSection ? changesSection : null}
    {hasPreview && (