From 1b00e80f7373ee1bfe502fb232af8caf9e813867 Mon Sep 17 00:00:00 2001
From: NagyVikt
Date: Wed, 22 Apr 2026 15:54:52 +0200
Subject: [PATCH 1/2] Pull protected-main doctor flow out of the CLI entrypoint
The protected-main doctor path was still living inline inside src/cli/main.js, which kept sandbox lifecycle, merge-back, and output rendering coupled to the top-level command dispatcher. This commit moves that lifecycle into src/doctor/index.js, moves the remaining shared branch/config helpers into src/git/index.js, and keeps the doctor CLI behavior covered by focused regression tests.
Constraint: gx doctor protected-branch behavior and output had to stay stable during the extraction
Rejected: Fold the overlapping scaffold/DI extraction into this commit | unrelated branch drift was already present in the worktree
Confidence: medium
Scope-risk: moderate
Reversibility: clean
Directive: Do not move the protected-main doctor lifecycle or shared branch/config helpers back into src/cli/main.js; reconcile the overlapping scaffold/DI branch drift separately before finish
Tested: node --check src/cli/main.js src/doctor/index.js src/git/index.js; node --test test/cli-args-dispatch.test.js; node --test test/doctor.test.js; openspec validate agent-codex-extract-doctor-sandbox-module-2026-04-22-15-38 --type change --strict; openspec validate --specs
Not-tested: gx branch finish --branch agent/codex/extract-doctor-sandbox-module-2026-04-22-15-38 --base main --via-pr --wait-for-merge --cleanup; npm test
---
.../proposal.md | 18 +
.../specs/cli-modularization/spec.md | 19 +
.../tasks.md | 40 +
src/cli/main.js | 989 +-----------------
src/doctor/index.js | 950 +++++++++++++++++
src/git/index.js | 538 +++++++++-
test/cli-args-dispatch.test.js | 33 +-
7 files changed, 1619 insertions(+), 968 deletions(-)
create mode 100644 openspec/changes/agent-codex-extract-doctor-sandbox-module-2026-04-22-15-38/proposal.md
create mode 100644 openspec/changes/agent-codex-extract-doctor-sandbox-module-2026-04-22-15-38/specs/cli-modularization/spec.md
create mode 100644 openspec/changes/agent-codex-extract-doctor-sandbox-module-2026-04-22-15-38/tasks.md
create mode 100644 src/doctor/index.js
diff --git a/openspec/changes/agent-codex-extract-doctor-sandbox-module-2026-04-22-15-38/proposal.md b/openspec/changes/agent-codex-extract-doctor-sandbox-module-2026-04-22-15-38/proposal.md
new file mode 100644
index 0000000..6998507
--- /dev/null
+++ b/openspec/changes/agent-codex-extract-doctor-sandbox-module-2026-04-22-15-38/proposal.md
@@ -0,0 +1,18 @@
+## Why
+
+- `src/cli/main.js` still carries the protected-main doctor sandbox lifecycle inline even after the earlier parser/dispatch and doctor-foundations passes.
+- That keeps sandbox bootstrapping, nested CLI execution, auto-commit, merge-back, lock sync, and output rendering coupled to the top-level CLI file.
+- The review surfaced the next highest-value slice clearly: move the doctor lifecycle into its own module and move the remaining generic git helpers out of `main.js`.
+
+## What Changes
+
+- Add `src/doctor/index.js` as the dedicated home for the protected-main `gx doctor` sandbox lifecycle.
+- Move the remaining shared branch/config helpers used by that lifecycle into `src/git/index.js`.
+- Keep the current CLI surface and doctor output stable while shrinking `src/cli/main.js`.
+- Add focused modularization coverage that fails if `main.js` regains local doctor lifecycle ownership.
+
+## Impact
+
+- Primary surface: `gx doctor` on protected branches, especially the sandbox auto-finish + merge-back path.
+- Secondary surface: any other CLI path that uses `currentBranchName`, `readGitConfig`, `aheadBehind`, `workingTreeIsDirty`, or `branchMergedIntoBase`.
+- Risk is moderate because the doctor flow is behaviorally sensitive, so verification stays focused on doctor and CLI modularization regressions.
diff --git a/openspec/changes/agent-codex-extract-doctor-sandbox-module-2026-04-22-15-38/specs/cli-modularization/spec.md b/openspec/changes/agent-codex-extract-doctor-sandbox-module-2026-04-22-15-38/specs/cli-modularization/spec.md
new file mode 100644
index 0000000..bd3ad47
--- /dev/null
+++ b/openspec/changes/agent-codex-extract-doctor-sandbox-module-2026-04-22-15-38/specs/cli-modularization/spec.md
@@ -0,0 +1,19 @@
+## ADDED Requirements
+
+### Requirement: Protected-main doctor lifecycle lives under `src/doctor`
+The CLI SHALL keep the protected-main `gx doctor` sandbox lifecycle in a dedicated `src/doctor` module instead of defining that lifecycle inline in `src/cli/main.js`.
+
+#### Scenario: Main delegates protected-main doctor execution
+- **GIVEN** a maintainer inspects the refactored CLI entrypoint
+- **WHEN** they follow the protected-main `gx doctor` path
+- **THEN** `src/cli/main.js` delegates the sandbox lifecycle into `src/doctor`
+- **AND** the observable doctor output and exit behavior remain unchanged.
+
+### Requirement: Shared git helpers are single-sourced under `src/git`
+The CLI SHALL keep reusable branch/config helpers in `src/git` instead of redefining them in `src/cli/main.js`.
+
+#### Scenario: Doctor and finish reuse the same git helpers
+- **GIVEN** the doctor lifecycle and finish flows both need branch/config helpers
+- **WHEN** the CLI resolves current branch, git config, ahead/behind counts, or merge status
+- **THEN** those helpers come from `src/git`
+- **AND** `src/cli/main.js` does not reintroduce local copies of those helpers.
diff --git a/openspec/changes/agent-codex-extract-doctor-sandbox-module-2026-04-22-15-38/tasks.md b/openspec/changes/agent-codex-extract-doctor-sandbox-module-2026-04-22-15-38/tasks.md
new file mode 100644
index 0000000..10ff968
--- /dev/null
+++ b/openspec/changes/agent-codex-extract-doctor-sandbox-module-2026-04-22-15-38/tasks.md
@@ -0,0 +1,40 @@
+## Definition of Done
+
+This change is complete only when all of the following are true:
+
+- Every checkbox below is checked.
+- The branch `agent/codex/extract-doctor-sandbox-module-2026-04-22-15-38` reaches `MERGED` state on `origin` and the PR URL + final merge state are recorded in the completion handoff.
+- If any step blocks, add a `BLOCKED:` line under section 4 and stop.
+
+## Handoff
+
+- Handoff: change=`agent-codex-extract-doctor-sandbox-module-2026-04-22-15-38`; branch=`agent/codex/extract-doctor-sandbox-module-2026-04-22-15-38`; scope=`src/cli/main.js`, `src/doctor/index.js`, `src/git/index.js`, `test/cli-args-dispatch.test.js`; action=`move the protected-main doctor lifecycle into src/doctor and the remaining shared git helpers into src/git without changing doctor behavior`.
+
+## 1. Specification
+
+- [x] 1.1 Lock the cleanup scope to doctor lifecycle extraction plus shared git-helper relocation only.
+- [x] 1.2 Add a `cli-modularization` delta that requires the protected-main doctor flow to live under `src/doctor`.
+
+## 2. Implementation
+
+- [x] 2.1 Add `src/doctor/index.js` and move the protected-main doctor sandbox lifecycle out of `src/cli/main.js`.
+- [x] 2.2 Move `readGitConfig`, `currentBranchName`, `workingTreeIsDirty`, `aheadBehind`, `branchExists`, and `branchMergedIntoBase` into `src/git/index.js`.
+- [x] 2.3 Keep `src/cli/main.js` as the command-level integrator only and update the modularization regression test to guard the new boundary.
+- [x] 2.4 Fix the protected-base stash cleanup path so successful merge-back cannot leak a leftover stash if sandbox cleanup fails later.
+
+## 3. Verification
+
+- [x] 3.1 Run `node --check src/cli/main.js src/doctor/index.js src/git/index.js`.
+- [x] 3.2 Run `node --test test/cli-args-dispatch.test.js test/doctor.test.js`.
+- [x] 3.3 Run `openspec validate agent-codex-extract-doctor-sandbox-module-2026-04-22-15-38 --type change --strict`.
+- [x] 3.4 Run `openspec validate --specs`.
+
+Verification note: `node --check src/cli/main.js src/doctor/index.js src/git/index.js` passed; `node --test test/cli-args-dispatch.test.js` passed (10/10); `node --test test/doctor.test.js` passed (17/17); `openspec validate agent-codex-extract-doctor-sandbox-module-2026-04-22-15-38 --type change --strict` returned valid; `openspec validate --specs` returned `No items found to validate`.
+
+## 4. Cleanup
+
+- [ ] 4.1 Run `gx branch finish --branch agent/codex/extract-doctor-sandbox-module-2026-04-22-15-38 --base main --via-pr --wait-for-merge --cleanup`.
+- [ ] 4.2 Record the PR URL and final merge state (`MERGED`) in the completion handoff.
+- [ ] 4.3 Confirm the sandbox worktree is removed and no local/remote refs remain for the branch.
+
+BLOCKED: the worktree also contains an overlapping scaffold/DI extraction attempt (`src/scaffold/index.js` plus `openspec/changes/agent-codex-extract-git-scaffold-doctor-di-2026-04-22-15-38/`) that was not part of this narrow doctor-module pass. Do not run the cleanup/finish pipeline for this branch until that parallel scope is either integrated intentionally or moved off the branch.
diff --git a/src/cli/main.js b/src/cli/main.js
index e62e8cc..901aee5 100755
--- a/src/cli/main.js
+++ b/src/cli/main.js
@@ -4,6 +4,7 @@ const hooksModule = require('../hooks');
const sandboxModule = require('../sandbox');
const toolchainModule = require('../toolchain');
const finishModule = require('../finish');
+const doctorModule = require('../doctor');
const {
fs,
path,
@@ -72,6 +73,14 @@ const {
resolveRepoRoot,
isGitRepo,
discoverNestedGitRepos,
+ gitRefExists,
+ readGitConfig,
+ currentBranchName,
+ ensureRepoBranch,
+ workingTreeIsDirty,
+ aheadBehind,
+ branchExists,
+ branchMergedIntoBase,
} = require('../git');
const {
run,
@@ -1015,51 +1024,10 @@ function buildSandboxSetupArgs(options, sandboxTarget) {
return args;
}
-function buildSandboxDoctorArgs(options, sandboxTarget) {
- const args = ['doctor', '--target', sandboxTarget];
- if (options.dryRun) args.push('--dry-run');
- appendForceArgs(args, options);
- if (options.skipAgents) args.push('--skip-agents');
- if (options.skipPackageJson) args.push('--skip-package-json');
- if (options.skipGitignore) args.push('--no-gitignore');
- if (!options.dropStaleLocks) args.push('--keep-stale-locks');
- args.push(options.waitForMerge ? '--wait-for-merge' : '--no-wait-for-merge');
- if (options.verboseAutoFinish) args.push('--verbose-auto-finish');
- if (options.json) args.push('--json');
- return args;
-}
-
function isSpawnFailure(result) {
return Boolean(result?.error) && typeof result?.status !== 'number';
}
-function ensureRepoBranch(repoRoot, branch) {
- const current = currentBranchName(repoRoot);
- if (current === branch) {
- return { ok: true, changed: false };
- }
-
- const checkoutResult = run('git', ['-C', repoRoot, 'checkout', branch], { timeout: 20_000 });
- if (isSpawnFailure(checkoutResult)) {
- return {
- ok: false,
- changed: false,
- stdout: checkoutResult.stdout || '',
- stderr: checkoutResult.stderr || '',
- };
- }
- if (checkoutResult.status !== 0) {
- return {
- ok: false,
- changed: false,
- stdout: checkoutResult.stdout || '',
- stderr: checkoutResult.stderr || '',
- };
- }
-
- return { ok: true, changed: true };
-}
-
function protectedBaseSandboxBranchPrefix() {
const now = new Date();
const stamp = [
@@ -1078,10 +1046,6 @@ function protectedBaseSandboxWorktreePath(repoRoot, branchName) {
return path.join(repoRoot, defaultAgentWorktreeRelativeDir(), branchName.replace(/\//g, '__'));
}
-function gitRefExists(repoRoot, ref) {
- return run('git', ['-C', repoRoot, 'show-ref', '--verify', '--quiet', ref]).status === 0;
-}
-
function resolveProtectedBaseSandboxStartRef(repoRoot, baseBranch) {
run('git', ['-C', repoRoot, 'fetch', 'origin', baseBranch, '--quiet'], { timeout: 20_000 });
if (gitRefExists(repoRoot, `refs/remotes/origin/${baseBranch}`)) {
@@ -1260,852 +1224,6 @@ function cleanupProtectedBaseSandbox(repoRoot, metadata) {
return result;
}
-function parseGitPathList(output) {
- return String(output || '')
- .split('\n')
- .map((line) => line.trim())
- .filter((line) => line && line !== LOCK_FILE_RELATIVE);
-}
-
-function collectDoctorChangedPaths(worktreePath) {
- const changed = new Set();
- const commands = [
- ['diff', '--name-only'],
- ['diff', '--cached', '--name-only'],
- ['ls-files', '--others', '--exclude-standard'],
- ];
- for (const gitArgs of commands) {
- const result = run('git', ['-C', worktreePath, ...gitArgs], { timeout: 20_000 });
- for (const filePath of parseGitPathList(result.stdout)) {
- changed.add(filePath);
- }
- }
- return Array.from(changed);
-}
-
-function collectDoctorDeletedPaths(worktreePath) {
- const deleted = new Set();
- const commands = [
- ['diff', '--name-only', '--diff-filter=D'],
- ['diff', '--cached', '--name-only', '--diff-filter=D'],
- ];
- for (const gitArgs of commands) {
- const result = run('git', ['-C', worktreePath, ...gitArgs], { timeout: 20_000 });
- for (const filePath of parseGitPathList(result.stdout)) {
- deleted.add(filePath);
- }
- }
- return Array.from(deleted);
-}
-
-function collectWorktreeDirtyPaths(worktreePath) {
- const dirty = new Set();
- const commands = [
- ['diff', '--name-only'],
- ['diff', '--cached', '--name-only'],
- ['ls-files', '--others', '--exclude-standard'],
- ];
- for (const gitArgs of commands) {
- const result = run('git', ['-C', worktreePath, ...gitArgs], { timeout: 20_000 });
- for (const filePath of parseGitPathList(result.stdout)) {
- dirty.add(filePath);
- }
- }
- return Array.from(dirty);
-}
-
-function collectDoctorForceAddPaths(worktreePath) {
- return REQUIRED_MANAGED_REPO_FILES
- .filter((relativePath) => relativePath.startsWith('scripts/') || relativePath.startsWith('.githooks/'))
- .filter((relativePath) => fs.existsSync(path.join(worktreePath, relativePath)));
-}
-
-function stripDoctorSandboxLocks(rawContent, branchName) {
- if (!rawContent || !branchName) {
- return rawContent;
- }
- try {
- const parsed = JSON.parse(rawContent);
- const locks = parsed && typeof parsed === 'object' && parsed.locks && typeof parsed.locks === 'object'
- ? parsed.locks
- : null;
- if (!locks) {
- return rawContent;
- }
- let changed = false;
- const filteredLocks = {};
- for (const [filePath, lockInfo] of Object.entries(locks)) {
- if (lockInfo && lockInfo.branch === branchName) {
- changed = true;
- continue;
- }
- filteredLocks[filePath] = lockInfo;
- }
- if (!changed) {
- return rawContent;
- }
- return `${JSON.stringify({ ...parsed, locks: filteredLocks }, null, 2)}\n`;
- } catch {
- return rawContent;
- }
-}
-
-function claimDoctorChangedLocks(metadata) {
- if (!metadata.branch) {
- return {
- status: 'skipped',
- note: 'missing sandbox branch metadata',
- changedCount: 0,
- deletedCount: 0,
- };
- }
-
- const changedPaths = Array.from(new Set([
- ...collectDoctorChangedPaths(metadata.worktreePath),
- ...collectDoctorForceAddPaths(metadata.worktreePath),
- ]));
- const deletedPaths = collectDoctorDeletedPaths(metadata.worktreePath);
- if (changedPaths.length > 0) {
- runPackageAsset('lockTool', ['claim', '--branch', metadata.branch, ...changedPaths], {
- cwd: metadata.worktreePath,
- timeout: 30_000,
- });
- }
- if (deletedPaths.length > 0) {
- runPackageAsset('lockTool', ['allow-delete', '--branch', metadata.branch, ...deletedPaths], {
- cwd: metadata.worktreePath,
- timeout: 30_000,
- });
- }
-
- return {
- status: 'claimed',
- note: 'claimed locks for doctor auto-commit',
- changedCount: changedPaths.length,
- deletedCount: deletedPaths.length,
- };
-}
-
-function autoCommitDoctorSandboxChanges(metadata) {
- if (!metadata.worktreePath || !metadata.branch) {
- return {
- status: 'skipped',
- note: 'missing sandbox branch metadata',
- };
- }
-
- claimDoctorChangedLocks(metadata);
- run(
- 'git',
- ['-C', metadata.worktreePath, 'add', '-A', '--', '.', `:(exclude)${LOCK_FILE_RELATIVE}`],
- { timeout: 20_000 },
- );
- const forceAddPaths = collectDoctorForceAddPaths(metadata.worktreePath);
- if (forceAddPaths.length > 0) {
- run(
- 'git',
- ['-C', metadata.worktreePath, 'add', '-f', '--', ...forceAddPaths],
- { timeout: 20_000 },
- );
- }
- const staged = run(
- 'git',
- ['-C', metadata.worktreePath, 'diff', '--cached', '--name-only', '--', '.', `:(exclude)${LOCK_FILE_RELATIVE}`],
- { timeout: 20_000 },
- );
- const stagedFiles = parseGitPathList(staged.stdout);
- if (stagedFiles.length === 0) {
- return {
- status: 'no-changes',
- note: 'no committable doctor changes found in sandbox',
- };
- }
-
- const commitResult = run(
- 'git',
- ['-C', metadata.worktreePath, 'commit', '-m', 'Auto-finish: gx doctor repairs'],
- { timeout: 30_000 },
- );
- if (commitResult.status !== 0) {
- return {
- status: 'failed',
- note: 'doctor sandbox auto-commit failed',
- stdout: commitResult.stdout || '',
- stderr: commitResult.stderr || '',
- };
- }
-
- return {
- status: 'committed',
- note: 'doctor sandbox repairs committed',
- commitMessage: 'Auto-finish: gx doctor repairs',
- stagedFiles,
- };
-}
-
-function hasOriginRemote(repoRoot) {
- return run('git', ['-C', repoRoot, 'remote', 'get-url', 'origin']).status === 0;
-}
-
-function originRemoteLooksLikeGithub(repoRoot) {
- const originUrl = readGitConfig(repoRoot, 'remote.origin.url');
- if (!originUrl) {
- return false;
- }
- return /github\.com[:/]/i.test(originUrl);
-}
-
-function isCommandAvailable(commandName) {
- return run('which', [commandName]).status === 0;
-}
-
-function extractAgentBranchFinishPrUrl(output) {
- const match = String(output || '').match(/\[agent-branch-finish\] PR:\s*(\S+)/);
- return match ? match[1] : '';
-}
-
-function doctorFinishFlowIsPending(output) {
- return (
- /\[agent-branch-finish\] PR merge not completed yet; leaving PR open\./.test(output) ||
- /\[agent-branch-finish\] Merge pending review\/check policy\. Branch cleanup skipped for now\./.test(output) ||
- /\[agent-branch-finish\] PR auto-merge enabled; waiting for required checks\/reviews\./.test(output)
- );
-}
-
-function finishDoctorSandboxBranch(blocked, metadata, options = {}) {
- if (!hasOriginRemote(blocked.repoRoot)) {
- return {
- status: 'skipped',
- note: 'origin remote missing; skipped auto-finish',
- };
- }
- const explicitGhBin = Boolean(String(process.env.GUARDEX_GH_BIN || '').trim());
- if (!explicitGhBin && !originRemoteLooksLikeGithub(blocked.repoRoot)) {
- return {
- status: 'skipped',
- note: 'origin remote is not GitHub; skipped auto-finish PR flow',
- };
- }
-
- const ghBin = process.env.GUARDEX_GH_BIN || 'gh';
- if (!isCommandAvailable(ghBin)) {
- return {
- status: 'skipped',
- note: `'${ghBin}' not available; skipped auto-finish PR flow`,
- };
- }
- const ghAuthStatus = run(ghBin, ['auth', 'status'], { timeout: 20_000 });
- if (ghAuthStatus.status !== 0) {
- return {
- status: 'skipped',
- note: `'${ghBin}' auth unavailable; skipped auto-finish PR flow`,
- stderr: ghAuthStatus.stderr || '',
- };
- }
-
- const rawWaitTimeoutSeconds = Number.parseInt(process.env.GUARDEX_FINISH_WAIT_TIMEOUT_SECONDS || '1800', 10);
- const waitTimeoutSeconds =
- Number.isFinite(rawWaitTimeoutSeconds) && rawWaitTimeoutSeconds >= 30 ? rawWaitTimeoutSeconds : 1800;
- const finishTimeoutMs = Math.max(180_000, (waitTimeoutSeconds + 60) * 1000);
- const waitForMergeArg = options.waitForMerge === false ? '--no-wait-for-merge' : '--wait-for-merge';
-
- const finishResult = runPackageAsset(
- 'branchFinish',
- ['--branch', metadata.branch, '--base', blocked.branch, '--via-pr', waitForMergeArg, '--cleanup'],
- { cwd: metadata.worktreePath, timeout: finishTimeoutMs },
- );
- if (isSpawnFailure(finishResult)) {
- return {
- status: 'failed',
- note: 'doctor sandbox finish flow errored',
- stdout: finishResult.stdout || '',
- stderr: finishResult.stderr || '',
- };
- }
- if (finishResult.status !== 0) {
- return {
- status: 'failed',
- note: 'doctor sandbox finish flow failed',
- stdout: finishResult.stdout || '',
- stderr: finishResult.stderr || '',
- };
- }
-
- const combinedOutput = `${finishResult.stdout || ''}\n${finishResult.stderr || ''}`;
- if (doctorFinishFlowIsPending(combinedOutput)) {
- return {
- status: 'pending',
- note: 'PR created and waiting for merge policy/checks',
- prUrl: extractAgentBranchFinishPrUrl(combinedOutput),
- stdout: finishResult.stdout || '',
- stderr: finishResult.stderr || '',
- };
- }
-
- return {
- status: 'completed',
- note: 'doctor sandbox finish flow completed',
- stdout: finishResult.stdout || '',
- stderr: finishResult.stderr || '',
- };
-}
-
-function mergeDoctorSandboxRepairsBackToProtectedBase(options, blocked, metadata, autoCommitResult, finishResult) {
- if (options.dryRun) {
- return {
- status: autoCommitResult.status === 'committed' ? 'would-merge' : 'skipped',
- note: autoCommitResult.status === 'committed'
- ? 'dry run: would fast-forward tracked doctor repairs into the protected base workspace'
- : 'dry run skips tracked repair merge',
- };
- }
-
- if (autoCommitResult.status !== 'committed') {
- return {
- status: autoCommitResult.status === 'no-changes' ? 'unchanged' : 'skipped',
- note: autoCommitResult.status === 'no-changes'
- ? 'no tracked doctor repairs needed in the protected base workspace'
- : 'tracked doctor repair merge skipped',
- };
- }
-
- if (finishResult.status !== 'skipped') {
- return {
- status: 'skipped',
- note: finishResult.status === 'failed'
- ? 'tracked doctor repairs remain in the sandbox after finish failure'
- : 'tracked doctor repairs are being delivered through the sandbox finish flow',
- };
- }
-
- const allowedPaths = new Set([
- ...(autoCommitResult.stagedFiles || []),
- ...OMX_SCAFFOLD_DIRECTORIES,
- ...Array.from(OMX_SCAFFOLD_FILES.keys()),
- ...REQUIRED_MANAGED_REPO_FILES,
- 'bin',
- 'package.json',
- '.gitignore',
- 'AGENTS.md',
- ]);
- const dirtyPaths = collectWorktreeDirtyPaths(blocked.repoRoot);
- let stashRef = '';
- if (dirtyPaths.length > 0) {
- const unexpectedPaths = dirtyPaths.filter((filePath) => {
- if (allowedPaths.has(filePath)) {
- return false;
- }
- return !AGENT_WORKTREE_RELATIVE_DIRS.some(
- (relativeDir) => filePath === relativeDir || filePath.startsWith(`${relativeDir}/`),
- );
- });
- if (unexpectedPaths.length > 0) {
- return {
- status: 'failed',
- note: `protected branch workspace has unrelated local changes: ${unexpectedPaths.join(', ')}`,
- };
- }
- const stashMessage = `guardex-doctor-merge-${Date.now()}`;
- const stashResult = run(
- 'git',
- ['-C', blocked.repoRoot, 'stash', 'push', '--all', '--message', stashMessage],
- { timeout: 30_000 },
- );
- if (isSpawnFailure(stashResult)) {
- return {
- status: 'failed',
- note: 'could not stash protected branch doctor drift before merge',
- stdout: stashResult.stdout || '',
- stderr: stashResult.stderr || '',
- };
- }
- if (stashResult.status !== 0) {
- return {
- status: 'failed',
- note: 'stashing protected branch doctor drift failed',
- stdout: stashResult.stdout || '',
- stderr: stashResult.stderr || '',
- };
- }
-
- const stashLookup = run(
- 'git',
- ['-C', blocked.repoRoot, 'stash', 'list'],
- { timeout: 20_000 },
- );
- stashRef = String(stashLookup.stdout || '')
- .split('\n')
- .find((line) => line.includes(stashMessage))
- ?.split(':')[0]
- ?.trim() || '';
- }
-
- const restoreResult = ensureRepoBranch(blocked.repoRoot, blocked.branch);
- if (!restoreResult.ok) {
- if (stashRef) {
- run('git', ['-C', blocked.repoRoot, 'stash', 'apply', stashRef], { timeout: 30_000 });
- }
- return {
- status: 'failed',
- note: `could not restore protected branch '${blocked.branch}' before applying sandbox repairs`,
- stdout: restoreResult.stdout || '',
- stderr: restoreResult.stderr || '',
- };
- }
-
- const mergeResult = run(
- 'git',
- ['-C', blocked.repoRoot, 'merge', '--ff-only', metadata.branch],
- { timeout: 30_000 },
- );
- if (isSpawnFailure(mergeResult)) {
- if (stashRef) {
- run('git', ['-C', blocked.repoRoot, 'stash', 'apply', stashRef], { timeout: 30_000 });
- }
- return {
- status: 'failed',
- note: 'tracked doctor repair merge errored',
- stdout: mergeResult.stdout || '',
- stderr: mergeResult.stderr || '',
- };
- }
- if (mergeResult.status !== 0) {
- if (stashRef) {
- run('git', ['-C', blocked.repoRoot, 'stash', 'apply', stashRef], { timeout: 30_000 });
- }
- return {
- status: 'failed',
- note: 'tracked doctor repair merge failed',
- stdout: mergeResult.stdout || '',
- stderr: mergeResult.stderr || '',
- };
- }
-
- let cleanupResult;
- try {
- cleanupResult = cleanupProtectedBaseSandbox(blocked.repoRoot, metadata);
- } catch (error) {
- return {
- status: 'failed',
- note: `tracked doctor repair merge succeeded but sandbox cleanup failed: ${error.message}`,
- stdout: mergeResult.stdout || '',
- stderr: mergeResult.stderr || '',
- };
- }
-
- let hookRefreshResult;
- try {
- hookRefreshResult = configureHooks(blocked.repoRoot, false);
- } catch (error) {
- return {
- status: 'failed',
- note: `tracked doctor repair merge succeeded but local hook refresh failed: ${error.message}`,
- stdout: mergeResult.stdout || '',
- stderr: mergeResult.stderr || '',
- };
- }
-
- if (stashRef) {
- run('git', ['-C', blocked.repoRoot, 'stash', 'drop', stashRef], { timeout: 20_000 });
- }
-
- return {
- status: 'merged',
- note: 'fast-forwarded tracked doctor repairs into the protected base workspace',
- stdout: mergeResult.stdout || '',
- stderr: mergeResult.stderr || '',
- cleanup: cleanupResult,
- hookRefresh: hookRefreshResult,
- };
-}
-
-/**
- * @param {string} [note]
- * @returns {OperationResult}
- */
-function createDoctorSkippedOperation(note = 'sandbox doctor did not complete successfully') {
- return {
- status: 'skipped',
- note,
- };
-}
-
-/**
- * @param {string} [note]
- * @returns {AutoFinishSummary}
- */
-function createSkippedDoctorAutoFinishSummary(note = 'sandbox doctor did not complete successfully') {
- return {
- enabled: false,
- attempted: 0,
- completed: 0,
- skipped: 0,
- failed: 0,
- details: [`Skipped auto-finish sweep (${note}).`],
- };
-}
-
-/**
- * Default the lifecycle to skipped states until the nested doctor run succeeds.
- *
- * @param {string} [note]
- * @returns {DoctorSandboxExecution}
- */
-function createDoctorSandboxExecutionState(note = 'sandbox doctor did not complete successfully') {
- return {
- autoCommit: createDoctorSkippedOperation(note),
- finish: createDoctorSkippedOperation(note),
- protectedBaseRepairSync: createDoctorSkippedOperation(note),
- lockSync: createDoctorSkippedOperation(note),
- omxScaffoldSync: createDoctorSkippedOperation(note),
- autoFinish: createSkippedDoctorAutoFinishSummary(note),
- sandboxLockContent: null,
- };
-}
-
-/**
- * @param {string} repoRoot
- * @param {boolean} dryRun
- * @returns {OperationResult}
- */
-function summarizeDoctorOmxScaffoldSync(repoRoot, dryRun) {
- const omxScaffoldOps = ensureOmxScaffold(repoRoot, dryRun);
- const changedOmxPaths = omxScaffoldOps.filter((operation) => operation.status !== 'unchanged');
- if (changedOmxPaths.length === 0) {
- return {
- status: 'unchanged',
- note: '.omx scaffold already in sync',
- operations: omxScaffoldOps,
- };
- }
- return {
- status: dryRun ? 'would-sync' : 'synced',
- note: `${dryRun ? 'would sync' : 'synced'} ${changedOmxPaths.length} .omx path(s)`,
- operations: omxScaffoldOps,
- };
-}
-
-/**
- * @param {string} repoRoot
- * @param {SandboxMetadata} metadata
- * @returns {DoctorLockSyncState}
- */
-function syncDoctorLockRegistryBeforeMerge(repoRoot, metadata) {
- const sandboxLockPath = path.join(metadata.worktreePath, LOCK_FILE_RELATIVE);
- const baseLockPath = path.join(repoRoot, LOCK_FILE_RELATIVE);
- if (!fs.existsSync(baseLockPath)) {
- return {
- result: {
- status: 'skipped',
- note: `${LOCK_FILE_RELATIVE} missing in protected base workspace`,
- },
- sandboxLockContent: null,
- };
- }
- if (!fs.existsSync(sandboxLockPath)) {
- return {
- result: {
- status: 'skipped',
- note: `${LOCK_FILE_RELATIVE} missing in sandbox worktree`,
- },
- sandboxLockContent: null,
- };
- }
-
- const sourceContent = stripDoctorSandboxLocks(
- fs.readFileSync(sandboxLockPath, 'utf8'),
- metadata.branch,
- );
- const destinationContent = fs.readFileSync(baseLockPath, 'utf8');
- if (sourceContent === destinationContent) {
- return {
- result: {
- status: 'unchanged',
- note: `${LOCK_FILE_RELATIVE} already in sync`,
- },
- sandboxLockContent: sourceContent,
- };
- }
-
- fs.mkdirSync(path.dirname(baseLockPath), { recursive: true });
- fs.writeFileSync(baseLockPath, sourceContent, 'utf8');
- return {
- result: {
- status: 'synced',
- note: `${LOCK_FILE_RELATIVE} synced from sandbox`,
- },
- sandboxLockContent: sourceContent,
- };
-}
-
-/**
- * @param {string} repoRoot
- * @param {string | null} sandboxLockContent
- * @returns {OperationResult}
- */
-function syncDoctorLockRegistryAfterMerge(repoRoot, sandboxLockContent) {
- if (sandboxLockContent === null) {
- return {
- status: 'skipped',
- note: `${LOCK_FILE_RELATIVE} missing in sandbox worktree`,
- };
- }
-
- const baseLockPath = path.join(repoRoot, LOCK_FILE_RELATIVE);
- if (!fs.existsSync(baseLockPath)) {
- fs.mkdirSync(path.dirname(baseLockPath), { recursive: true });
- fs.writeFileSync(baseLockPath, sandboxLockContent, 'utf8');
- return {
- status: 'synced',
- note: `${LOCK_FILE_RELATIVE} recreated from sandbox`,
- };
- }
-
- const destinationContent = fs.readFileSync(baseLockPath, 'utf8');
- if (sandboxLockContent === destinationContent) {
- return {
- status: 'unchanged',
- note: `${LOCK_FILE_RELATIVE} already in sync`,
- };
- }
-
- fs.mkdirSync(path.dirname(baseLockPath), { recursive: true });
- fs.writeFileSync(baseLockPath, sandboxLockContent, 'utf8');
- return {
- status: 'synced',
- note: `${LOCK_FILE_RELATIVE} synced from sandbox`,
- };
-}
-
-/**
- * @param {object} options
- * @param {{ repoRoot: string, branch: string }} blocked
- * @param {SandboxMetadata} metadata
- * @returns {DoctorSandboxExecution}
- */
-function executeDoctorSandboxLifecycle(options, blocked, metadata) {
- const execution = createDoctorSandboxExecutionState();
- const dryRun = Boolean(options.dryRun);
-
- execution.omxScaffoldSync = summarizeDoctorOmxScaffoldSync(blocked.repoRoot, dryRun);
-
- if (!dryRun) {
- execution.autoCommit = autoCommitDoctorSandboxChanges(metadata);
- if (execution.autoCommit.status === 'committed') {
- execution.finish = finishDoctorSandboxBranch(blocked, metadata, options);
- } else if (execution.autoCommit.status === 'no-changes') {
- execution.finish = createDoctorSkippedOperation('no doctor changes to auto-finish');
- } else if (execution.autoCommit.status !== 'failed') {
- execution.finish = createDoctorSkippedOperation('auto-commit did not run');
- }
- } else {
- execution.autoCommit = createDoctorSkippedOperation('dry-run skips doctor sandbox auto-commit');
- execution.finish = createDoctorSkippedOperation('dry-run skips doctor sandbox finish flow');
- }
-
- const lockSyncState = syncDoctorLockRegistryBeforeMerge(blocked.repoRoot, metadata);
- execution.lockSync = lockSyncState.result;
- execution.sandboxLockContent = lockSyncState.sandboxLockContent;
-
- execution.protectedBaseRepairSync = mergeDoctorSandboxRepairsBackToProtectedBase(
- options,
- blocked,
- metadata,
- execution.autoCommit,
- execution.finish,
- );
-
- execution.omxScaffoldSync = summarizeDoctorOmxScaffoldSync(blocked.repoRoot, dryRun);
- execution.lockSync = syncDoctorLockRegistryAfterMerge(
- blocked.repoRoot,
- execution.sandboxLockContent,
- );
- execution.autoFinish = autoFinishReadyAgentBranches(blocked.repoRoot, {
- baseBranch: blocked.branch,
- dryRun: options.dryRun,
- waitForMerge: options.waitForMerge,
- excludeBranches: [metadata.branch],
- });
-
- return execution;
-}
-
-function emitDoctorSandboxJsonOutput(nestedResult, execution) {
- if (nestedResult.stdout) {
- if (nestedResult.status === 0) {
- try {
- const parsed = JSON.parse(nestedResult.stdout);
- process.stdout.write(
- JSON.stringify(
- {
- ...parsed,
- protectedBaseRepairSync: execution.protectedBaseRepairSync,
- sandboxOmxScaffoldSync: execution.omxScaffoldSync,
- sandboxLockSync: execution.lockSync,
- sandboxAutoCommit: execution.autoCommit,
- sandboxFinish: execution.finish,
- autoFinish: execution.autoFinish,
- },
- null,
- 2,
- ) + '\n',
- );
- } catch {
- process.stdout.write(nestedResult.stdout);
- }
- } else {
- process.stdout.write(nestedResult.stdout);
- }
- }
- if (nestedResult.stderr) process.stderr.write(nestedResult.stderr);
-}
-
-/**
- * @param {object} options
- * @param {{ branch: string }} blocked
- * @param {SandboxMetadata} metadata
- * @param {SandboxStartResult} startResult
- * @param {any} nestedResult
- * @param {DoctorSandboxExecution} execution
- */
-function emitDoctorSandboxConsoleOutput(options, blocked, metadata, startResult, nestedResult, execution) {
- console.log(
- `[${TOOL_NAME}] doctor detected protected branch '${blocked.branch}'. ` +
- `Running repairs in sandbox branch '${metadata.branch || 'agent/'}'.`,
- );
- if (startResult.stdout) process.stdout.write(startResult.stdout);
- if (startResult.stderr) process.stderr.write(startResult.stderr);
- if (nestedResult.stdout) process.stdout.write(nestedResult.stdout);
- if (nestedResult.stderr) process.stderr.write(nestedResult.stderr);
- if (nestedResult.status !== 0) {
- return;
- }
-
- if (execution.autoCommit.status === 'committed') {
- console.log(
- `[${TOOL_NAME}] Auto-committed doctor repairs in sandbox branch '${metadata.branch}'.`,
- );
- } else if (execution.autoCommit.status === 'failed') {
- console.log(`[${TOOL_NAME}] Doctor sandbox auto-commit failed; branch left for manual follow-up.`);
- if (execution.autoCommit.stdout) process.stdout.write(execution.autoCommit.stdout);
- if (execution.autoCommit.stderr) process.stderr.write(execution.autoCommit.stderr);
- } else {
- console.log(`[${TOOL_NAME}] Doctor sandbox auto-commit skipped: ${execution.autoCommit.note}.`);
- }
-
- if (execution.protectedBaseRepairSync.status === 'merged') {
- console.log(`[${TOOL_NAME}] Fast-forwarded tracked doctor repairs into the protected branch workspace.`);
- } else if (execution.protectedBaseRepairSync.status === 'unchanged') {
- console.log(`[${TOOL_NAME}] Protected branch workspace already had the tracked doctor repairs.`);
- } else if (execution.protectedBaseRepairSync.status === 'would-merge') {
- console.log(`[${TOOL_NAME}] Dry run: would fast-forward tracked doctor repairs into the protected branch workspace.`);
- } else if (execution.protectedBaseRepairSync.status === 'failed') {
- console.log(`[${TOOL_NAME}] Protected branch tracked repair merge failed: ${execution.protectedBaseRepairSync.note}.`);
- if (execution.protectedBaseRepairSync.stdout) process.stdout.write(execution.protectedBaseRepairSync.stdout);
- if (execution.protectedBaseRepairSync.stderr) process.stderr.write(execution.protectedBaseRepairSync.stderr);
- } else {
- console.log(`[${TOOL_NAME}] Protected branch tracked repair merge skipped: ${execution.protectedBaseRepairSync.note}.`);
- }
-
- if (execution.lockSync.status === 'synced') {
- console.log(
- `[${TOOL_NAME}] Synced repaired lock registry back to protected branch workspace (${LOCK_FILE_RELATIVE}).`,
- );
- } else if (execution.lockSync.status === 'unchanged') {
- console.log(`[${TOOL_NAME}] Lock registry already synced in protected branch workspace.`);
- } else {
- console.log(`[${TOOL_NAME}] Lock registry sync skipped: ${execution.lockSync.note}.`);
- }
-
- if (execution.finish.status === 'completed') {
- console.log(`[${TOOL_NAME}] Auto-finish flow completed for sandbox branch '${metadata.branch}'.`);
- if (execution.finish.stdout) process.stdout.write(execution.finish.stdout);
- if (execution.finish.stderr) process.stderr.write(execution.finish.stderr);
- } else if (execution.finish.status === 'pending') {
- console.log(
- `[${TOOL_NAME}] Auto-finish pending for sandbox branch '${metadata.branch}': ${execution.finish.note}.`,
- );
- if (execution.finish.prUrl) {
- console.log(`[${TOOL_NAME}] PR: ${execution.finish.prUrl}`);
- }
- if (execution.finish.stdout) process.stdout.write(execution.finish.stdout);
- if (execution.finish.stderr) process.stderr.write(execution.finish.stderr);
- } else if (execution.finish.status === 'failed') {
- console.log(`[${TOOL_NAME}] Auto-finish flow failed for sandbox branch '${metadata.branch}'.`);
- if (execution.finish.stdout) process.stdout.write(execution.finish.stdout);
- if (execution.finish.stderr) process.stderr.write(execution.finish.stderr);
- } else {
- console.log(`[${TOOL_NAME}] Auto-finish skipped: ${execution.finish.note}.`);
- }
-
- printAutoFinishSummary(execution.autoFinish, {
- baseBranch: blocked.branch,
- verbose: options.verboseAutoFinish,
- });
- if (execution.omxScaffoldSync.status === 'synced') {
- console.log(`[${TOOL_NAME}] Synced .omx scaffold back to protected branch workspace.`);
- } else if (execution.omxScaffoldSync.status === 'unchanged') {
- console.log(`[${TOOL_NAME}] .omx scaffold already aligned in protected branch workspace.`);
- } else if (execution.omxScaffoldSync.status === 'would-sync') {
- console.log(`[${TOOL_NAME}] Dry run: would sync .omx scaffold back to protected branch workspace.`);
- } else {
- console.log(`[${TOOL_NAME}] .omx scaffold sync skipped: ${execution.omxScaffoldSync.note}.`);
- }
-}
-
-function setDoctorSandboxExitCode(nestedResult, execution) {
- if (typeof nestedResult.status === 'number') {
- let exitCode = nestedResult.status;
- if (exitCode === 0 && execution.autoCommit.status === 'failed') {
- exitCode = 1;
- }
- if (
- exitCode === 0 &&
- execution.autoCommit.status === 'committed' &&
- (execution.finish.status === 'failed' || execution.finish.status === 'pending')
- ) {
- exitCode = 1;
- }
- if (exitCode === 0 && execution.protectedBaseRepairSync.status === 'failed') {
- exitCode = 1;
- }
- process.exitCode = exitCode;
- return;
- }
- process.exitCode = 1;
-}
-
-function runDoctorInSandbox(options, blocked) {
- /** @type {SandboxStartResult} */
- const startResult = startProtectedBaseSandbox(blocked, {
- taskName: `${SHORT_TOOL_NAME}-doctor`,
- sandboxSuffix: 'gx-doctor',
- });
- const metadata = startResult.metadata;
-
- const sandboxTarget = resolveSandboxTarget(blocked.repoRoot, metadata.worktreePath, options.target);
- const nestedResult = run(
- process.execPath,
- [__filename, ...buildSandboxDoctorArgs(options, sandboxTarget)],
- { cwd: metadata.worktreePath },
- );
- if (isSpawnFailure(nestedResult)) {
- throw nestedResult.error;
- }
-
- const execution = nestedResult.status === 0
- ? executeDoctorSandboxLifecycle(options, blocked, metadata)
- : createDoctorSandboxExecutionState();
-
- if (options.json) {
- emitDoctorSandboxJsonOutput(nestedResult, execution);
- } else {
- emitDoctorSandboxConsoleOutput(options, blocked, metadata, startResult, nestedResult, execution);
- }
-
- setDoctorSandboxExitCode(nestedResult, execution);
-}
-
function runSetupInSandbox(options, blocked, repoLabel = '') {
const startResult = startProtectedBaseSandbox(blocked, {
taskName: `${SHORT_TOOL_NAME}-setup`,
@@ -2459,6 +1577,14 @@ function hasSignificantWorkingTreeChanges(worktreePath) {
return false;
}
+function originRemoteLooksLikeGithub(repoRoot) {
+ const originUrl = readGitConfig(repoRoot, 'remote.origin.url');
+ if (!originUrl) {
+ return false;
+ }
+ return /github\.com[:/]/i.test(originUrl);
+}
+
function autoFinishReadyAgentBranches(repoRoot, options = {}) {
const baseBranch = String(options.baseBranch || '').trim();
const dryRun = Boolean(options.dryRun);
@@ -2655,14 +1781,6 @@ function writeProtectedBranches(repoRoot, branches) {
gitRun(repoRoot, ['config', GIT_PROTECTED_BRANCHES_KEY, branches.join(' ')]);
}
-function readGitConfig(repoRoot, key) {
- const result = gitRun(repoRoot, ['config', '--get', key], { allowFailure: true });
- if (result.status !== 0) {
- return '';
- }
- return (result.stdout || '').trim();
-}
-
function resolveBaseBranch(repoRoot, explicitBase) {
if (explicitBase) {
return explicitBase;
@@ -2681,18 +1799,6 @@ function resolveSyncStrategy(repoRoot, explicitStrategy) {
return strategy;
}
-function currentBranchName(repoRoot) {
- const result = gitRun(repoRoot, ['branch', '--show-current'], { allowFailure: true });
- if (result.status !== 0) {
- throw new Error('Unable to detect current branch');
- }
- const branch = (result.stdout || '').trim();
- if (!branch) {
- throw new Error('Detached HEAD is not supported for sync operations');
- }
- return branch;
-}
-
function repoHasHeadCommit(repoRoot) {
return gitRun(repoRoot, ['rev-parse', '--verify', 'HEAD'], { allowFailure: true }).status === 0;
}
@@ -2753,23 +1859,6 @@ function printSetupRepoHints(repoRoot, baseBranch, repoLabel = '') {
}
}
-function workingTreeIsDirty(repoRoot) {
- const result = gitRun(repoRoot, ['status', '--porcelain'], { allowFailure: true });
- if (result.status !== 0) {
- throw new Error('Unable to inspect git working tree status');
- }
- const lines = (result.stdout || '').split('\n').filter((line) => line.length > 0);
- const significant = lines.filter((line) => {
- const pathPart = (line.length > 3 ? line.slice(3) : '').trim();
- if (!pathPart) return false;
- if (pathPart === LOCK_FILE_RELATIVE) return false;
- if (pathPart.startsWith(`${LOCK_FILE_RELATIVE} -> `)) return false;
- if (pathPart.endsWith(` -> ${LOCK_FILE_RELATIVE}`)) return false;
- return true;
- });
- return significant.length > 0;
-}
-
function ensureOriginBaseRef(repoRoot, baseBranch) {
const fetch = gitRun(repoRoot, ['fetch', 'origin', baseBranch, '--quiet'], { allowFailure: true });
if (fetch.status !== 0) {
@@ -2785,19 +1874,6 @@ function ensureOriginBaseRef(repoRoot, baseBranch) {
}
}
-function aheadBehind(repoRoot, branchRef, baseRef) {
- const result = gitRun(repoRoot, ['rev-list', '--left-right', '--count', `${branchRef}...${baseRef}`], {
- allowFailure: true,
- });
- if (result.status !== 0) {
- throw new Error(`Unable to compute ahead/behind for ${branchRef} vs ${baseRef}`);
- }
- const parts = (result.stdout || '').trim().split(/\s+/).filter(Boolean);
- const ahead = Number.parseInt(parts[0] || '0', 10);
- const behind = Number.parseInt(parts[1] || '0', 10);
- return { ahead: Number.isFinite(ahead) ? ahead : 0, behind: Number.isFinite(behind) ? behind : 0 };
-}
-
function lockRegistryStatus(repoRoot) {
const result = gitRun(repoRoot, ['status', '--porcelain', '--', LOCK_FILE_RELATIVE], { allowFailure: true });
if (result.status !== 0) {
@@ -2989,13 +2065,6 @@ function claimLocksForAutoCommit(repoRoot, worktreePath, branch) {
}
}
-function branchExists(repoRoot, branch) {
- const result = gitRun(repoRoot, ['show-ref', '--verify', '--quiet', `refs/heads/${branch}`], {
- allowFailure: true,
- });
- return result.status === 0;
-}
-
function resolveFinishBaseBranch(repoRoot, _sourceBranch, explicitBase) {
if (explicitBase) {
return explicitBase;
@@ -3009,22 +2078,6 @@ function resolveFinishBaseBranch(repoRoot, _sourceBranch, explicitBase) {
return DEFAULT_BASE_BRANCH;
}
-function branchMergedIntoBase(repoRoot, branch, baseBranch) {
- if (!branchExists(repoRoot, baseBranch)) {
- return false;
- }
- const result = gitRun(repoRoot, ['merge-base', '--is-ancestor', branch, baseBranch], {
- allowFailure: true,
- });
- if (result.status === 0) {
- return true;
- }
- if (result.status === 1) {
- return false;
- }
- throw new Error(`Unable to determine merge status for ${branch} -> ${baseBranch}`);
-}
-
function autoCommitWorktreeForFinish(repoRoot, worktreePath, branch, options) {
const hasChanges = worktreeHasLocalChanges(worktreePath);
if (!hasChanges) {
@@ -4336,7 +3389,13 @@ function doctor(rawArgs) {
const blocked = protectedBaseWriteBlock(singleRepoOptions, { requireBootstrap: false });
if (blocked) {
- runDoctorInSandbox(singleRepoOptions, blocked);
+ doctorModule.runDoctorInSandbox(singleRepoOptions, blocked, {
+ startProtectedBaseSandbox,
+ cleanupProtectedBaseSandbox,
+ ensureOmxScaffold,
+ configureHooks,
+ autoFinishReadyAgentBranches,
+ });
return;
}
diff --git a/src/doctor/index.js b/src/doctor/index.js
new file mode 100644
index 0000000..70404dc
--- /dev/null
+++ b/src/doctor/index.js
@@ -0,0 +1,950 @@
+const {
+ fs,
+ path,
+ TOOL_NAME,
+ SHORT_TOOL_NAME,
+ CLI_ENTRY_PATH,
+ LOCK_FILE_RELATIVE,
+ REQUIRED_MANAGED_REPO_FILES,
+ AGENT_WORKTREE_RELATIVE_DIRS,
+ OMX_SCAFFOLD_DIRECTORIES,
+ OMX_SCAFFOLD_FILES,
+} = require('../context');
+const { run, runPackageAsset } = require('../core/runtime');
+const { readGitConfig, ensureRepoBranch } = require('../git');
+const { printAutoFinishSummary } = require('../output');
+
+/**
+ * @typedef {Object} AutoFinishSummary
+ * @property {boolean} [enabled]
+ * @property {number} [attempted]
+ * @property {number} [completed]
+ * @property {number} [skipped]
+ * @property {number} [failed]
+ * @property {string[]} [details]
+ * @property {string} [baseBranch]
+ */
+
+/**
+ * @typedef {Object} OperationResult
+ * @property {string} status
+ * @property {string} note
+ * @property {string} [stdout]
+ * @property {string} [stderr]
+ * @property {string} [prUrl]
+ * @property {string[]} [stagedFiles]
+ * @property {string} [commitMessage]
+ * @property {unknown[]} [operations]
+ * @property {OperationResult} [cleanup]
+ * @property {OperationResult} [hookRefresh]
+ */
+
+/**
+ * @typedef {Object} SandboxMetadata
+ * @property {string} branch
+ * @property {string} worktreePath
+ */
+
+/**
+ * @typedef {Object} SandboxStartResult
+ * @property {SandboxMetadata} metadata
+ * @property {string} [stdout]
+ * @property {string} [stderr]
+ */
+
+/**
+ * @typedef {Object} DoctorLockSyncState
+ * @property {OperationResult} result
+ * @property {string | null} sandboxLockContent
+ */
+
+/**
+ * @typedef {Object} DoctorSandboxExecution
+ * @property {OperationResult} autoCommit
+ * @property {OperationResult} finish
+ * @property {OperationResult} protectedBaseRepairSync
+ * @property {OperationResult} lockSync
+ * @property {OperationResult} omxScaffoldSync
+ * @property {AutoFinishSummary} autoFinish
+ * @property {string | null} sandboxLockContent
+ */
+
+function requireDoctorIntegration(name, value) {
+ if (typeof value !== 'function') {
+ throw new Error(`doctor integration missing: ${name}`);
+ }
+ return value;
+}
+
+function appendForceArgs(args, options) {
+ if (!options.force) {
+ return;
+ }
+ args.push('--force');
+ if (Array.isArray(options.forceManagedPaths) && options.forceManagedPaths.length > 0) {
+ args.push(...options.forceManagedPaths);
+ }
+}
+
+function resolveSandboxTarget(repoRoot, worktreePath, targetPath) {
+ const resolvedTarget = path.resolve(targetPath);
+ const relativeTarget = path.relative(repoRoot, resolvedTarget);
+ if (relativeTarget.startsWith('..') || path.isAbsolute(relativeTarget)) {
+ throw new Error(`sandbox target must stay inside repo root: ${resolvedTarget}`);
+ }
+ if (!relativeTarget || relativeTarget === '.') {
+ return worktreePath;
+ }
+ return path.join(worktreePath, relativeTarget);
+}
+
+function buildSandboxDoctorArgs(options, sandboxTarget) {
+ const args = ['doctor', '--target', sandboxTarget];
+ if (options.dryRun) args.push('--dry-run');
+ appendForceArgs(args, options);
+ if (options.skipAgents) args.push('--skip-agents');
+ if (options.skipPackageJson) args.push('--skip-package-json');
+ if (options.skipGitignore) args.push('--no-gitignore');
+ if (!options.dropStaleLocks) args.push('--keep-stale-locks');
+ args.push(options.waitForMerge ? '--wait-for-merge' : '--no-wait-for-merge');
+ if (options.verboseAutoFinish) args.push('--verbose-auto-finish');
+ if (options.json) args.push('--json');
+ return args;
+}
+
+function isSpawnFailure(result) {
+ return Boolean(result?.error) && typeof result?.status !== 'number';
+}
+
+function parseGitPathList(output) {
+ return String(output || '')
+ .split('\n')
+ .map((line) => line.trim())
+ .filter((line) => line && line !== LOCK_FILE_RELATIVE);
+}
+
+function collectWorktreePaths(worktreePath, commands) {
+ const changed = new Set();
+ for (const gitArgs of commands) {
+ const result = run('git', ['-C', worktreePath, ...gitArgs], { timeout: 20_000 });
+ for (const filePath of parseGitPathList(result.stdout)) {
+ changed.add(filePath);
+ }
+ }
+ return Array.from(changed);
+}
+
+function collectDoctorChangedPaths(worktreePath) {
+ return collectWorktreePaths(worktreePath, [
+ ['diff', '--name-only'],
+ ['diff', '--cached', '--name-only'],
+ ['ls-files', '--others', '--exclude-standard'],
+ ]);
+}
+
+function collectDoctorDeletedPaths(worktreePath) {
+ return collectWorktreePaths(worktreePath, [
+ ['diff', '--name-only', '--diff-filter=D'],
+ ['diff', '--cached', '--name-only', '--diff-filter=D'],
+ ]);
+}
+
+function collectWorktreeDirtyPaths(worktreePath) {
+ return collectWorktreePaths(worktreePath, [
+ ['diff', '--name-only'],
+ ['diff', '--cached', '--name-only'],
+ ['ls-files', '--others', '--exclude-standard'],
+ ]);
+}
+
+function collectDoctorForceAddPaths(worktreePath) {
+ return REQUIRED_MANAGED_REPO_FILES
+ .filter((relativePath) => relativePath.startsWith('scripts/') || relativePath.startsWith('.githooks/'))
+ .filter((relativePath) => fs.existsSync(path.join(worktreePath, relativePath)));
+}
+
+function stripDoctorSandboxLocks(rawContent, branchName) {
+ if (!rawContent || !branchName) {
+ return rawContent;
+ }
+ try {
+ const parsed = JSON.parse(rawContent);
+ const locks = parsed && typeof parsed === 'object' && parsed.locks && typeof parsed.locks === 'object'
+ ? parsed.locks
+ : null;
+ if (!locks) {
+ return rawContent;
+ }
+ let changed = false;
+ const filteredLocks = {};
+ for (const [filePath, lockInfo] of Object.entries(locks)) {
+ if (lockInfo && lockInfo.branch === branchName) {
+ changed = true;
+ continue;
+ }
+ filteredLocks[filePath] = lockInfo;
+ }
+ if (!changed) {
+ return rawContent;
+ }
+ return `${JSON.stringify({ ...parsed, locks: filteredLocks }, null, 2)}\n`;
+ } catch {
+ return rawContent;
+ }
+}
+
+function claimDoctorChangedLocks(metadata) {
+ if (!metadata.branch) {
+ return {
+ status: 'skipped',
+ note: 'missing sandbox branch metadata',
+ changedCount: 0,
+ deletedCount: 0,
+ };
+ }
+
+ const changedPaths = Array.from(new Set([
+ ...collectDoctorChangedPaths(metadata.worktreePath),
+ ...collectDoctorForceAddPaths(metadata.worktreePath),
+ ]));
+ const deletedPaths = collectDoctorDeletedPaths(metadata.worktreePath);
+ if (changedPaths.length > 0) {
+ runPackageAsset('lockTool', ['claim', '--branch', metadata.branch, ...changedPaths], {
+ cwd: metadata.worktreePath,
+ timeout: 30_000,
+ });
+ }
+ if (deletedPaths.length > 0) {
+ runPackageAsset('lockTool', ['allow-delete', '--branch', metadata.branch, ...deletedPaths], {
+ cwd: metadata.worktreePath,
+ timeout: 30_000,
+ });
+ }
+
+ return {
+ status: 'claimed',
+ note: 'claimed locks for doctor auto-commit',
+ changedCount: changedPaths.length,
+ deletedCount: deletedPaths.length,
+ };
+}
+
+function autoCommitDoctorSandboxChanges(metadata) {
+ if (!metadata.worktreePath || !metadata.branch) {
+ return {
+ status: 'skipped',
+ note: 'missing sandbox branch metadata',
+ };
+ }
+
+ claimDoctorChangedLocks(metadata);
+ run(
+ 'git',
+ ['-C', metadata.worktreePath, 'add', '-A', '--', '.', `:(exclude)${LOCK_FILE_RELATIVE}`],
+ { timeout: 20_000 },
+ );
+ const forceAddPaths = collectDoctorForceAddPaths(metadata.worktreePath);
+ if (forceAddPaths.length > 0) {
+ run(
+ 'git',
+ ['-C', metadata.worktreePath, 'add', '-f', '--', ...forceAddPaths],
+ { timeout: 20_000 },
+ );
+ }
+ const staged = run(
+ 'git',
+ ['-C', metadata.worktreePath, 'diff', '--cached', '--name-only', '--', '.', `:(exclude)${LOCK_FILE_RELATIVE}`],
+ { timeout: 20_000 },
+ );
+ const stagedFiles = parseGitPathList(staged.stdout);
+ if (stagedFiles.length === 0) {
+ return {
+ status: 'no-changes',
+ note: 'no committable doctor changes found in sandbox',
+ };
+ }
+
+ const commitResult = run(
+ 'git',
+ ['-C', metadata.worktreePath, 'commit', '-m', 'Auto-finish: gx doctor repairs'],
+ { timeout: 30_000 },
+ );
+ if (commitResult.status !== 0) {
+ return {
+ status: 'failed',
+ note: 'doctor sandbox auto-commit failed',
+ stdout: commitResult.stdout || '',
+ stderr: commitResult.stderr || '',
+ };
+ }
+
+ return {
+ status: 'committed',
+ note: 'doctor sandbox repairs committed',
+ commitMessage: 'Auto-finish: gx doctor repairs',
+ stagedFiles,
+ };
+}
+
+function hasOriginRemote(repoRoot) {
+ return run('git', ['-C', repoRoot, 'remote', 'get-url', 'origin']).status === 0;
+}
+
+function originRemoteLooksLikeGithub(repoRoot) {
+ const originUrl = readGitConfig(repoRoot, 'remote.origin.url');
+ if (!originUrl) {
+ return false;
+ }
+ return /github\.com[:/]/i.test(originUrl);
+}
+
+function isCommandAvailable(commandName) {
+ return run('which', [commandName]).status === 0;
+}
+
+function extractAgentBranchFinishPrUrl(output) {
+ const match = String(output || '').match(/\[agent-branch-finish\] PR:\s*(\S+)/);
+ return match ? match[1] : '';
+}
+
+function doctorFinishFlowIsPending(output) {
+ return (
+ /\[agent-branch-finish\] PR merge not completed yet; leaving PR open\./.test(output) ||
+ /\[agent-branch-finish\] Merge pending review\/check policy\. Branch cleanup skipped for now\./.test(output) ||
+ /\[agent-branch-finish\] PR auto-merge enabled; waiting for required checks\/reviews\./.test(output)
+ );
+}
+
+function finishDoctorSandboxBranch(blocked, metadata, options = {}) {
+ if (!hasOriginRemote(blocked.repoRoot)) {
+ return {
+ status: 'skipped',
+ note: 'origin remote missing; skipped auto-finish',
+ };
+ }
+ const explicitGhBin = Boolean(String(process.env.GUARDEX_GH_BIN || '').trim());
+ if (!explicitGhBin && !originRemoteLooksLikeGithub(blocked.repoRoot)) {
+ return {
+ status: 'skipped',
+ note: 'origin remote is not GitHub; skipped auto-finish PR flow',
+ };
+ }
+
+ const ghBin = process.env.GUARDEX_GH_BIN || 'gh';
+ if (!isCommandAvailable(ghBin)) {
+ return {
+ status: 'skipped',
+ note: `'${ghBin}' not available; skipped auto-finish PR flow`,
+ };
+ }
+ const ghAuthStatus = run(ghBin, ['auth', 'status'], { timeout: 20_000 });
+ if (ghAuthStatus.status !== 0) {
+ return {
+ status: 'skipped',
+ note: `'${ghBin}' auth unavailable; skipped auto-finish PR flow`,
+ stderr: ghAuthStatus.stderr || '',
+ };
+ }
+
+ const rawWaitTimeoutSeconds = Number.parseInt(process.env.GUARDEX_FINISH_WAIT_TIMEOUT_SECONDS || '1800', 10);
+ const waitTimeoutSeconds =
+ Number.isFinite(rawWaitTimeoutSeconds) && rawWaitTimeoutSeconds >= 30 ? rawWaitTimeoutSeconds : 1800;
+ const finishTimeoutMs = Math.max(180_000, (waitTimeoutSeconds + 60) * 1000);
+ const waitForMergeArg = options.waitForMerge === false ? '--no-wait-for-merge' : '--wait-for-merge';
+
+ const finishResult = runPackageAsset(
+ 'branchFinish',
+ ['--branch', metadata.branch, '--base', blocked.branch, '--via-pr', waitForMergeArg, '--cleanup'],
+ { cwd: metadata.worktreePath, timeout: finishTimeoutMs },
+ );
+ if (isSpawnFailure(finishResult)) {
+ return {
+ status: 'failed',
+ note: 'doctor sandbox finish flow errored',
+ stdout: finishResult.stdout || '',
+ stderr: finishResult.stderr || '',
+ };
+ }
+ if (finishResult.status !== 0) {
+ return {
+ status: 'failed',
+ note: 'doctor sandbox finish flow failed',
+ stdout: finishResult.stdout || '',
+ stderr: finishResult.stderr || '',
+ };
+ }
+
+ const combinedOutput = `${finishResult.stdout || ''}\n${finishResult.stderr || ''}`;
+ if (doctorFinishFlowIsPending(combinedOutput)) {
+ return {
+ status: 'pending',
+ note: 'PR created and waiting for merge policy/checks',
+ prUrl: extractAgentBranchFinishPrUrl(combinedOutput),
+ stdout: finishResult.stdout || '',
+ stderr: finishResult.stderr || '',
+ };
+ }
+
+ return {
+ status: 'completed',
+ note: 'doctor sandbox finish flow completed',
+ stdout: finishResult.stdout || '',
+ stderr: finishResult.stderr || '',
+ };
+}
+
+function applyStash(repoRoot, stashRef) {
+ if (!stashRef) {
+ return;
+ }
+ run('git', ['-C', repoRoot, 'stash', 'apply', stashRef], { timeout: 30_000 });
+}
+
+function dropStash(repoRoot, stashRef) {
+ if (!stashRef) {
+ return;
+ }
+ run('git', ['-C', repoRoot, 'stash', 'drop', stashRef], { timeout: 20_000 });
+}
+
+function mergeDoctorSandboxRepairsBackToProtectedBase(options, blocked, metadata, autoCommitResult, finishResult, integrations) {
+ if (options.dryRun) {
+ return {
+ status: autoCommitResult.status === 'committed' ? 'would-merge' : 'skipped',
+ note: autoCommitResult.status === 'committed'
+ ? 'dry run: would fast-forward tracked doctor repairs into the protected base workspace'
+ : 'dry run skips tracked repair merge',
+ };
+ }
+
+ if (autoCommitResult.status !== 'committed') {
+ return {
+ status: autoCommitResult.status === 'no-changes' ? 'unchanged' : 'skipped',
+ note: autoCommitResult.status === 'no-changes'
+ ? 'no tracked doctor repairs needed in the protected base workspace'
+ : 'tracked doctor repair merge skipped',
+ };
+ }
+
+ if (finishResult.status !== 'skipped') {
+ return {
+ status: 'skipped',
+ note: finishResult.status === 'failed'
+ ? 'tracked doctor repairs remain in the sandbox after finish failure'
+ : 'tracked doctor repairs are being delivered through the sandbox finish flow',
+ };
+ }
+
+ const allowedPaths = new Set([
+ ...(autoCommitResult.stagedFiles || []),
+ ...OMX_SCAFFOLD_DIRECTORIES,
+ ...Array.from(OMX_SCAFFOLD_FILES.keys()),
+ ...REQUIRED_MANAGED_REPO_FILES,
+ 'bin',
+ 'package.json',
+ '.gitignore',
+ 'AGENTS.md',
+ ]);
+ const dirtyPaths = collectWorktreeDirtyPaths(blocked.repoRoot);
+ let stashRef = '';
+ let mergeSucceeded = false;
+
+ try {
+ if (dirtyPaths.length > 0) {
+ const unexpectedPaths = dirtyPaths.filter((filePath) => {
+ if (allowedPaths.has(filePath)) {
+ return false;
+ }
+ return !AGENT_WORKTREE_RELATIVE_DIRS.some(
+ (relativeDir) => filePath === relativeDir || filePath.startsWith(`${relativeDir}/`),
+ );
+ });
+ if (unexpectedPaths.length > 0) {
+ return {
+ status: 'failed',
+ note: `protected branch workspace has unrelated local changes: ${unexpectedPaths.join(', ')}`,
+ };
+ }
+
+ const stashMessage = `guardex-doctor-merge-${Date.now()}`;
+ const stashResult = run(
+ 'git',
+ ['-C', blocked.repoRoot, 'stash', 'push', '--all', '--message', stashMessage],
+ { timeout: 30_000 },
+ );
+ if (isSpawnFailure(stashResult)) {
+ return {
+ status: 'failed',
+ note: 'could not stash protected branch doctor drift before merge',
+ stdout: stashResult.stdout || '',
+ stderr: stashResult.stderr || '',
+ };
+ }
+ if (stashResult.status !== 0) {
+ return {
+ status: 'failed',
+ note: 'stashing protected branch doctor drift failed',
+ stdout: stashResult.stdout || '',
+ stderr: stashResult.stderr || '',
+ };
+ }
+
+ const stashLookup = run(
+ 'git',
+ ['-C', blocked.repoRoot, 'stash', 'list'],
+ { timeout: 20_000 },
+ );
+ stashRef = String(stashLookup.stdout || '')
+ .split('\n')
+ .find((line) => line.includes(stashMessage))
+ ?.split(':')[0]
+ ?.trim() || '';
+ }
+
+ const restoreResult = ensureRepoBranch(blocked.repoRoot, blocked.branch);
+ if (!restoreResult.ok) {
+ return {
+ status: 'failed',
+ note: `could not restore protected branch '${blocked.branch}' before applying sandbox repairs`,
+ stdout: restoreResult.stdout || '',
+ stderr: restoreResult.stderr || '',
+ };
+ }
+
+ const mergeResult = run(
+ 'git',
+ ['-C', blocked.repoRoot, 'merge', '--ff-only', metadata.branch],
+ { timeout: 30_000 },
+ );
+ if (isSpawnFailure(mergeResult)) {
+ return {
+ status: 'failed',
+ note: 'tracked doctor repair merge errored',
+ stdout: mergeResult.stdout || '',
+ stderr: mergeResult.stderr || '',
+ };
+ }
+ if (mergeResult.status !== 0) {
+ return {
+ status: 'failed',
+ note: 'tracked doctor repair merge failed',
+ stdout: mergeResult.stdout || '',
+ stderr: mergeResult.stderr || '',
+ };
+ }
+ mergeSucceeded = true;
+
+ let cleanupResult;
+ try {
+ cleanupResult = integrations.cleanupProtectedBaseSandbox(blocked.repoRoot, metadata);
+ } catch (error) {
+ return {
+ status: 'failed',
+ note: `tracked doctor repair merge succeeded but sandbox cleanup failed: ${error.message}`,
+ stdout: mergeResult.stdout || '',
+ stderr: mergeResult.stderr || '',
+ };
+ }
+
+ let hookRefreshResult;
+ try {
+ hookRefreshResult = integrations.configureHooks(blocked.repoRoot, false);
+ } catch (error) {
+ return {
+ status: 'failed',
+ note: `tracked doctor repair merge succeeded but local hook refresh failed: ${error.message}`,
+ stdout: mergeResult.stdout || '',
+ stderr: mergeResult.stderr || '',
+ };
+ }
+
+ return {
+ status: 'merged',
+ note: 'fast-forwarded tracked doctor repairs into the protected base workspace',
+ stdout: mergeResult.stdout || '',
+ stderr: mergeResult.stderr || '',
+ cleanup: cleanupResult,
+ hookRefresh: hookRefreshResult,
+ };
+ } finally {
+ if (mergeSucceeded) {
+ dropStash(blocked.repoRoot, stashRef);
+ } else {
+ applyStash(blocked.repoRoot, stashRef);
+ }
+ }
+}
+
+function createDoctorSkippedOperation(note = 'sandbox doctor did not complete successfully') {
+ return {
+ status: 'skipped',
+ note,
+ };
+}
+
+function createSkippedDoctorAutoFinishSummary(note = 'sandbox doctor did not complete successfully') {
+ return {
+ enabled: false,
+ attempted: 0,
+ completed: 0,
+ skipped: 0,
+ failed: 0,
+ details: [`Skipped auto-finish sweep (${note}).`],
+ };
+}
+
+function createDoctorSandboxExecutionState(note = 'sandbox doctor did not complete successfully') {
+ return {
+ autoCommit: createDoctorSkippedOperation(note),
+ finish: createDoctorSkippedOperation(note),
+ protectedBaseRepairSync: createDoctorSkippedOperation(note),
+ lockSync: createDoctorSkippedOperation(note),
+ omxScaffoldSync: createDoctorSkippedOperation(note),
+ autoFinish: createSkippedDoctorAutoFinishSummary(note),
+ sandboxLockContent: null,
+ };
+}
+
+function summarizeDoctorOmxScaffoldSync(repoRoot, dryRun, ensureOmxScaffold) {
+ const omxScaffoldOps = ensureOmxScaffold(repoRoot, dryRun);
+ const changedOmxPaths = omxScaffoldOps.filter((operation) => operation.status !== 'unchanged');
+ if (changedOmxPaths.length === 0) {
+ return {
+ status: 'unchanged',
+ note: '.omx scaffold already in sync',
+ operations: omxScaffoldOps,
+ };
+ }
+ return {
+ status: dryRun ? 'would-sync' : 'synced',
+ note: `${dryRun ? 'would sync' : 'synced'} ${changedOmxPaths.length} .omx path(s)`,
+ operations: omxScaffoldOps,
+ };
+}
+
+function syncDoctorLockRegistryBeforeMerge(repoRoot, metadata) {
+ const sandboxLockPath = path.join(metadata.worktreePath, LOCK_FILE_RELATIVE);
+ const baseLockPath = path.join(repoRoot, LOCK_FILE_RELATIVE);
+ if (!fs.existsSync(baseLockPath)) {
+ return {
+ result: {
+ status: 'skipped',
+ note: `${LOCK_FILE_RELATIVE} missing in protected base workspace`,
+ },
+ sandboxLockContent: null,
+ };
+ }
+ if (!fs.existsSync(sandboxLockPath)) {
+ return {
+ result: {
+ status: 'skipped',
+ note: `${LOCK_FILE_RELATIVE} missing in sandbox worktree`,
+ },
+ sandboxLockContent: null,
+ };
+ }
+
+ const sourceContent = stripDoctorSandboxLocks(
+ fs.readFileSync(sandboxLockPath, 'utf8'),
+ metadata.branch,
+ );
+ const destinationContent = fs.readFileSync(baseLockPath, 'utf8');
+ if (sourceContent === destinationContent) {
+ return {
+ result: {
+ status: 'unchanged',
+ note: `${LOCK_FILE_RELATIVE} already in sync`,
+ },
+ sandboxLockContent: sourceContent,
+ };
+ }
+
+ fs.mkdirSync(path.dirname(baseLockPath), { recursive: true });
+ fs.writeFileSync(baseLockPath, sourceContent, 'utf8');
+ return {
+ result: {
+ status: 'synced',
+ note: `${LOCK_FILE_RELATIVE} synced from sandbox`,
+ },
+ sandboxLockContent: sourceContent,
+ };
+}
+
+/**
+ * Post-merge re-sync of the agent lock registry into the protected base
+ * workspace, using the sandbox content captured by the pre-merge pass.
+ *
+ * Unlike the pre-merge variant, this recreates the base registry file when the
+ * merge removed it, and otherwise overwrites it whenever the content differs.
+ *
+ * @param {string} repoRoot - Path of the protected base workspace.
+ * @param {string|null} sandboxLockContent - Registry text captured pre-merge,
+ *   or null when the sandbox had no registry (then the sync is skipped).
+ * @returns {{status: string, note: string}} status is 'skipped' | 'synced' | 'unchanged'.
+ */
+function syncDoctorLockRegistryAfterMerge(repoRoot, sandboxLockContent) {
+  if (sandboxLockContent === null) {
+    return {
+      status: 'skipped',
+      note: `${LOCK_FILE_RELATIVE} missing in sandbox worktree`,
+    };
+  }
+
+  const baseLockPath = path.join(repoRoot, LOCK_FILE_RELATIVE);
+  // The merge may have deleted the base registry; restore it from the sandbox copy.
+  if (!fs.existsSync(baseLockPath)) {
+    fs.mkdirSync(path.dirname(baseLockPath), { recursive: true });
+    fs.writeFileSync(baseLockPath, sandboxLockContent, 'utf8');
+    return {
+      status: 'synced',
+      note: `${LOCK_FILE_RELATIVE} recreated from sandbox`,
+    };
+  }
+
+  const destinationContent = fs.readFileSync(baseLockPath, 'utf8');
+  if (sandboxLockContent === destinationContent) {
+    return {
+      status: 'unchanged',
+      note: `${LOCK_FILE_RELATIVE} already in sync`,
+    };
+  }
+
+  fs.mkdirSync(path.dirname(baseLockPath), { recursive: true });
+  fs.writeFileSync(baseLockPath, sandboxLockContent, 'utf8');
+  return {
+    status: 'synced',
+    note: `${LOCK_FILE_RELATIVE} synced from sandbox`,
+  };
+}
+
+/**
+ * Runs the post-doctor sandbox lifecycle: scaffold sync, auto-commit, finish
+ * flow, lock-registry sync around the merge-back, and the auto-finish sweep
+ * of other ready agent branches.
+ *
+ * @param {object} options - Doctor CLI options (dryRun, waitForMerge, ...).
+ * @param {{repoRoot: string, branch: string}} blocked - Protected-branch context.
+ * @param {{worktreePath: string, branch: string}} metadata - Sandbox metadata.
+ * @param {object} integrations - Injected collaborators (ensureOmxScaffold, etc.).
+ * @returns {object} The populated execution-state record.
+ */
+function executeDoctorSandboxLifecycle(options, blocked, metadata, integrations) {
+  const execution = createDoctorSandboxExecutionState();
+  const dryRun = Boolean(options.dryRun);
+
+  // NOTE(review): omxScaffoldSync and lockSync are computed here and then
+  // recomputed/overwritten after the merge-back below; only the post-merge
+  // results are reported. Confirm the pre-merge calls are needed for their
+  // side effects (scaffold sync / registry copy) rather than their summaries.
+  execution.omxScaffoldSync = summarizeDoctorOmxScaffoldSync(
+    blocked.repoRoot,
+    dryRun,
+    integrations.ensureOmxScaffold,
+  );
+
+  if (!dryRun) {
+    execution.autoCommit = autoCommitDoctorSandboxChanges(metadata);
+    // Only a successful commit proceeds to the finish flow; 'failed' leaves
+    // execution.finish at its initial state for the exit-code logic.
+    if (execution.autoCommit.status === 'committed') {
+      execution.finish = finishDoctorSandboxBranch(blocked, metadata, options);
+    } else if (execution.autoCommit.status === 'no-changes') {
+      execution.finish = createDoctorSkippedOperation('no doctor changes to auto-finish');
+    } else if (execution.autoCommit.status !== 'failed') {
+      execution.finish = createDoctorSkippedOperation('auto-commit did not run');
+    }
+  } else {
+    execution.autoCommit = createDoctorSkippedOperation('dry-run skips doctor sandbox auto-commit');
+    execution.finish = createDoctorSkippedOperation('dry-run skips doctor sandbox finish flow');
+  }
+
+  const lockSyncState = syncDoctorLockRegistryBeforeMerge(blocked.repoRoot, metadata);
+  execution.lockSync = lockSyncState.result;
+  execution.sandboxLockContent = lockSyncState.sandboxLockContent;
+
+  execution.protectedBaseRepairSync = mergeDoctorSandboxRepairsBackToProtectedBase(
+    options,
+    blocked,
+    metadata,
+    execution.autoCommit,
+    execution.finish,
+    integrations,
+  );
+
+  // Re-run the syncs after the merge-back so the reported state reflects the
+  // final protected-branch workspace contents.
+  execution.omxScaffoldSync = summarizeDoctorOmxScaffoldSync(
+    blocked.repoRoot,
+    dryRun,
+    integrations.ensureOmxScaffold,
+  );
+  execution.lockSync = syncDoctorLockRegistryAfterMerge(
+    blocked.repoRoot,
+    execution.sandboxLockContent,
+  );
+  // Sweep other agent branches that are ready to finish; exclude this sandbox
+  // branch, which was already handled above.
+  execution.autoFinish = integrations.autoFinishReadyAgentBranches(blocked.repoRoot, {
+    baseBranch: blocked.branch,
+    dryRun: options.dryRun,
+    waitForMerge: options.waitForMerge,
+    excludeBranches: [metadata.branch],
+  });
+
+  return execution;
+}
+
+/**
+ * Emits the nested doctor run's JSON output, augmented with the sandbox
+ * lifecycle summaries, onto stdout.
+ *
+ * On a successful nested run the stdout payload is parsed, the execution
+ * summaries are merged in, and the result is re-serialized (2-space indent).
+ * If parsing fails, or the nested run exited non-zero, the nested stdout is
+ * passed through untouched so callers still see the raw doctor output.
+ *
+ * @param {{status: number, stdout: string, stderr: string}} nestedResult
+ * @param {object} execution - Lifecycle state from executeDoctorSandboxLifecycle.
+ */
+function emitDoctorSandboxJsonOutput(nestedResult, execution) {
+  if (nestedResult.stdout) {
+    if (nestedResult.status === 0) {
+      try {
+        const parsed = JSON.parse(nestedResult.stdout);
+        process.stdout.write(
+          JSON.stringify(
+            {
+              ...parsed,
+              protectedBaseRepairSync: execution.protectedBaseRepairSync,
+              sandboxOmxScaffoldSync: execution.omxScaffoldSync,
+              sandboxLockSync: execution.lockSync,
+              sandboxAutoCommit: execution.autoCommit,
+              sandboxFinish: execution.finish,
+              autoFinish: execution.autoFinish,
+            },
+            null,
+            2,
+          ) + '\n',
+        );
+      } catch {
+        // Deliberate best-effort: non-JSON stdout is forwarded verbatim.
+        process.stdout.write(nestedResult.stdout);
+      }
+    } else {
+      process.stdout.write(nestedResult.stdout);
+    }
+  }
+  if (nestedResult.stderr) process.stderr.write(nestedResult.stderr);
+}
+
+/**
+ * Human-readable (non-JSON) rendering of the sandboxed doctor run: forwards
+ * the sandbox-start and nested-doctor output, then summarizes each lifecycle
+ * stage (auto-commit, merge-back, lock sync, finish, auto-finish, scaffold).
+ *
+ * When the nested doctor run failed (non-zero status) only the raw output is
+ * forwarded; the lifecycle summaries are skipped because the lifecycle did
+ * not run (see runDoctorInSandbox).
+ *
+ * @param {object} options - Doctor CLI options (verboseAutoFinish, ...).
+ * @param {{branch: string, repoRoot: string}} blocked - Protected-branch context.
+ * @param {{branch: string}} metadata - Sandbox branch metadata.
+ * @param {{stdout: string, stderr: string}} startResult - Sandbox start output.
+ * @param {{status: number, stdout: string, stderr: string}} nestedResult
+ * @param {object} execution - Lifecycle state from executeDoctorSandboxLifecycle.
+ */
+function emitDoctorSandboxConsoleOutput(options, blocked, metadata, startResult, nestedResult, execution) {
+  console.log(
+    `[${TOOL_NAME}] doctor detected protected branch '${blocked.branch}'. ` +
+      `Running repairs in sandbox branch '${metadata.branch || 'agent/'}'.`,
+  );
+  if (startResult.stdout) process.stdout.write(startResult.stdout);
+  if (startResult.stderr) process.stderr.write(startResult.stderr);
+  if (nestedResult.stdout) process.stdout.write(nestedResult.stdout);
+  if (nestedResult.stderr) process.stderr.write(nestedResult.stderr);
+  if (nestedResult.status !== 0) {
+    return;
+  }
+
+  // Auto-commit summary.
+  if (execution.autoCommit.status === 'committed') {
+    console.log(
+      `[${TOOL_NAME}] Auto-committed doctor repairs in sandbox branch '${metadata.branch}'.`,
+    );
+  } else if (execution.autoCommit.status === 'failed') {
+    console.log(`[${TOOL_NAME}] Doctor sandbox auto-commit failed; branch left for manual follow-up.`);
+    if (execution.autoCommit.stdout) process.stdout.write(execution.autoCommit.stdout);
+    if (execution.autoCommit.stderr) process.stderr.write(execution.autoCommit.stderr);
+  } else {
+    console.log(`[${TOOL_NAME}] Doctor sandbox auto-commit skipped: ${execution.autoCommit.note}.`);
+  }
+
+  // Protected-base merge-back summary.
+  if (execution.protectedBaseRepairSync.status === 'merged') {
+    console.log(`[${TOOL_NAME}] Fast-forwarded tracked doctor repairs into the protected branch workspace.`);
+  } else if (execution.protectedBaseRepairSync.status === 'unchanged') {
+    console.log(`[${TOOL_NAME}] Protected branch workspace already had the tracked doctor repairs.`);
+  } else if (execution.protectedBaseRepairSync.status === 'would-merge') {
+    console.log(`[${TOOL_NAME}] Dry run: would fast-forward tracked doctor repairs into the protected branch workspace.`);
+  } else if (execution.protectedBaseRepairSync.status === 'failed') {
+    console.log(`[${TOOL_NAME}] Protected branch tracked repair merge failed: ${execution.protectedBaseRepairSync.note}.`);
+    if (execution.protectedBaseRepairSync.stdout) process.stdout.write(execution.protectedBaseRepairSync.stdout);
+    if (execution.protectedBaseRepairSync.stderr) process.stderr.write(execution.protectedBaseRepairSync.stderr);
+  } else {
+    console.log(`[${TOOL_NAME}] Protected branch tracked repair merge skipped: ${execution.protectedBaseRepairSync.note}.`);
+  }
+
+  // Lock-registry sync summary.
+  if (execution.lockSync.status === 'synced') {
+    console.log(
+      `[${TOOL_NAME}] Synced repaired lock registry back to protected branch workspace (${LOCK_FILE_RELATIVE}).`,
+    );
+  } else if (execution.lockSync.status === 'unchanged') {
+    console.log(`[${TOOL_NAME}] Lock registry already synced in protected branch workspace.`);
+  } else {
+    console.log(`[${TOOL_NAME}] Lock registry sync skipped: ${execution.lockSync.note}.`);
+  }
+
+  // Sandbox-branch finish summary ('pending' includes the PR URL when known).
+  if (execution.finish.status === 'completed') {
+    console.log(`[${TOOL_NAME}] Auto-finish flow completed for sandbox branch '${metadata.branch}'.`);
+    if (execution.finish.stdout) process.stdout.write(execution.finish.stdout);
+    if (execution.finish.stderr) process.stderr.write(execution.finish.stderr);
+  } else if (execution.finish.status === 'pending') {
+    console.log(
+      `[${TOOL_NAME}] Auto-finish pending for sandbox branch '${metadata.branch}': ${execution.finish.note}.`,
+    );
+    if (execution.finish.prUrl) {
+      console.log(`[${TOOL_NAME}] PR: ${execution.finish.prUrl}`);
+    }
+    if (execution.finish.stdout) process.stdout.write(execution.finish.stdout);
+    if (execution.finish.stderr) process.stderr.write(execution.finish.stderr);
+  } else if (execution.finish.status === 'failed') {
+    console.log(`[${TOOL_NAME}] Auto-finish flow failed for sandbox branch '${metadata.branch}'.`);
+    if (execution.finish.stdout) process.stdout.write(execution.finish.stdout);
+    if (execution.finish.stderr) process.stderr.write(execution.finish.stderr);
+  } else {
+    console.log(`[${TOOL_NAME}] Auto-finish skipped: ${execution.finish.note}.`);
+  }
+
+  printAutoFinishSummary(execution.autoFinish, {
+    baseBranch: blocked.branch,
+    verbose: options.verboseAutoFinish,
+  });
+  // .omx scaffold sync summary.
+  if (execution.omxScaffoldSync.status === 'synced') {
+    console.log(`[${TOOL_NAME}] Synced .omx scaffold back to protected branch workspace.`);
+  } else if (execution.omxScaffoldSync.status === 'unchanged') {
+    console.log(`[${TOOL_NAME}] .omx scaffold already aligned in protected branch workspace.`);
+  } else if (execution.omxScaffoldSync.status === 'would-sync') {
+    console.log(`[${TOOL_NAME}] Dry run: would sync .omx scaffold back to protected branch workspace.`);
+  } else {
+    console.log(`[${TOOL_NAME}] .omx scaffold sync skipped: ${execution.omxScaffoldSync.note}.`);
+  }
+}
+
+/**
+ * Derives the process exit code for the sandboxed doctor run.
+ *
+ * Starts from the nested run's status and escalates 0 -> 1 when:
+ *  - the sandbox auto-commit failed,
+ *  - the auto-commit succeeded but the finish flow failed or is still pending,
+ *  - or the protected-base repair merge failed.
+ * A non-numeric nested status (spawn failure) always yields exit code 1.
+ *
+ * @param {{status: number|null}} nestedResult
+ * @param {object} execution - Lifecycle state from executeDoctorSandboxLifecycle.
+ */
+function setDoctorSandboxExitCode(nestedResult, execution) {
+  if (typeof nestedResult.status === 'number') {
+    let exitCode = nestedResult.status;
+    if (exitCode === 0 && execution.autoCommit.status === 'failed') {
+      exitCode = 1;
+    }
+    if (
+      exitCode === 0 &&
+      execution.autoCommit.status === 'committed' &&
+      (execution.finish.status === 'failed' || execution.finish.status === 'pending')
+    ) {
+      exitCode = 1;
+    }
+    if (exitCode === 0 && execution.protectedBaseRepairSync.status === 'failed') {
+      exitCode = 1;
+    }
+    process.exitCode = exitCode;
+    return;
+  }
+  process.exitCode = 1;
+}
+
+/**
+ * Entry point for running `gx doctor` on a protected branch: starts a sandbox
+ * worktree, re-invokes the doctor CLI inside it, then runs the merge-back
+ * lifecycle and renders JSON or console output.
+ *
+ * @param {object} options - Doctor CLI options (json, dryRun, target, ...).
+ * @param {{repoRoot: string, branch: string}} blocked - Protected-branch context.
+ * @param {object} [rawIntegrations] - Injected collaborators; each one is
+ *   validated via requireDoctorIntegration so missing wiring fails loudly.
+ */
+function runDoctorInSandbox(options, blocked, rawIntegrations = {}) {
+  const integrations = {
+    startProtectedBaseSandbox: requireDoctorIntegration(
+      'startProtectedBaseSandbox',
+      rawIntegrations.startProtectedBaseSandbox,
+    ),
+    // NOTE(review): cleanupProtectedBaseSandbox is required here but never
+    // invoked in this function — presumably the finish flow or a later stage
+    // consumes it; confirm the sandbox worktree is cleaned up on all paths.
+    cleanupProtectedBaseSandbox: requireDoctorIntegration(
+      'cleanupProtectedBaseSandbox',
+      rawIntegrations.cleanupProtectedBaseSandbox,
+    ),
+    ensureOmxScaffold: requireDoctorIntegration('ensureOmxScaffold', rawIntegrations.ensureOmxScaffold),
+    configureHooks: requireDoctorIntegration('configureHooks', rawIntegrations.configureHooks),
+    autoFinishReadyAgentBranches: requireDoctorIntegration(
+      'autoFinishReadyAgentBranches',
+      rawIntegrations.autoFinishReadyAgentBranches,
+    ),
+  };
+
+  /** @type {SandboxStartResult} */
+  const startResult = integrations.startProtectedBaseSandbox(blocked, {
+    taskName: `${SHORT_TOOL_NAME}-doctor`,
+    sandboxSuffix: 'gx-doctor',
+  });
+  const metadata = startResult.metadata;
+
+  // Re-run this CLI's doctor command inside the sandbox worktree, retargeted
+  // at the sandbox copy of the requested path.
+  const sandboxTarget = resolveSandboxTarget(blocked.repoRoot, metadata.worktreePath, options.target);
+  const nestedResult = run(
+    process.execPath,
+    [CLI_ENTRY_PATH, ...buildSandboxDoctorArgs(options, sandboxTarget)],
+    { cwd: metadata.worktreePath },
+  );
+  if (isSpawnFailure(nestedResult)) {
+    throw nestedResult.error;
+  }
+
+  // The merge-back lifecycle only runs when the nested doctor succeeded;
+  // otherwise an empty execution state feeds the renderers/exit-code logic.
+  const execution = nestedResult.status === 0
+    ? executeDoctorSandboxLifecycle(options, blocked, metadata, integrations)
+    : createDoctorSandboxExecutionState();
+
+  if (options.json) {
+    emitDoctorSandboxJsonOutput(nestedResult, execution);
+  } else {
+    emitDoctorSandboxConsoleOutput(options, blocked, metadata, startResult, nestedResult, execution);
+  }
+
+  setDoctorSandboxExitCode(nestedResult, execution);
+}
+
+module.exports = {
+ runDoctorInSandbox,
+};
diff --git a/src/git/index.js b/src/git/index.js
index f5f63c2..e2bc231 100644
--- a/src/git/index.js
+++ b/src/git/index.js
@@ -1,5 +1,15 @@
const fs = require('node:fs');
-const { path } = require('../context');
+const {
+ path,
+ GIT_PROTECTED_BRANCHES_KEY,
+ GIT_BASE_BRANCH_KEY,
+ GIT_SYNC_STRATEGY_KEY,
+ DEFAULT_PROTECTED_BRANCHES,
+ DEFAULT_BASE_BRANCH,
+ DEFAULT_SYNC_STRATEGY,
+ COMPOSE_HINT_FILES,
+ LOCK_FILE_RELATIVE,
+} = require('../context');
const { run } = require('../core/runtime');
function gitRun(repoRoot, args, { allowFailure = false } = {}) {
@@ -113,10 +123,536 @@ function discoverNestedGitRepos(rootPath, opts = {}) {
return root ? [root, ...rest] : [];
}
+/**
+ * Splits a whitespace- and/or comma-separated branch list into trimmed,
+ * non-empty branch names. Accepts any value; null/undefined yield [].
+ */
+function parseBranchList(rawValue) {
+  return String(rawValue || '')
+    .split(/[\s,]+/)
+    .map((item) => item.trim())
+    .filter(Boolean);
+}
+
+/** Returns the items with duplicates removed, preserving first-seen order. */
+function uniquePreserveOrder(items) {
+  const seen = new Set();
+  const result = [];
+  for (const item of items) {
+    if (seen.has(item)) continue;
+    seen.add(item);
+    result.push(item);
+  }
+  return result;
+}
+
+/**
+ * Reads the explicitly configured protected-branch list from git config
+ * (GIT_PROTECTED_BRANCHES_KEY). Returns the parsed, de-duplicated list, or
+ * null when the key is unset or empty — callers decide whether to fall back
+ * to defaults (contrast with readProtectedBranches below).
+ */
+function readConfiguredProtectedBranches(repoRoot) {
+  const result = gitRun(repoRoot, ['config', '--get', GIT_PROTECTED_BRANCHES_KEY], { allowFailure: true });
+  if (result.status !== 0) {
+    return null;
+  }
+  const parsed = uniquePreserveOrder(parseBranchList(result.stdout.trim()));
+  if (parsed.length === 0) {
+    return null;
+  }
+  return parsed;
+}
+
+/**
+ * Lists local "user" branches: local heads that are neither agent/* branches
+ * nor default-protected branches. If no such branch exists, falls back to the
+ * current branch alone — provided it also qualifies — so a fresh repo still
+ * reports something useful. Returns [] on any git failure.
+ */
+function listLocalUserBranches(repoRoot) {
+  const result = gitRun(repoRoot, ['for-each-ref', '--format=%(refname:short)', 'refs/heads'], { allowFailure: true });
+  const branchNames = result.status === 0
+    ? uniquePreserveOrder(
+        String(result.stdout || '')
+          .split('\n')
+          .map((item) => item.trim())
+          .filter(Boolean),
+      )
+    : [];
+
+  // NOTE(review): filters against DEFAULT_PROTECTED_BRANCHES, not the
+  // configured list — confirm that is intentional for this helper.
+  const additionalUserBranches = branchNames.filter(
+    (branchName) =>
+      !branchName.startsWith('agent/') &&
+      !DEFAULT_PROTECTED_BRANCHES.includes(branchName),
+  );
+  if (additionalUserBranches.length > 0) {
+    return additionalUserBranches;
+  }
+
+  const current = gitRun(repoRoot, ['branch', '--show-current'], { allowFailure: true });
+  if (current.status !== 0) {
+    return [];
+  }
+
+  const branchName = String(current.stdout || '').trim();
+  if (
+    !branchName ||
+    branchName.startsWith('agent/') ||
+    DEFAULT_PROTECTED_BRANCHES.includes(branchName)
+  ) {
+    return [];
+  }
+
+  return [branchName];
+}
+
+/**
+ * Lists local agent/* branches (short ref names under refs/heads/agent/),
+ * de-duplicated. Returns [] on git failure.
+ */
+function listLocalAgentBranches(repoRoot) {
+  const result = gitRun(
+    repoRoot,
+    ['for-each-ref', '--format=%(refname:short)', 'refs/heads/agent/'],
+    { allowFailure: true },
+  );
+  if (result.status !== 0) {
+    return [];
+  }
+  return uniquePreserveOrder(
+    String(result.stdout || '')
+      .split('\n')
+      .map((item) => item.trim())
+      .filter(Boolean),
+  );
+}
+
+/**
+ * Maps checked-out branch names to their worktree paths by parsing
+ * `git worktree list --porcelain`. Detached worktrees (no `branch` line)
+ * are omitted. Returns an empty Map on git failure.
+ *
+ * @returns {Map<string, string>} branch name -> worktree path
+ */
+function mapWorktreePathsByBranch(repoRoot) {
+  const result = gitRun(repoRoot, ['worktree', 'list', '--porcelain'], { allowFailure: true });
+  const map = new Map();
+  if (result.status !== 0) {
+    return map;
+  }
+
+  const lines = String(result.stdout || '').split('\n');
+  let currentWorktree = '';
+  for (const line of lines) {
+    // Porcelain entries begin with a `worktree <path>` line; a following
+    // `branch refs/heads/<name>` line (when present) names its branch.
+    if (line.startsWith('worktree ')) {
+      currentWorktree = line.slice('worktree '.length).trim();
+      continue;
+    }
+    if (line.startsWith('branch refs/heads/')) {
+      const branchName = line.slice('branch refs/heads/'.length).trim();
+      if (currentWorktree && branchName) {
+        map.set(branchName, currentWorktree);
+      }
+    }
+  }
+  return map;
+}
+
+/** True when the fully-qualified ref (e.g. refs/heads/x) exists in the repo. */
+function gitRefExists(repoRoot, ref) {
+  return run('git', ['-C', repoRoot, 'show-ref', '--verify', '--quiet', ref]).status === 0;
+}
+
+/**
+ * True when the worktree has working-tree changes other than the agent lock
+ * registry (LOCK_FILE_RELATIVE), including its rename forms in porcelain
+ * output. A failed `git status` conservatively counts as "has changes".
+ */
+function hasSignificantWorkingTreeChanges(worktreePath) {
+  const result = run('git', [
+    '-C',
+    worktreePath,
+    'status',
+    '--porcelain',
+    '--untracked-files=normal',
+    '--',
+  ]);
+  if (result.status !== 0) {
+    return true;
+  }
+
+  const lines = String(result.stdout || '')
+    .split('\n')
+    .map((line) => line.trimEnd())
+    .filter((line) => line.length > 0);
+
+  for (const line of lines) {
+    // Porcelain v1 lines are `XY <path>`; slice off the 2-char status + space.
+    const pathPart = (line.length > 3 ? line.slice(3) : '').trim();
+    if (!pathPart) continue;
+    if (pathPart === LOCK_FILE_RELATIVE) continue;
+    if (pathPart.startsWith(`${LOCK_FILE_RELATIVE} -> `)) continue;
+    if (pathPart.endsWith(` -> ${LOCK_FILE_RELATIVE}`)) continue;
+    return true;
+  }
+  return false;
+}
+
+/**
+ * Reads the protected-branch list from git config, falling back to a copy of
+ * DEFAULT_PROTECTED_BRANCHES when unset or empty.
+ *
+ * NOTE(review): duplicates readConfiguredProtectedBranches above; could be
+ * `readConfiguredProtectedBranches(repoRoot) ?? [...DEFAULT_PROTECTED_BRANCHES]`
+ * — left as-is here since this file is mid-extraction.
+ */
+function readProtectedBranches(repoRoot) {
+  const result = gitRun(repoRoot, ['config', '--get', GIT_PROTECTED_BRANCHES_KEY], { allowFailure: true });
+  if (result.status !== 0) {
+    return [...DEFAULT_PROTECTED_BRANCHES];
+  }
+
+  const parsed = uniquePreserveOrder(parseBranchList(result.stdout.trim()));
+  if (parsed.length === 0) {
+    return [...DEFAULT_PROTECTED_BRANCHES];
+  }
+  return parsed;
+}
+
+/**
+ * Persists the protected-branch list to git config as a space-joined value;
+ * an empty list unsets the key entirely (best-effort).
+ */
+function writeProtectedBranches(repoRoot, branches) {
+  if (branches.length === 0) {
+    gitRun(repoRoot, ['config', '--unset-all', GIT_PROTECTED_BRANCHES_KEY], { allowFailure: true });
+    return;
+  }
+  gitRun(repoRoot, ['config', GIT_PROTECTED_BRANCHES_KEY, branches.join(' ')]);
+}
+
+/** Reads a single git config value; returns '' when unset or on failure. */
+function readGitConfig(repoRoot, key) {
+  const result = gitRun(repoRoot, ['config', '--get', key], { allowFailure: true });
+  if (result.status !== 0) {
+    return '';
+  }
+  return (result.stdout || '').trim();
+}
+
+/**
+ * Resolves the base branch: explicit argument wins, then the configured
+ * GIT_BASE_BRANCH_KEY value, then DEFAULT_BASE_BRANCH.
+ */
+function resolveBaseBranch(repoRoot, explicitBase) {
+  if (explicitBase) {
+    return explicitBase;
+  }
+  const configured = readGitConfig(repoRoot, GIT_BASE_BRANCH_KEY);
+  return configured || DEFAULT_BASE_BRANCH;
+}
+
+/**
+ * Resolves the sync strategy ('rebase' | 'merge'), case-insensitively, from
+ * the explicit argument, git config, or DEFAULT_SYNC_STRATEGY.
+ * @throws {Error} when the resolved value is neither 'rebase' nor 'merge'.
+ */
+function resolveSyncStrategy(repoRoot, explicitStrategy) {
+  const strategy = (explicitStrategy || readGitConfig(repoRoot, GIT_SYNC_STRATEGY_KEY) || DEFAULT_SYNC_STRATEGY)
+    .trim()
+    .toLowerCase();
+  if (strategy !== 'rebase' && strategy !== 'merge') {
+    throw new Error(`Invalid sync strategy '${strategy}' (expected: rebase or merge)`);
+  }
+  return strategy;
+}
+
+/**
+ * Returns the current branch name.
+ * @throws {Error} when the branch cannot be detected or HEAD is detached
+ *   (sync operations require a named branch).
+ */
+function currentBranchName(repoRoot) {
+  const result = gitRun(repoRoot, ['branch', '--show-current'], { allowFailure: true });
+  if (result.status !== 0) {
+    throw new Error('Unable to detect current branch');
+  }
+  const branch = (result.stdout || '').trim();
+  if (!branch) {
+    throw new Error('Detached HEAD is not supported for sync operations');
+  }
+  return branch;
+}
+
+/** True when HEAD resolves to a commit (i.e. the branch is not unborn). */
+function repoHasHeadCommit(repoRoot) {
+  return gitRun(repoRoot, ['rev-parse', '--verify', 'HEAD'], { allowFailure: true }).status === 0;
+}
+
+/**
+ * Human-readable branch label for status output: the branch name (annotated
+ * when unborn), '(detached at <sha>)' for detached HEAD, or '(unknown)'.
+ */
+function readBranchDisplayName(repoRoot) {
+  const symbolic = gitRun(repoRoot, ['symbolic-ref', '--quiet', '--short', 'HEAD'], { allowFailure: true });
+  if (symbolic.status === 0) {
+    const branch = String(symbolic.stdout || '').trim();
+    if (!branch) {
+      return '(unknown)';
+    }
+    return repoHasHeadCommit(repoRoot) ? branch : `${branch} (unborn; no commits yet)`;
+  }
+
+  const detached = gitRun(repoRoot, ['rev-parse', '--short', 'HEAD'], { allowFailure: true });
+  if (detached.status === 0) {
+    return `(detached at ${String(detached.stdout || '').trim()})`;
+  }
+  return '(unknown)';
+}
+
+/** True when the repo has an 'origin' remote configured. */
+function hasOriginRemote(repoRoot) {
+  return gitRun(repoRoot, ['remote', 'get-url', 'origin'], { allowFailure: true }).status === 0;
+}
+
+/** Returns the COMPOSE_HINT_FILES entries that exist under repoRoot. */
+function detectComposeHintFiles(repoRoot) {
+  return COMPOSE_HINT_FILES.filter((relativePath) => fs.existsSync(path.join(repoRoot, relativePath)));
+}
+
+/**
+ * True when the working tree has changes other than the agent lock registry.
+ * Unlike hasSignificantWorkingTreeChanges (same filter), this throws on a
+ * failed `git status` instead of assuming dirty — callers here need a hard
+ * answer before destructive sync operations.
+ */
+function workingTreeIsDirty(repoRoot) {
+  const result = gitRun(repoRoot, ['status', '--porcelain'], { allowFailure: true });
+  if (result.status !== 0) {
+    throw new Error('Unable to inspect git working tree status');
+  }
+  const lines = (result.stdout || '').split('\n').filter((line) => line.length > 0);
+  const significant = lines.filter((line) => {
+    const pathPart = (line.length > 3 ? line.slice(3) : '').trim();
+    if (!pathPart) return false;
+    if (pathPart === LOCK_FILE_RELATIVE) return false;
+    if (pathPart.startsWith(`${LOCK_FILE_RELATIVE} -> `)) return false;
+    if (pathPart.endsWith(` -> ${LOCK_FILE_RELATIVE}`)) return false;
+    return true;
+  });
+  return significant.length > 0;
+}
+
+/**
+ * Ensures repoRoot has `branch` checked out, switching with a 20s timeout.
+ *
+ * @returns {{ok: boolean, changed: boolean, stdout?: string, stderr?: string}}
+ *   `changed` is true only when a checkout actually happened; failures carry
+ *   the captured git output for the caller to report.
+ */
+function ensureRepoBranch(repoRoot, branch) {
+  const current = currentBranchName(repoRoot);
+  if (current === branch) {
+    return { ok: true, changed: false };
+  }
+
+  const checkoutResult = run('git', ['-C', repoRoot, 'checkout', branch], { timeout: 20_000 });
+  // Spawn-level failure (no numeric exit status, e.g. timeout/ENOENT).
+  if (checkoutResult.error && typeof checkoutResult.status !== 'number') {
+    return {
+      ok: false,
+      changed: false,
+      stdout: checkoutResult.stdout || '',
+      stderr: checkoutResult.stderr || '',
+    };
+  }
+  // Checkout ran but exited non-zero (conflicts, unknown branch, ...).
+  if (checkoutResult.status !== 0) {
+    return {
+      ok: false,
+      changed: false,
+      stdout: checkoutResult.stdout || '',
+      stderr: checkoutResult.stderr || '',
+    };
+  }
+
+  return { ok: true, changed: true };
+}
+
+/**
+ * Fetches origin/<baseBranch> and verifies the remote-tracking ref exists.
+ * @throws {Error} when the fetch fails or the remote base branch is missing.
+ */
+function ensureOriginBaseRef(repoRoot, baseBranch) {
+  const fetch = gitRun(repoRoot, ['fetch', 'origin', baseBranch, '--quiet'], { allowFailure: true });
+  if (fetch.status !== 0) {
+    throw new Error(
+      `Unable to fetch origin/${baseBranch}. Ensure remote 'origin' exists and branch '${baseBranch}' is available.`,
+    );
+  }
+  const hasRemoteBase = gitRun(repoRoot, ['show-ref', '--verify', '--quiet', `refs/remotes/origin/${baseBranch}`], {
+    allowFailure: true,
+  });
+  if (hasRemoteBase.status !== 0) {
+    throw new Error(`Remote base branch not found: origin/${baseBranch}`);
+  }
+}
+
+/**
+ * Computes how many commits branchRef is ahead of / behind baseRef using
+ * `git rev-list --left-right --count`. Unparseable counts default to 0.
+ * @returns {{ahead: number, behind: number}}
+ * @throws {Error} when rev-list itself fails.
+ */
+function aheadBehind(repoRoot, branchRef, baseRef) {
+  const result = gitRun(repoRoot, ['rev-list', '--left-right', '--count', `${branchRef}...${baseRef}`], {
+    allowFailure: true,
+  });
+  if (result.status !== 0) {
+    throw new Error(`Unable to compute ahead/behind for ${branchRef} vs ${baseRef}`);
+  }
+  const parts = (result.stdout || '').trim().split(/\s+/).filter(Boolean);
+  const ahead = Number.parseInt(parts[0] || '0', 10);
+  const behind = Number.parseInt(parts[1] || '0', 10);
+  return { ahead: Number.isFinite(ahead) ? ahead : 0, behind: Number.isFinite(behind) ? behind : 0 };
+}
+
+/**
+ * Reports the git status of the agent lock registry file only.
+ * @returns {{dirty: boolean, untracked: boolean}} `dirty` when any status
+ *   line exists for it; `untracked` when one of those lines is '??'.
+ *   A failed `git status` reports clean (best-effort).
+ */
+function lockRegistryStatus(repoRoot) {
+  const result = gitRun(repoRoot, ['status', '--porcelain', '--', LOCK_FILE_RELATIVE], { allowFailure: true });
+  if (result.status !== 0) {
+    return { dirty: false, untracked: false };
+  }
+  const lines = (result.stdout || '').split('\n').filter((line) => line.length > 0);
+  if (lines.length === 0) {
+    return { dirty: false, untracked: false };
+  }
+  const untracked = lines.some((line) => line.startsWith('??'));
+  return { dirty: true, untracked };
+}
+
+/**
+ * Lists worktrees whose checked-out branch is under refs/heads/agent/.
+ * Parses `git worktree list --porcelain`, flushing an entry at each blank
+ * separator line and once more after the loop for the final (unterminated)
+ * record.
+ *
+ * @returns {Array<{worktreePath: string, branch: string}>}
+ * @throws {Error} when the worktree listing fails.
+ */
+function listAgentWorktrees(repoRoot) {
+  const result = gitRun(repoRoot, ['worktree', 'list', '--porcelain'], { allowFailure: true });
+  if (result.status !== 0) {
+    throw new Error('Unable to list git worktrees for finish command');
+  }
+
+  const entries = [];
+  let currentPath = '';
+  let currentBranchRef = '';
+  const lines = String(result.stdout || '').split('\n');
+  for (const line of lines) {
+    if (!line.trim()) {
+      // Blank line terminates a porcelain record; keep agent branches only.
+      if (currentPath && currentBranchRef.startsWith('refs/heads/agent/')) {
+        entries.push({
+          worktreePath: currentPath,
+          branch: currentBranchRef.replace(/^refs\/heads\//, ''),
+        });
+      }
+      currentPath = '';
+      currentBranchRef = '';
+      continue;
+    }
+    if (line.startsWith('worktree ')) {
+      currentPath = line.slice('worktree '.length).trim();
+      continue;
+    }
+    if (line.startsWith('branch ')) {
+      currentBranchRef = line.slice('branch '.length).trim();
+      continue;
+    }
+  }
+  // Flush the trailing record when output did not end with a blank line.
+  if (currentPath && currentBranchRef.startsWith('refs/heads/agent/')) {
+    entries.push({
+      worktreePath: currentPath,
+      branch: currentBranchRef.replace(/^refs\/heads\//, ''),
+    });
+  }
+
+  return entries;
+}
+
+/**
+ * Agent branches eligible for the finish flow. The extra 'agent/' filter is
+ * defensive — listLocalAgentBranches already iterates refs/heads/agent/.
+ */
+function listLocalAgentBranchesForFinish(repoRoot) {
+  return uniquePreserveOrder(
+    listLocalAgentBranches(repoRoot).filter((line) => line.startsWith('agent/')),
+  );
+}
+
+/**
+ * Runs a `git ... --quiet`-style command whose exit status encodes a boolean:
+ * 0 -> no changes (returns false), 1 -> changes present (returns true).
+ * @throws {Error} for any other status, carrying the captured git output.
+ */
+function gitQuietChangeResult(worktreePath, args) {
+  const result = run('git', ['-C', worktreePath, ...args], { stdio: 'pipe' });
+  if (result.status === 0) {
+    return false;
+  }
+  if (result.status === 1) {
+    return true;
+  }
+  throw new Error(
+    `git ${args.join(' ')} failed in ${worktreePath}: ${(
+      result.stderr || result.stdout || ''
+    ).trim()}`,
+  );
+}
+
+/**
+ * True when the worktree has unstaged, staged, or untracked changes, ignoring
+ * the agent lock registry.
+ *
+ * NOTE(review): the exclude pathspec hard-codes
+ * '.omx/state/agent-file-locks.json' instead of using LOCK_FILE_RELATIVE as
+ * the neighbouring helpers do — presumably the same value; confirm and unify.
+ *
+ * @throws {Error} when a git probe fails unexpectedly.
+ */
+function worktreeHasLocalChanges(worktreePath) {
+  const hasUnstaged = gitQuietChangeResult(worktreePath, [
+    'diff',
+    '--quiet',
+    '--',
+    '.',
+    ':(exclude).omx/state/agent-file-locks.json',
+  ]);
+  if (hasUnstaged) {
+    return true;
+  }
+
+  const hasStaged = gitQuietChangeResult(worktreePath, [
+    'diff',
+    '--cached',
+    '--quiet',
+    '--',
+    '.',
+    ':(exclude).omx/state/agent-file-locks.json',
+  ]);
+  if (hasStaged) {
+    return true;
+  }
+
+  const untracked = run('git', ['-C', worktreePath, 'ls-files', '--others', '--exclude-standard'], {
+    stdio: 'pipe',
+  });
+  if (untracked.status !== 0) {
+    throw new Error(`Unable to inspect untracked files in ${worktreePath}`);
+  }
+  return String(untracked.stdout || '').trim().length > 0;
+}
+
+/**
+ * Runs a git command and returns its stdout as trimmed, non-empty lines.
+ * @throws {Error} on non-zero exit, carrying the captured git output.
+ */
+function gitOutputLines(worktreePath, args) {
+  const result = run('git', ['-C', worktreePath, ...args], { stdio: 'pipe' });
+  if (result.status !== 0) {
+    throw new Error(
+      `git ${args.join(' ')} failed in ${worktreePath}: ${(
+        result.stderr || result.stdout || ''
+      ).trim()}`,
+    );
+  }
+  return String(result.stdout || '')
+    .split('\n')
+    .map((line) => line.trim())
+    .filter(Boolean);
+}
+
+/** True when a local branch head refs/heads/<branch> exists. */
+function branchExists(repoRoot, branch) {
+  const result = gitRun(repoRoot, ['show-ref', '--verify', '--quiet', `refs/heads/${branch}`], {
+    allowFailure: true,
+  });
+  return result.status === 0;
+}
+
+/**
+ * Resolves the base branch for the finish flow: explicit argument, then the
+ * configured GIT_BASE_BRANCH_KEY value, then DEFAULT_BASE_BRANCH.
+ * `_sourceBranch` is currently unused (kept for signature stability).
+ */
+function resolveFinishBaseBranch(repoRoot, _sourceBranch, explicitBase) {
+  if (explicitBase) {
+    return explicitBase;
+  }
+
+  const configured = readGitConfig(repoRoot, GIT_BASE_BRANCH_KEY);
+  if (configured) {
+    return configured;
+  }
+
+  return DEFAULT_BASE_BRANCH;
+}
+
+/**
+ * True when `branch` is an ancestor of `baseBranch` (i.e. fully merged).
+ * A missing base branch reports false. `merge-base --is-ancestor` uses exit
+ * status 0/1 as the boolean; any other status is an error.
+ * @throws {Error} when merge status cannot be determined.
+ */
+function branchMergedIntoBase(repoRoot, branch, baseBranch) {
+  if (!branchExists(repoRoot, baseBranch)) {
+    return false;
+  }
+  const result = gitRun(repoRoot, ['merge-base', '--is-ancestor', branch, baseBranch], {
+    allowFailure: true,
+  });
+  if (result.status === 0) {
+    return true;
+  }
+  if (result.status === 1) {
+    return false;
+  }
+  throw new Error(`Unable to determine merge status for ${branch} -> ${baseBranch}`);
+}
+
+/**
+ * Syncs the current branch with baseRef using the chosen strategy.
+ *
+ * - 'rebase': rebases onto baseRef; `ffOnly` is rejected (merge-only flag).
+ *   On failure, detects an in-progress rebase (.git/rebase-merge or
+ *   .git/rebase-apply) and appends continue/abort guidance to the error.
+ * - 'merge': merges baseRef with --no-edit, optionally --ff-only; on failure,
+ *   detects MERGE_HEAD and appends commit/abort guidance.
+ *
+ * @throws {Error} on conflict or any git failure, with actionable help text.
+ */
+function syncOperation(repoRoot, strategy, baseRef, ffOnly) {
+  if (strategy === 'rebase') {
+    if (ffOnly) {
+      throw new Error('--ff-only is only supported with --strategy merge');
+    }
+    const rebased = run('git', ['-C', repoRoot, 'rebase', baseRef], { stdio: 'pipe' });
+    if (rebased.status !== 0) {
+      const details = (rebased.stderr || rebased.stdout || '').trim();
+      const gitDir = path.join(repoRoot, '.git');
+      const rebaseActive = fs.existsSync(path.join(gitDir, 'rebase-merge')) || fs.existsSync(path.join(gitDir, 'rebase-apply'));
+      const help = rebaseActive
+        ? '\nResolve conflicts, then run: git rebase --continue\nOr abort: git rebase --abort'
+        : '';
+      throw new Error(`Sync failed during rebase onto ${baseRef}.${details ? `\n${details}` : ''}${help}`);
+    }
+    return;
+  }
+
+  const mergeArgs = ['-C', repoRoot, 'merge', '--no-edit'];
+  if (ffOnly) {
+    mergeArgs.push('--ff-only');
+  }
+  mergeArgs.push(baseRef);
+  const merged = run('git', mergeArgs, { stdio: 'pipe' });
+  if (merged.status !== 0) {
+    const details = (merged.stderr || merged.stdout || '').trim();
+    const gitDir = path.join(repoRoot, '.git');
+    const mergeActive = fs.existsSync(path.join(gitDir, 'MERGE_HEAD'));
+    const help = mergeActive ? '\nResolve conflicts, then run: git commit\nOr abort: git merge --abort' : '';
+    throw new Error(`Sync failed during merge from ${baseRef}.${details ? `\n${details}` : ''}${help}`);
+  }
+}
+
module.exports = {
DEFAULT_NESTED_REPO_MAX_DEPTH: NESTED_REPO_DEFAULT_MAX_DEPTH,
gitRun,
resolveRepoRoot,
isGitRepo,
discoverNestedGitRepos,
+ parseBranchList,
+ uniquePreserveOrder,
+ readConfiguredProtectedBranches,
+ listLocalUserBranches,
+ listLocalAgentBranches,
+ mapWorktreePathsByBranch,
+ gitRefExists,
+ hasSignificantWorkingTreeChanges,
+ readProtectedBranches,
+ writeProtectedBranches,
+ readGitConfig,
+ resolveBaseBranch,
+ resolveSyncStrategy,
+ currentBranchName,
+ repoHasHeadCommit,
+ readBranchDisplayName,
+ hasOriginRemote,
+ repoHasOriginRemote: hasOriginRemote,
+ detectComposeHintFiles,
+ workingTreeIsDirty,
+ ensureRepoBranch,
+ ensureOriginBaseRef,
+ aheadBehind,
+ lockRegistryStatus,
+ listAgentWorktrees,
+ listLocalAgentBranchesForFinish,
+ gitQuietChangeResult,
+ worktreeHasLocalChanges,
+ gitOutputLines,
+ branchExists,
+ resolveFinishBaseBranch,
+ branchMergedIntoBase,
+ syncOperation,
};
diff --git a/test/cli-args-dispatch.test.js b/test/cli-args-dispatch.test.js
index 5c5dd1c..366c268 100644
--- a/test/cli-args-dispatch.test.js
+++ b/test/cli-args-dispatch.test.js
@@ -187,8 +187,11 @@ test('scaffold reuses the shared destination-path helper from context', () => {
test('cli main no longer keeps local copies of extracted shared helpers or dead cleanup code', () => {
const source = fs.readFileSync(path.join(repoRoot, 'src', 'cli', 'main.js'), 'utf8');
+ const doctorSource = fs.readFileSync(path.join(repoRoot, 'src', 'doctor', 'index.js'), 'utf8');
+ const gitSource = fs.readFileSync(path.join(repoRoot, 'src', 'git', 'index.js'), 'utf8');
assert.match(source, /require\('\.\.\/context'\)/);
+ assert.match(source, /require\('\.\.\/doctor'\)/);
assert.match(source, /require\('\.\.\/output'\)/);
assert.match(source, /require\('\.\.\/scaffold'\)/);
assert.match(source, /require\('\.\/args'\)/);
@@ -207,6 +210,12 @@ test('cli main no longer keeps local copies of extracted shared helpers or dead
assert.doesNotMatch(source, /function resolveRepoRoot\(targetPath\)/);
assert.doesNotMatch(source, /function isGitRepo\(targetPath\)/);
assert.doesNotMatch(source, /function discoverNestedGitRepos\(rootPath, opts = \{\}\)/);
+ assert.doesNotMatch(source, /function readGitConfig\(repoRoot, key\)/);
+ assert.doesNotMatch(source, /function currentBranchName\(repoRoot\)/);
+ assert.doesNotMatch(source, /function workingTreeIsDirty\(repoRoot\)/);
+ assert.doesNotMatch(source, /function aheadBehind\(repoRoot, branchRef, baseRef\)/);
+ assert.doesNotMatch(source, /function branchExists\(repoRoot, branch\)/);
+ assert.doesNotMatch(source, /function branchMergedIntoBase\(repoRoot, branch, baseBranch\)/);
assert.doesNotMatch(source, /function maybeSuggestCommand\(command\)/);
assert.doesNotMatch(source, /function normalizeCommandOrThrow\(command\)/);
assert.doesNotMatch(source, /function warnDeprecatedAlias\(aliasName\)/);
@@ -222,6 +231,26 @@ test('cli main no longer keeps local copies of extracted shared helpers or dead
assert.doesNotMatch(source, /function initWorkspace\(rawArgs\)/);
assert.doesNotMatch(source, /function doctorAudit\(rawArgs\)/);
assert.doesNotMatch(source, /function syncDoctorLocalSupportFiles\(repoRoot, dryRun\)/);
- assert.equal((source.match(/function gitRefExists\(/g) || []).length, 1);
- assert.equal((source.match(/Auto-finish flow failed for sandbox branch/g) || []).length, 1);
+ assert.doesNotMatch(source, /function parseGitPathList\(output\)/);
+ assert.doesNotMatch(source, /function collectDoctorChangedPaths\(worktreePath\)/);
+ assert.doesNotMatch(source, /function collectDoctorDeletedPaths\(worktreePath\)/);
+ assert.doesNotMatch(source, /function collectWorktreeDirtyPaths\(worktreePath\)/);
+ assert.doesNotMatch(source, /function claimDoctorChangedLocks\(metadata\)/);
+ assert.doesNotMatch(source, /function autoCommitDoctorSandboxChanges\(metadata\)/);
+ assert.doesNotMatch(source, /function finishDoctorSandboxBranch\(blocked, metadata, options = \{\}\)/);
+ assert.doesNotMatch(source, /function mergeDoctorSandboxRepairsBackToProtectedBase\(options, blocked, metadata, autoCommitResult, finishResult\)/);
+ assert.doesNotMatch(source, /function syncDoctorLockRegistryBeforeMerge\(repoRoot, metadata\)/);
+ assert.doesNotMatch(source, /function syncDoctorLockRegistryAfterMerge\(repoRoot, sandboxLockContent\)/);
+ assert.doesNotMatch(source, /function executeDoctorSandboxLifecycle\(options, blocked, metadata\)/);
+ assert.doesNotMatch(source, /function emitDoctorSandboxJsonOutput\(nestedResult, execution\)/);
+ assert.doesNotMatch(source, /function emitDoctorSandboxConsoleOutput\(options, blocked, metadata, startResult, nestedResult, execution\)/);
+ assert.doesNotMatch(source, /function runDoctorInSandbox\(options, blocked\)/);
+ assert.match(doctorSource, /function runDoctorInSandbox\(options, blocked, rawIntegrations = \{\}\)/);
+ assert.match(doctorSource, /function executeDoctorSandboxLifecycle\(options, blocked, metadata, integrations\)/);
+ assert.match(gitSource, /function readGitConfig\(repoRoot, key\)/);
+ assert.match(gitSource, /function currentBranchName\(repoRoot\)/);
+ assert.match(gitSource, /function workingTreeIsDirty\(repoRoot\)/);
+ assert.match(gitSource, /function aheadBehind\(repoRoot, branchRef, baseRef\)/);
+ assert.match(gitSource, /function branchMergedIntoBase\(repoRoot, branch, baseBranch\)/);
+ assert.equal((doctorSource.match(/Auto-finish flow failed for sandbox branch/g) || []).length, 1);
});
From 07603dc357f07bad3ecc2f8cb880ea1fe3c1f3d1 Mon Sep 17 00:00:00 2001
From: NagyVikt
Date: Wed, 22 Apr 2026 16:32:03 +0200
Subject: [PATCH 2/2] Make the remaining CLI extractions reviewable without
changing Guardex behavior
Move the last git/worktree, scaffold/template, and protected-main doctor seams out of src/cli/main.js so the sensitive paths live in owned modules. This also removes the cached factory wrappers for sandbox, toolchain, and finish wiring while keeping command behavior and release metadata expectations stable.
Constraint: Must preserve protected-main doctor and finish behavior while shrinking src/cli/main.js
Rejected: Keep the service-locator wrappers in main.js | leaves the most sensitive flows in the least reviewable file
Confidence: high
Scope-risk: moderate
Reversibility: clean
Directive: Keep future CLI extractions behavior-preserving and extend focused regression coverage before moving protected-branch flows again
Tested: node --check src/cli/main.js src/git/index.js src/scaffold/index.js src/doctor/index.js src/sandbox/index.js src/finish/index.js src/toolchain/index.js; node --test test/cli-args-dispatch.test.js test/doctor.test.js test/install.test.js test/metadata.test.js test/finish.test.js test/setup.test.js; npm test; openspec validate agent-codex-extract-git-scaffold-doctor-di-2026-04-22-15-38 --type change --strict; openspec validate --specs
Not-tested: Live gh-authenticated release publishing against GitHub
---
README.md | 4 +
.../.openspec.yaml | 2 +
.../proposal.md | 18 +
.../specs/cli-doctor-foundations/spec.md | 11 +
.../specs/cli-modularization/spec.md | 19 +
.../tasks.md | 46 +
src/cli/main.js | 2389 ++++-------------
src/doctor/index.js | 571 ++--
src/finish/index.js | 814 +++---
src/git/index.js | 67 +
src/sandbox/index.js | 353 ++-
src/scaffold/index.js | 680 +++++
src/toolchain/index.js | 800 ++++--
test/metadata.test.js | 11 +-
14 files changed, 3124 insertions(+), 2661 deletions(-)
create mode 100644 openspec/changes/agent-codex-extract-git-scaffold-doctor-di-2026-04-22-15-38/.openspec.yaml
create mode 100644 openspec/changes/agent-codex-extract-git-scaffold-doctor-di-2026-04-22-15-38/proposal.md
create mode 100644 openspec/changes/agent-codex-extract-git-scaffold-doctor-di-2026-04-22-15-38/specs/cli-doctor-foundations/spec.md
create mode 100644 openspec/changes/agent-codex-extract-git-scaffold-doctor-di-2026-04-22-15-38/specs/cli-modularization/spec.md
create mode 100644 openspec/changes/agent-codex-extract-git-scaffold-doctor-di-2026-04-22-15-38/tasks.md
diff --git a/README.md b/README.md
index 14442ab..27ca5ba 100644
--- a/README.md
+++ b/README.md
@@ -45,6 +45,10 @@ npm i -g @imdeadpool/guardex
guard many agent. keep one repo clean.
+[about_description.txt](./about_description.txt)
+
+Guardian T-Rex for your multi-agent repo. Isolated worktrees, file locks, and PR-only merges stop parallel Codex & Claude agents from overwriting each other's work. Auto-wires Oh My Codex, Oh My Claude, OpenSpec, and Caveman.
+
Guardian T-Rex for your multi-agent repo. Isolated worktrees, file locks, and PR-only merges stop parallel Codex & Claude agents from overwriting each other's work. Auto-wires Oh My Codex, Oh My Claude, OpenSpec, and Caveman.
diff --git a/openspec/changes/agent-codex-extract-git-scaffold-doctor-di-2026-04-22-15-38/.openspec.yaml b/openspec/changes/agent-codex-extract-git-scaffold-doctor-di-2026-04-22-15-38/.openspec.yaml
new file mode 100644
index 0000000..25345f4
--- /dev/null
+++ b/openspec/changes/agent-codex-extract-git-scaffold-doctor-di-2026-04-22-15-38/.openspec.yaml
@@ -0,0 +1,2 @@
+schema: spec-driven
+created: 2026-04-22
diff --git a/openspec/changes/agent-codex-extract-git-scaffold-doctor-di-2026-04-22-15-38/proposal.md b/openspec/changes/agent-codex-extract-git-scaffold-doctor-di-2026-04-22-15-38/proposal.md
new file mode 100644
index 0000000..9c1d37d
--- /dev/null
+++ b/openspec/changes/agent-codex-extract-git-scaffold-doctor-di-2026-04-22-15-38/proposal.md
@@ -0,0 +1,18 @@
+## Why
+
+- `src/cli/main.js` is still over 5,500 lines and retains the remaining git/worktree, scaffold/template, and protected-main doctor lifecycle clusters.
+- Those clusters keep the most sensitive behavior in the least reviewable file and force `src/sandbox`, `src/toolchain`, and `src/finish` to keep constructor-style dependency bags.
+- The requested outcome is a payoff-first extraction pass: move the remaining helper seams to owned modules, then delete the DI wrappers so the CLI reads as direct module wiring instead of a service locator.
+
+## What Changes
+
+- Move the remaining git/worktree helpers from `src/cli/main.js` into `src/git/index.js`.
+- Move the remaining managed-file, template, JSONC, and repo-settings helpers from `src/cli/main.js` into `src/scaffold/index.js`.
+- Extract the protected-main doctor sandbox lifecycle and related protected-base sandbox helpers into `src/doctor/index.js`.
+- Convert `src/sandbox/index.js`, `src/toolchain/index.js`, and `src/finish/index.js` to direct modules and remove `getSandboxApi()`, `getToolchainApi()`, and `getFinishApi()` from `src/cli/main.js`.
+
+## Impact
+
+- Primary surfaces: `src/cli/main.js`, `src/git/index.js`, `src/scaffold/index.js`, new `src/doctor/index.js`, `src/sandbox/index.js`, `src/toolchain/index.js`, `src/finish/index.js`, and focused CLI regression tests.
+- Main risk surface is `gx doctor` on protected branches plus `gx finish` auto-commit/sync behavior, so the pass must extend behavior-lock coverage first and rerun doctor/install/finish-adjacent suites after each extraction stage.
+- This is an internal cleanup only; command names, output wording, and zero-copy CLI behavior must stay stable.
diff --git a/openspec/changes/agent-codex-extract-git-scaffold-doctor-di-2026-04-22-15-38/specs/cli-doctor-foundations/spec.md b/openspec/changes/agent-codex-extract-git-scaffold-doctor-di-2026-04-22-15-38/specs/cli-doctor-foundations/spec.md
new file mode 100644
index 0000000..32fcb15
--- /dev/null
+++ b/openspec/changes/agent-codex-extract-git-scaffold-doctor-di-2026-04-22-15-38/specs/cli-doctor-foundations/spec.md
@@ -0,0 +1,11 @@
+## MODIFIED Requirements
+
+### Requirement: Typed protected-main doctor sandbox lifecycle
+The system SHALL keep the protected-main `gx doctor` sandbox path behaviorally equivalent while moving the lifecycle sequencing out of `src/cli/main.js` and into a dedicated doctor module.
+
+#### Scenario: Protected-main doctor lifecycle is extracted without behavior drift
+- **GIVEN** `gx doctor` runs on a protected local base branch
+- **WHEN** the protected-main doctor flow creates a sandbox, runs nested doctor, auto-commits repairs, and finishes through the PR path
+- **THEN** `src/cli/main.js` delegates that lifecycle to `src/doctor/index.js`
+- **AND** the observable output and success/failure behavior remain unchanged
+- **AND** the existing protected-main doctor regression tests still pass.
diff --git a/openspec/changes/agent-codex-extract-git-scaffold-doctor-di-2026-04-22-15-38/specs/cli-modularization/spec.md b/openspec/changes/agent-codex-extract-git-scaffold-doctor-di-2026-04-22-15-38/specs/cli-modularization/spec.md
new file mode 100644
index 0000000..a091f25
--- /dev/null
+++ b/openspec/changes/agent-codex-extract-git-scaffold-doctor-di-2026-04-22-15-38/specs/cli-modularization/spec.md
@@ -0,0 +1,19 @@
+## MODIFIED Requirements
+
+### Requirement: Module seams mirror operational responsibility
+The CLI SHALL keep git/worktree, scaffold/template, and doctor lifecycle helper ownership in their extracted `src/` modules instead of redefining those seams in `src/cli/main.js`.
+
+#### Scenario: Git and scaffold helper seams stay single-source
+- **WHEN** maintainers inspect `src/cli/main.js`
+- **THEN** git/worktree helpers are imported from `src/git/index.js`
+- **AND** scaffold/template/settings helpers are imported from `src/scaffold/index.js`
+- **AND** `src/cli/main.js` does not redefine those helpers locally.
+
+### Requirement: CLI module wiring is direct after extraction
+The modularized CLI SHALL wire extracted modules through direct exports/imports instead of constructor-style dependency bags.
+
+#### Scenario: Factory wrappers are removed after seam extraction
+- **WHEN** maintainers inspect the runtime modules after this cleanup
+- **THEN** `src/cli/main.js` does not define `getSandboxApi()`, `getToolchainApi()`, or `getFinishApi()`
+- **AND** `src/sandbox/index.js`, `src/toolchain/index.js`, and `src/finish/index.js` export direct functions instead of `create*Api` factories
+- **AND** require-time/syntax regressions do not occur from the factory removal.
diff --git a/openspec/changes/agent-codex-extract-git-scaffold-doctor-di-2026-04-22-15-38/tasks.md b/openspec/changes/agent-codex-extract-git-scaffold-doctor-di-2026-04-22-15-38/tasks.md
new file mode 100644
index 0000000..dda36a4
--- /dev/null
+++ b/openspec/changes/agent-codex-extract-git-scaffold-doctor-di-2026-04-22-15-38/tasks.md
@@ -0,0 +1,46 @@
+## Definition of Done
+
+This change is complete only when **all** of the following are true:
+
+- Every checkbox below is checked.
+- The agent branch reaches `MERGED` state on `origin` and the PR URL + state are recorded in the completion handoff.
+- If any step blocks (test failure, conflict, ambiguous result), append a `BLOCKED:` line under section 4 explaining the blocker and **STOP**. Do not tick remaining cleanup boxes; do not silently skip the cleanup pipeline.
+
+## Handoff
+
+- Handoff: change=`agent-codex-extract-git-scaffold-doctor-di-2026-04-22-15-38`; branch=`agent/codex/extract-doctor-sandbox-module-2026-04-22-15-38`; scope=`src/cli/main.js`, `src/git/index.js`, `src/scaffold/index.js`, `src/doctor/index.js`, `src/sandbox/index.js`, `src/toolchain/index.js`, `src/finish/index.js`, `test/cli-args-dispatch.test.js`; action=`extract the remaining git/scaffold/doctor helper seams and delete the DI factory wrappers without changing CLI behavior`.
+
+## 1. Specification
+
+- [x] 1.1 Finalize the git/scaffold/doctor/DI extraction scope and acceptance criteria for `agent-codex-extract-git-scaffold-doctor-di-2026-04-22-15-38`.
+- [x] 1.2 Confirm no capability spec delta is required because this change is a behavior-preserving internal extraction and cleanup pass.
+
+## 2. Implementation
+
+- [x] 2.1 Extend focused regression coverage so the remaining helper clusters and DI wrapper functions are behavior-locked before cleanup.
+- [x] 2.2 Move the remaining git/worktree helpers from `src/cli/main.js` into `src/git/index.js` and update callers.
+- [x] 2.3 Move the remaining scaffold/template/JSONC/settings helpers from `src/cli/main.js` into `src/scaffold/index.js` and update callers.
+- [x] 2.4 Extract the protected-main doctor sandbox lifecycle and related sandbox helpers into `src/doctor/index.js` and route `src/cli/main.js` through it.
+- [x] 2.5 Convert `src/sandbox/index.js`, `src/toolchain/index.js`, and `src/finish/index.js` to direct modules and delete the cached factory wrappers from `src/cli/main.js`.
+
+## 3. Verification
+
+- [x] 3.1 Run `node --check src/cli/main.js src/git/index.js src/scaffold/index.js src/doctor/index.js src/sandbox/index.js src/finish/index.js src/toolchain/index.js`.
+- [x] 3.2 Run `node --test test/cli-args-dispatch.test.js`.
+- [x] 3.3 Run focused CLI regression suites covering setup, doctor, install, metadata, and finish-adjacent behavior.
+- [x] 3.4 Run `npm test`.
+- [x] 3.5 Run `openspec validate agent-codex-extract-git-scaffold-doctor-di-2026-04-22-15-38 --type change --strict`.
+- [x] 3.6 Run `openspec validate --specs`.
+
+Verified on 2026-04-22:
+- `node --check src/cli/main.js src/git/index.js src/scaffold/index.js src/doctor/index.js src/sandbox/index.js src/finish/index.js src/toolchain/index.js`
+- `node --test test/cli-args-dispatch.test.js test/doctor.test.js test/install.test.js test/metadata.test.js test/finish.test.js test/setup.test.js`
+- `npm test` -> `209` pass, `0` fail, `1` skip
+- `openspec validate agent-codex-extract-git-scaffold-doctor-di-2026-04-22-15-38 --type change --strict`
+- `openspec validate --specs` -> `No items found to validate.`
+
+## 4. Cleanup (mandatory; run before claiming completion)
+
+- [ ] 4.1 Run `gx branch finish --branch agent/codex/extract-doctor-sandbox-module-2026-04-22-15-38 --base main --via-pr --wait-for-merge --cleanup`.
+- [ ] 4.2 Record the PR URL and final merge state (`MERGED`) in the completion handoff.
+- [ ] 4.3 Confirm the sandbox worktree is gone (`git worktree list` no longer shows the agent path; `git branch -a` shows no surviving local/remote refs for the branch).
diff --git a/src/cli/main.js b/src/cli/main.js
index 901aee5..130cdd6 100755
--- a/src/cli/main.js
+++ b/src/cli/main.js
@@ -3,7 +3,7 @@
const hooksModule = require('../hooks');
const sandboxModule = require('../sandbox');
const toolchainModule = require('../toolchain');
-const finishModule = require('../finish');
+const finishCommands = require('../finish');
const doctorModule = require('../doctor');
const {
fs,
@@ -73,14 +73,37 @@ const {
resolveRepoRoot,
isGitRepo,
discoverNestedGitRepos,
+ uniquePreserveOrder,
+ listLocalUserBranches,
+ listLocalAgentBranches,
+ mapWorktreePathsByBranch,
gitRefExists,
+ hasSignificantWorkingTreeChanges,
+ readConfiguredProtectedBranches,
+ readProtectedBranches,
+ ensureSetupProtectedBranches,
+ writeProtectedBranches,
readGitConfig,
+ resolveBaseBranch,
+ resolveSyncStrategy,
currentBranchName,
+ repoHasHeadCommit,
+ readBranchDisplayName,
+ hasOriginRemote: repoHasOriginRemote,
+ detectComposeHintFiles,
+ printSetupRepoHints,
ensureRepoBranch,
+ ensureOriginBaseRef,
workingTreeIsDirty,
aheadBehind,
+ lockRegistryStatus,
+ listAgentWorktrees,
+ listLocalAgentBranchesForFinish,
+ worktreeHasLocalChanges,
branchExists,
+ resolveFinishBaseBranch,
branchMergedIntoBase,
+ syncOperation,
} = require('../git');
const {
run,
@@ -131,94 +154,30 @@ const {
renderShellDispatchShim,
renderPythonDispatchShim,
managedForceConflictMessage,
+ renderManagedFile,
+ ensureGeneratedScriptShim,
+ ensureHookShim,
+ copyTemplateFile,
+ ensureTemplateFilePresent,
+ ensureOmxScaffold,
+ ensureLockRegistry,
+ lockStateOrError,
+ writeLockState,
+ removeLegacyPackageScripts,
+ installUserLevelAsset,
+ removeLegacyManagedRepoFile,
+ ensureAgentsSnippet,
+ ensureManagedGitignore,
+ stripJsonComments,
+ stripJsonTrailingCommas,
+ parseJsonObjectLikeFile,
+ buildRepoVscodeSettings,
+ ensureRepoVscodeSettings,
+ configureHooks,
printOperations,
printStandaloneOperations,
} = require('../scaffold');
-let sandboxApi;
-let toolchainApi;
-let finishApi;
-
-function getSandboxApi() {
- if (!sandboxApi) {
- sandboxApi = sandboxModule.createSandboxApi({
- protectedBaseWriteBlock,
- runInstallInternal,
- ensureSetupProtectedBranches,
- ensureParentWorkspaceView,
- buildParentWorkspaceView,
- runFixInternal,
- });
- }
- return sandboxApi;
-}
-
-function getToolchainApi() {
- if (!toolchainApi) {
- toolchainApi = toolchainModule.createToolchainApi({
- TOOL_NAME,
- NPM_BIN,
- NPX_BIN,
- packageJson,
- OPENSPEC_PACKAGE,
- OPENSPEC_BIN,
- GLOBAL_TOOLCHAIN_PACKAGES,
- parseAutoApproval,
- isInteractiveTerminal,
- promptYesNoStrict,
- run,
- checkForGuardexUpdate,
- printUpdateAvailableBanner,
- readInstalledGuardexVersion,
- restartIntoUpdatedGuardex,
- checkForOpenSpecPackageUpdate,
- printOpenSpecUpdateAvailableBanner,
- resolveGlobalInstallApproval,
- detectGlobalToolchainPackages,
- detectOptionalLocalCompanionTools,
- formatGlobalToolchainServiceName,
- askGlobalInstallForMissing,
- });
- }
- return toolchainApi;
-}
-
-function getFinishApi() {
- if (!finishApi) {
- finishApi = finishModule.createFinishApi({
- TOOL_NAME,
- LOCK_FILE_RELATIVE,
- path,
- fs,
- run,
- runPackageAsset,
- resolveRepoRoot,
- parseCleanupArgs,
- parseMergeArgs,
- parseFinishArgs,
- parseSyncArgs,
- listAgentWorktrees,
- listLocalAgentBranchesForFinish,
- uniquePreserveOrder,
- branchExists,
- resolveFinishBaseBranch,
- worktreeHasLocalChanges,
- branchMergedIntoBase,
- autoCommitWorktreeForFinish,
- resolveBaseBranch,
- resolveSyncStrategy,
- ensureOriginBaseRef,
- gitRun,
- currentBranchName,
- workingTreeIsDirty,
- aheadBehind,
- lockRegistryStatus,
- syncOperation,
- });
- }
- return finishApi;
-}
-
/**
* @typedef {Object} AutoFinishSummary
* @property {boolean} [enabled]
@@ -273,116 +232,27 @@ function getFinishApi() {
* @property {AutoFinishSummary} autoFinish
* @property {string | null} sandboxLockContent
*/
-function renderManagedFile(repoRoot, relativePath, content, options = {}) {
- const destinationPath = path.join(repoRoot, relativePath);
- const destinationExists = fs.existsSync(destinationPath);
- const force = Boolean(options.force);
- const dryRun = Boolean(options.dryRun);
-
- if (destinationExists) {
- const existingContent = fs.readFileSync(destinationPath, 'utf8');
- if (existingContent === content) {
- ensureExecutable(destinationPath, relativePath, dryRun);
- return { status: 'unchanged', file: relativePath };
- }
- if (!force && !isCriticalGuardrailPath(relativePath)) {
- throw new Error(managedForceConflictMessage(relativePath));
- }
- }
-
- ensureParentDir(repoRoot, destinationPath, dryRun);
- if (!dryRun) {
- fs.writeFileSync(destinationPath, content, 'utf8');
- ensureExecutable(destinationPath, relativePath, dryRun);
- }
-
- if (destinationExists && !force && isCriticalGuardrailPath(relativePath)) {
- return { status: dryRun ? 'would-repair-critical' : 'repaired-critical', file: relativePath };
- }
-
- return { status: destinationExists ? 'overwritten' : 'created', file: relativePath };
-}
-
-function ensureGeneratedScriptShim(repoRoot, spec, options = {}) {
- const content = spec.kind === 'python'
- ? renderPythonDispatchShim(spec.command)
- : renderShellDispatchShim(spec.command);
- return renderManagedFile(repoRoot, spec.relativePath, content, options);
-}
-
-function ensureHookShim(repoRoot, hookName, options = {}) {
- return renderManagedFile(
- repoRoot,
- path.posix.join('.githooks', hookName),
- renderShellDispatchShim(['hook', 'run', hookName]),
- options,
- );
-}
-
-function copyTemplateFile(repoRoot, relativeTemplatePath, force, dryRun) {
- const sourcePath = path.join(TEMPLATE_ROOT, relativeTemplatePath);
- const destinationRelativePath = toDestinationPath(relativeTemplatePath);
- const destinationPath = path.join(repoRoot, destinationRelativePath);
-
- const sourceContent = fs.readFileSync(sourcePath, 'utf8');
- const destinationExists = fs.existsSync(destinationPath);
-
- if (destinationExists) {
- const existingContent = fs.readFileSync(destinationPath, 'utf8');
- if (existingContent === sourceContent) {
- ensureExecutable(destinationPath, destinationRelativePath, dryRun);
- return { status: 'unchanged', file: destinationRelativePath };
- }
- if (!force && !isCriticalGuardrailPath(destinationRelativePath)) {
- throw new Error(managedForceConflictMessage(destinationRelativePath));
- }
- }
- ensureParentDir(repoRoot, destinationPath, dryRun);
- if (!dryRun) {
- fs.writeFileSync(destinationPath, sourceContent, 'utf8');
- ensureExecutable(destinationPath, destinationRelativePath, dryRun);
+function appendForceArgs(args, options) {
+ if (!options.force) {
+ return;
}
-
- if (destinationExists && !force && isCriticalGuardrailPath(destinationRelativePath)) {
- return { status: dryRun ? 'would-repair-critical' : 'repaired-critical', file: destinationRelativePath };
+ args.push('--force');
+ for (const managedPath of options.forceManagedPaths || []) {
+ args.push(managedPath);
}
-
- return { status: destinationExists ? 'overwritten' : 'created', file: destinationRelativePath };
}
-function ensureTemplateFilePresent(repoRoot, relativeTemplatePath, dryRun) {
- const sourcePath = path.join(TEMPLATE_ROOT, relativeTemplatePath);
- const destinationRelativePath = toDestinationPath(relativeTemplatePath);
- const destinationPath = path.join(repoRoot, destinationRelativePath);
- const sourceContent = fs.readFileSync(sourcePath, 'utf8');
-
- if (fs.existsSync(destinationPath)) {
- const existingContent = fs.readFileSync(destinationPath, 'utf8');
- if (existingContent === sourceContent) {
- ensureExecutable(destinationPath, destinationRelativePath, dryRun);
- return { status: 'unchanged', file: destinationRelativePath };
- }
-
- if (isCriticalGuardrailPath(destinationRelativePath)) {
- if (!dryRun) {
- fs.writeFileSync(destinationPath, sourceContent, 'utf8');
- ensureExecutable(destinationPath, destinationRelativePath, dryRun);
- }
- return { status: dryRun ? 'would-repair-critical' : 'repaired-critical', file: destinationRelativePath };
- }
-
- // In fix mode, avoid silently replacing local customizations.
- return { status: 'skipped-conflict', file: destinationRelativePath };
+function shouldForceManagedPath(options, relativePath) {
+ if (!options.force) {
+ return false;
}
-
- ensureParentDir(repoRoot, destinationPath, dryRun);
- if (!dryRun) {
- fs.writeFileSync(destinationPath, sourceContent, 'utf8');
- ensureExecutable(destinationPath, destinationRelativePath, dryRun);
+ const targetedPaths = Array.isArray(options.forceManagedPaths) ? options.forceManagedPaths : [];
+ if (targetedPaths.length === 0) {
+ return true;
}
-
- return { status: 'created', file: destinationRelativePath };
+ const normalized = normalizeManagedForcePath(relativePath);
+ return normalized !== null && targetedPaths.includes(normalized);
}
function ensureTargetedLegacyWorkflowShims(repoRoot, options) {
@@ -401,1762 +271,595 @@ function ensureTargetedLegacyWorkflowShims(repoRoot, options) {
return operations;
}
-function lockFilePath(repoRoot) {
- return path.join(repoRoot, LOCK_FILE_RELATIVE);
+function normalizeWorkspacePath(relativePath) {
+ return String(relativePath || '.').replace(/\\/g, '/');
}
-function ensureOmxScaffold(repoRoot, dryRun) {
- const operations = [];
-
- for (const relativeDir of REPO_SCAFFOLD_DIRECTORIES) {
- const absoluteDir = path.join(repoRoot, relativeDir);
- if (fs.existsSync(absoluteDir)) {
- if (!fs.statSync(absoluteDir).isDirectory()) {
- throw new Error(`Expected directory at ${relativeDir} but found a file.`);
- }
- operations.push({ status: 'unchanged', file: relativeDir });
- continue;
- }
-
- if (!dryRun) {
- fs.mkdirSync(absoluteDir, { recursive: true });
- }
- operations.push({ status: 'created', file: relativeDir });
- }
+function isCommandAvailable(commandName) {
+ return run('which', [commandName]).status === 0;
+}
- for (const relativeDir of OMX_SCAFFOLD_DIRECTORIES) {
- const absoluteDir = path.join(repoRoot, relativeDir);
- if (fs.existsSync(absoluteDir)) {
- if (!fs.statSync(absoluteDir).isDirectory()) {
- throw new Error(`Expected directory at ${relativeDir} but found a file.`);
- }
- operations.push({ status: 'unchanged', file: relativeDir });
- continue;
- }
+function buildParentWorkspaceView(repoRoot) {
+ const parentDir = path.dirname(repoRoot);
+ const workspaceFileName = `${path.basename(repoRoot)}-branches.code-workspace`;
+ const workspacePath = path.join(parentDir, workspaceFileName);
+ const repoRelativePath = normalizeWorkspacePath(path.relative(parentDir, repoRoot) || '.');
- if (!dryRun) {
- fs.mkdirSync(absoluteDir, { recursive: true });
- }
- operations.push({ status: 'created', file: relativeDir });
- }
+ return {
+ workspacePath,
+ payload: {
+ folders: [
+ { path: repoRelativePath },
+ ...AGENT_WORKTREE_RELATIVE_DIRS.map((relativeDir) => ({
+ path: normalizeWorkspacePath(path.join(repoRelativePath === '.' ? '' : repoRelativePath, relativeDir)),
+ })),
+ ],
+ settings: {
+ 'scm.alwaysShowRepositories': true,
+ },
+ },
+ };
+}
- for (const [relativeFile, defaultContent] of OMX_SCAFFOLD_FILES.entries()) {
- const absoluteFile = path.join(repoRoot, relativeFile);
- if (fs.existsSync(absoluteFile)) {
- if (!fs.statSync(absoluteFile).isFile()) {
- throw new Error(`Expected file at ${relativeFile} but found a directory.`);
- }
- operations.push({ status: 'unchanged', file: relativeFile });
- continue;
- }
+function ensureParentWorkspaceView(repoRoot, dryRun) {
+ const { workspacePath, payload } = buildParentWorkspaceView(repoRoot);
+ const operationFile = path.relative(repoRoot, workspacePath) || path.basename(workspacePath);
+ const nextContent = `${JSON.stringify(payload, null, 2)}\n`;
+ const note = 'parent VS Code workspace view';
+ if (!fs.existsSync(workspacePath)) {
if (!dryRun) {
- fs.mkdirSync(path.dirname(absoluteFile), { recursive: true });
- fs.writeFileSync(absoluteFile, defaultContent, 'utf8');
+ fs.writeFileSync(workspacePath, nextContent, 'utf8');
}
- operations.push({ status: 'created', file: relativeFile });
+ return { status: dryRun ? 'would-create' : 'created', file: operationFile, note };
}
- return operations;
-}
-
-function ensureLockRegistry(repoRoot, dryRun) {
- const absolutePath = lockFilePath(repoRoot);
- if (fs.existsSync(absolutePath)) {
- return { status: 'unchanged', file: LOCK_FILE_RELATIVE };
+ const currentContent = fs.readFileSync(workspacePath, 'utf8');
+ if (currentContent === nextContent) {
+ return { status: 'unchanged', file: operationFile, note };
}
if (!dryRun) {
- fs.mkdirSync(path.dirname(absolutePath), { recursive: true });
- fs.writeFileSync(absolutePath, JSON.stringify({ locks: {} }, null, 2) + '\n', 'utf8');
- }
-
- return { status: 'created', file: LOCK_FILE_RELATIVE };
-}
-
-function lockStateOrError(repoRoot) {
- const lockPath = lockFilePath(repoRoot);
- if (!fs.existsSync(lockPath)) {
- return { ok: false, error: `${LOCK_FILE_RELATIVE} is missing` };
- }
-
- try {
- const parsed = JSON.parse(fs.readFileSync(lockPath, 'utf8'));
- if (!parsed || typeof parsed !== 'object' || typeof parsed.locks !== 'object' || parsed.locks === null) {
- return { ok: false, error: `${LOCK_FILE_RELATIVE} has invalid schema (expected { locks: {} })` };
- }
-
- // Normalize older schema entries.
- for (const [filePath, entry] of Object.entries(parsed.locks)) {
- if (!entry || typeof entry !== 'object') {
- parsed.locks[filePath] = { branch: '', claimed_at: '', allow_delete: false };
- continue;
- }
- if (!Object.prototype.hasOwnProperty.call(entry, 'allow_delete')) {
- entry.allow_delete = false;
- }
- }
-
- return { ok: true, raw: parsed, locks: parsed.locks };
- } catch (error) {
- return { ok: false, error: `${LOCK_FILE_RELATIVE} is invalid JSON: ${error.message}` };
+ fs.writeFileSync(workspacePath, nextContent, 'utf8');
}
+ return { status: dryRun ? 'would-update' : 'updated', file: operationFile, note };
}
-function writeLockState(repoRoot, payload, dryRun) {
- if (dryRun) return;
- const lockPath = lockFilePath(repoRoot);
- fs.mkdirSync(path.dirname(lockPath), { recursive: true });
- fs.writeFileSync(lockPath, JSON.stringify(payload, null, 2) + '\n', 'utf8');
+function hasGuardexBootstrapFiles(repoRoot) {
+ const required = [
+ 'AGENTS.md',
+ '.githooks/pre-commit',
+ '.githooks/pre-push',
+ LOCK_FILE_RELATIVE,
+ ];
+ return required.every((relativePath) => fs.existsSync(path.join(repoRoot, relativePath)));
}
-function removeLegacyPackageScripts(repoRoot, dryRun) {
- const packagePath = path.join(repoRoot, 'package.json');
- if (!fs.existsSync(packagePath)) {
- return { status: 'skipped', file: 'package.json', note: 'package.json not found' };
+function protectedBaseWriteBlock(options, { requireBootstrap = true } = {}) {
+ if (options.dryRun || options.allowProtectedBaseWrite) {
+ return null;
}
- let pkg;
- try {
- pkg = JSON.parse(fs.readFileSync(packagePath, 'utf8'));
- } catch (error) {
- throw new Error(`Unable to parse package.json in target repo: ${error.message}`);
+ const repoRoot = resolveRepoRoot(options.target);
+ if (requireBootstrap && !hasGuardexBootstrapFiles(repoRoot)) {
+ return null;
}
- const existingScripts = pkg.scripts && typeof pkg.scripts === 'object'
- ? pkg.scripts
- : {};
- pkg.scripts = existingScripts;
- let changed = false;
- for (const [key, value] of Object.entries(LEGACY_MANAGED_PACKAGE_SCRIPTS)) {
- if (existingScripts[key] === value) {
- delete existingScripts[key];
- changed = true;
- }
+ const branch = currentBranchName(repoRoot);
+ if (branch !== 'main') {
+ return null;
}
- if (!changed) {
- return { status: 'unchanged', file: 'package.json', note: 'no Guardex-managed agent:* scripts found' };
+ const protectedBranches = readProtectedBranches(repoRoot);
+ if (!protectedBranches.includes(branch)) {
+ return null;
}
- if (!dryRun) {
- fs.writeFileSync(packagePath, JSON.stringify(pkg, null, 2) + '\n', 'utf8');
- }
+ return {
+ repoRoot,
+ branch,
+ };
+}
- return { status: dryRun ? 'would-update' : 'updated', file: 'package.json', note: 'removed Guardex-managed agent:* scripts' };
+function assertProtectedMainWriteAllowed(options, commandName) {
+ return sandboxModule.assertProtectedMainWriteAllowed(options, commandName);
}
-function installUserLevelAsset(asset, options = {}) {
- const dryRun = Boolean(options.dryRun);
- const force = Boolean(options.force);
- const destinationPath = path.join(GUARDEX_HOME_DIR, asset.destination);
- const sourceContent = fs.readFileSync(asset.source, 'utf8');
- const destinationExists = fs.existsSync(destinationPath);
-
- if (destinationExists) {
- const existingContent = fs.readFileSync(destinationPath, 'utf8');
- if (existingContent === sourceContent) {
- return { status: 'unchanged', file: asset.destination };
- }
- if (!force) {
- return { status: 'skipped-conflict', file: asset.destination };
+function runSetupBootstrapInternal(options) {
+ const installPayload = runInstallInternal(options);
+ installPayload.operations.push(
+ ensureSetupProtectedBranches(installPayload.repoRoot, Boolean(options.dryRun)),
+ );
+
+ let parentWorkspace = null;
+ if (options.parentWorkspaceView) {
+ installPayload.operations.push(
+ ensureParentWorkspaceView(installPayload.repoRoot, Boolean(options.dryRun)),
+ );
+ if (!options.dryRun) {
+ parentWorkspace = buildParentWorkspaceView(installPayload.repoRoot);
}
}
- if (!dryRun) {
- fs.mkdirSync(path.dirname(destinationPath), { recursive: true });
- fs.writeFileSync(destinationPath, sourceContent, 'utf8');
- }
- return { status: destinationExists ? (dryRun ? 'would-update' : 'updated') : 'created', file: asset.destination };
+ const fixPayload = runFixInternal({
+ target: installPayload.repoRoot,
+ dryRun: options.dryRun,
+ force: options.force,
+ forceManagedPaths: options.forceManagedPaths,
+ dropStaleLocks: true,
+ skipAgents: options.skipAgents,
+ skipPackageJson: options.skipPackageJson,
+ skipGitignore: options.skipGitignore,
+ allowProtectedBaseWrite: options.allowProtectedBaseWrite,
+ });
+
+ return {
+ installPayload,
+ fixPayload,
+ parentWorkspace,
+ };
}
-function removeLegacyManagedRepoFile(repoRoot, relativePath, options = {}) {
- const dryRun = Boolean(options.dryRun);
- const force = Boolean(options.force);
- const absolutePath = path.join(repoRoot, relativePath);
- if (!fs.existsSync(absolutePath)) {
- return { status: 'unchanged', file: relativePath, note: 'not present' };
- }
- if (!fs.statSync(absolutePath).isFile()) {
- return { status: 'skipped-conflict', file: relativePath, note: 'not a regular file' };
- }
+function extractAgentBranchStartMetadata(output) {
+ const branchMatch = String(output || '').match(/^\[agent-branch-start\] Created branch: (.+)$/m);
+ const worktreeMatch = String(output || '').match(/^\[agent-branch-start\] Worktree: (.+)$/m);
+ return {
+ branch: branchMatch ? branchMatch[1].trim() : '',
+ worktreePath: worktreeMatch ? worktreeMatch[1].trim() : '',
+ };
+}
- const skillAsset = USER_LEVEL_SKILL_ASSETS.find((asset) => asset.destination === relativePath);
- if (skillAsset) {
- const userLevelPath = path.join(GUARDEX_HOME_DIR, skillAsset.destination);
- if (!fs.existsSync(userLevelPath)) {
- return { status: 'skipped', file: relativePath, note: 'user-level replacement not installed' };
- }
+function resolveSandboxTarget(repoRoot, worktreePath, targetPath) {
+ const resolvedTarget = path.resolve(targetPath);
+ const relativeTarget = path.relative(repoRoot, resolvedTarget);
+ if (relativeTarget.startsWith('..') || path.isAbsolute(relativeTarget)) {
+ throw new Error(`sandbox target must stay inside repo root: ${resolvedTarget}`);
}
-
- const templateRelative = skillAsset
- ? skillAsset.source.slice(TEMPLATE_ROOT.length + 1)
- : relativePath.replace(/^\./, '');
- const sourcePath = path.join(TEMPLATE_ROOT, templateRelative);
- if (!fs.existsSync(sourcePath)) {
- return { status: 'skipped', file: relativePath, note: 'template source missing' };
+ if (!relativeTarget || relativeTarget === '.') {
+ return worktreePath;
}
+ return path.join(worktreePath, relativeTarget);
+}
- const sourceContent = fs.readFileSync(sourcePath, 'utf8');
- const existingContent = fs.readFileSync(absolutePath, 'utf8');
- if (existingContent !== sourceContent && !force) {
- return { status: 'skipped-conflict', file: relativePath, note: 'local edits differ from managed template' };
- }
+function buildSandboxSetupArgs(options, sandboxTarget) {
+ const args = ['setup', '--target', sandboxTarget, '--no-global-install', '--no-recursive'];
+ appendForceArgs(args, options);
+ if (options.skipAgents) args.push('--skip-agents');
+ if (options.skipPackageJson) args.push('--skip-package-json');
+ if (options.skipGitignore) args.push('--no-gitignore');
+ if (options.dryRun) args.push('--dry-run');
+ return args;
+}
- if (!dryRun) {
- fs.rmSync(absolutePath, { force: true });
- }
- return { status: dryRun ? 'would-remove' : 'removed', file: relativePath };
+function isSpawnFailure(result) {
+ return Boolean(result?.error) && typeof result?.status !== 'number';
}
-function ensureAgentsSnippet(repoRoot, dryRun, options = {}) {
- const agentsPath = path.join(repoRoot, 'AGENTS.md');
- const snippet = fs.readFileSync(path.join(TEMPLATE_ROOT, 'AGENTS.multiagent-safety.md'), 'utf8').trimEnd();
- const managedRegex = new RegExp(
- `${AGENTS_MARKER_START.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}[\\s\\S]*?${AGENTS_MARKER_END.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}`,
- 'm',
- );
+function protectedBaseSandboxBranchPrefix() {
+ const now = new Date();
+ const stamp = [
+ now.getUTCFullYear(),
+ String(now.getUTCMonth() + 1).padStart(2, '0'),
+ String(now.getUTCDate()).padStart(2, '0'),
+ ].join('') + '-' + [
+ String(now.getUTCHours()).padStart(2, '0'),
+ String(now.getUTCMinutes()).padStart(2, '0'),
+ String(now.getUTCSeconds()).padStart(2, '0'),
+ ].join('');
+ return `agent/gx/${stamp}`;
+}
- if (!fs.existsSync(agentsPath)) {
- if (!dryRun) {
- fs.writeFileSync(agentsPath, `# AGENTS\n\n${snippet}\n`, 'utf8');
- }
- return { status: 'created', file: 'AGENTS.md' };
- }
+function protectedBaseSandboxWorktreePath(repoRoot, branchName) {
+ return path.join(repoRoot, defaultAgentWorktreeRelativeDir(), branchName.replace(/\//g, '__'));
+}
- const existing = fs.readFileSync(agentsPath, 'utf8');
- if (managedRegex.test(existing)) {
- const next = existing.replace(managedRegex, snippet);
- if (next === existing) {
- return { status: 'unchanged', file: 'AGENTS.md' };
- }
- if (!dryRun) {
- fs.writeFileSync(agentsPath, next, 'utf8');
- }
- return { status: 'updated', file: 'AGENTS.md', note: 'refreshed gitguardex-managed block' };
+function resolveProtectedBaseSandboxStartRef(repoRoot, baseBranch) {
+ run('git', ['-C', repoRoot, 'fetch', 'origin', baseBranch, '--quiet'], { timeout: 20_000 });
+ if (gitRefExists(repoRoot, `refs/remotes/origin/${baseBranch}`)) {
+ return `origin/${baseBranch}`;
}
-
- if (existing.includes(AGENTS_MARKER_START)) {
- return { status: 'unchanged', file: 'AGENTS.md', note: 'existing marker found without managed end marker' };
+ if (gitRefExists(repoRoot, `refs/heads/${baseBranch}`)) {
+ return baseBranch;
}
-
- const separator = existing.endsWith('\n') ? '\n' : '\n\n';
- if (!dryRun) {
- fs.writeFileSync(agentsPath, `${existing}${separator}${snippet}\n`, 'utf8');
+ if (currentBranchName(repoRoot) === baseBranch) {
+ return null;
}
-
- return { status: 'updated', file: 'AGENTS.md' };
+ throw new Error(`Unable to find base ref for sandbox bootstrap: ${baseBranch}`);
}
-function ensureManagedGitignore(repoRoot, dryRun) {
- const gitignorePath = path.join(repoRoot, '.gitignore');
- const managedBlock = [
- GITIGNORE_MARKER_START,
- ...MANAGED_GITIGNORE_PATHS,
- GITIGNORE_MARKER_END,
- ].join('\n');
- const managedRegex = new RegExp(
- `${GITIGNORE_MARKER_START.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}[\\s\\S]*?${GITIGNORE_MARKER_END.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}`,
- 'm',
- );
+function startProtectedBaseSandboxFallback(blocked, sandboxSuffix) {
+ const branchPrefix = protectedBaseSandboxBranchPrefix();
+ let selectedBranch = '';
+ let selectedWorktreePath = '';
- if (!fs.existsSync(gitignorePath)) {
- if (!dryRun) {
- fs.writeFileSync(gitignorePath, `${managedBlock}\n`, 'utf8');
- }
- return { status: 'created', file: '.gitignore', note: 'added gitguardex-managed entries' };
- }
-
- const existing = fs.readFileSync(gitignorePath, 'utf8');
- if (managedRegex.test(existing)) {
- const next = existing.replace(managedRegex, managedBlock);
- if (next === existing) {
- return { status: 'unchanged', file: '.gitignore' };
- }
- if (!dryRun) {
- fs.writeFileSync(gitignorePath, next, 'utf8');
- }
- return { status: 'updated', file: '.gitignore', note: 'refreshed gitguardex-managed entries' };
- }
-
- const separator = existing.endsWith('\n') ? '\n' : '\n\n';
- if (!dryRun) {
- fs.writeFileSync(gitignorePath, `${existing}${separator}${managedBlock}\n`, 'utf8');
- }
- return { status: 'updated', file: '.gitignore', note: 'appended gitguardex-managed entries' };
-}
-
-function stripJsonComments(source) {
- let result = '';
- let inString = false;
- let escapeNext = false;
- let inLineComment = false;
- let inBlockComment = false;
-
- for (let index = 0; index < source.length; index += 1) {
- const current = source[index];
- const next = source[index + 1];
-
- if (inLineComment) {
- if (current === '\n' || current === '\r') {
- inLineComment = false;
- result += current;
- }
+ for (let attempt = 0; attempt < 30; attempt += 1) {
+ const suffix = attempt === 0 ? sandboxSuffix : `${attempt + 1}-${sandboxSuffix}`;
+ const candidateBranch = `${branchPrefix}-${suffix}`;
+ const candidateWorktreePath = protectedBaseSandboxWorktreePath(blocked.repoRoot, candidateBranch);
+ if (gitRefExists(blocked.repoRoot, `refs/heads/${candidateBranch}`)) {
continue;
}
-
- if (inBlockComment) {
- if (current === '*' && next === '/') {
- inBlockComment = false;
- index += 1;
- continue;
- }
- if (current === '\n' || current === '\r') {
- result += current;
- }
+ if (fs.existsSync(candidateWorktreePath)) {
continue;
}
+ selectedBranch = candidateBranch;
+ selectedWorktreePath = candidateWorktreePath;
+ break;
+ }
- if (inString) {
- result += current;
- if (escapeNext) {
- escapeNext = false;
- } else if (current === '\\') {
- escapeNext = true;
- } else if (current === '"') {
- inString = false;
- }
- continue;
- }
+ if (!selectedBranch || !selectedWorktreePath) {
+ throw new Error('Unable to allocate unique sandbox branch/worktree');
+ }
- if (current === '"') {
- inString = true;
- result += current;
- continue;
- }
+ fs.mkdirSync(path.dirname(selectedWorktreePath), { recursive: true });
+ const startRef = resolveProtectedBaseSandboxStartRef(blocked.repoRoot, blocked.branch);
+ const addArgs = startRef
+ ? ['-C', blocked.repoRoot, 'worktree', 'add', '-b', selectedBranch, selectedWorktreePath, startRef]
+ : ['-C', blocked.repoRoot, 'worktree', 'add', '--orphan', selectedWorktreePath];
+ const addResult = run('git', addArgs);
+ if (isSpawnFailure(addResult)) {
+ throw addResult.error;
+ }
+ if (addResult.status !== 0) {
+ throw new Error((addResult.stderr || addResult.stdout || 'failed to create sandbox').trim());
+ }
- if (current === '/' && next === '/') {
- inLineComment = true;
- index += 1;
- continue;
+ if (!startRef) {
+ const renameResult = run(
+ 'git',
+ ['-C', selectedWorktreePath, 'branch', '-m', selectedBranch],
+ { timeout: 20_000 },
+ );
+ if (isSpawnFailure(renameResult)) {
+ throw renameResult.error;
}
-
- if (current === '/' && next === '*') {
- inBlockComment = true;
- index += 1;
- continue;
+ if (renameResult.status !== 0) {
+ throw new Error(
+ (renameResult.stderr || renameResult.stdout || 'failed to name orphan sandbox branch').trim(),
+ );
}
-
- result += current;
}
- return result;
+ return {
+ metadata: {
+ branch: selectedBranch,
+ worktreePath: selectedWorktreePath,
+ },
+ stdout:
+ `[agent-branch-start] Created branch: ${selectedBranch}\n` +
+ `[agent-branch-start] Worktree: ${selectedWorktreePath}\n`,
+ stderr: addResult.stderr || '',
+ };
}
-function stripJsonTrailingCommas(source) {
- let result = '';
- let inString = false;
- let escapeNext = false;
-
- for (let index = 0; index < source.length; index += 1) {
- const current = source[index];
-
- if (inString) {
- result += current;
- if (escapeNext) {
- escapeNext = false;
- } else if (current === '\\') {
- escapeNext = true;
- } else if (current === '"') {
- inString = false;
- }
- continue;
- }
+function startProtectedBaseSandbox(blocked, { taskName, sandboxSuffix }) {
+ if (sandboxSuffix === 'gx-doctor') {
+ return startProtectedBaseSandboxFallback(blocked, sandboxSuffix);
+ }
- if (current === '"') {
- inString = true;
- result += current;
- continue;
- }
+ const startResult = runPackageAsset('branchStart', [
+ '--task',
+ taskName,
+ '--agent',
+ SHORT_TOOL_NAME,
+ '--base',
+ blocked.branch,
+ ], { cwd: blocked.repoRoot });
+ if (isSpawnFailure(startResult)) {
+ throw startResult.error;
+ }
+ if (startResult.status !== 0) {
+ return startProtectedBaseSandboxFallback(blocked, sandboxSuffix);
+ }
- if (current === ',') {
- let lookahead = index + 1;
- while (lookahead < source.length && /\s/.test(source[lookahead])) {
- lookahead += 1;
- }
- if (source[lookahead] === '}' || source[lookahead] === ']') {
- continue;
- }
- }
+ const metadata = extractAgentBranchStartMetadata(startResult.stdout);
+ const currentBranch = currentBranchName(blocked.repoRoot);
+ const worktreePath = metadata.worktreePath ? path.resolve(metadata.worktreePath) : '';
+ const repoRootPath = path.resolve(blocked.repoRoot);
+ const hasSafeWorktree = Boolean(worktreePath) && worktreePath !== repoRootPath;
+ const branchChanged = Boolean(currentBranch) && currentBranch !== blocked.branch;
- result += current;
+ if (!hasSafeWorktree || branchChanged) {
+ const restoreResult = ensureRepoBranch(blocked.repoRoot, blocked.branch);
+ if (!restoreResult.ok) {
+ const detail = [restoreResult.stderr, restoreResult.stdout].filter(Boolean).join('\n').trim();
+ throw new Error(
+ `sandbox startup switched protected base checkout and could not restore '${blocked.branch}'.` +
+ (detail ? `\n${detail}` : ''),
+ );
+ }
+ return startProtectedBaseSandboxFallback(blocked, sandboxSuffix);
}
- return result;
+ return {
+ metadata,
+ stdout: startResult.stdout || '',
+ stderr: startResult.stderr || '',
+ };
}
-function parseJsonObjectLikeFile(source, relativePath) {
- let parsed;
- try {
- parsed = JSON.parse(stripJsonTrailingCommas(stripJsonComments(source)));
- } catch (error) {
- throw new Error(`Unable to parse ${relativePath} as JSON or JSONC: ${error.message}`);
- }
+function cleanupProtectedBaseSandbox(repoRoot, metadata) {
+ const result = {
+ worktree: 'skipped',
+ branch: 'skipped',
+ note: 'missing sandbox metadata',
+ };
- if (!parsed || typeof parsed !== 'object' || Array.isArray(parsed)) {
- throw new Error(`${relativePath} must contain a top-level object.`);
+ if (!metadata?.worktreePath || !metadata?.branch) {
+ return result;
}
- return parsed;
-}
-
-function uniqueStringList(values) {
- const seen = new Set();
- const result = [];
+ if (fs.existsSync(metadata.worktreePath)) {
+ const removeResult = run(
+ 'git',
+ ['-C', repoRoot, 'worktree', 'remove', '--force', metadata.worktreePath],
+ { timeout: 30_000 },
+ );
+ if (isSpawnFailure(removeResult)) {
+ throw removeResult.error;
+ }
+ if (removeResult.status !== 0) {
+ throw new Error(
+ (removeResult.stderr || removeResult.stdout || 'failed to remove sandbox worktree').trim(),
+ );
+ }
+ result.worktree = 'removed';
+ } else {
+ result.worktree = 'missing';
+ }
- for (const value of values) {
- if (typeof value !== 'string' || seen.has(value)) {
- continue;
+ if (gitRefExists(repoRoot, `refs/heads/${metadata.branch}`)) {
+ const branchDeleteResult = run(
+ 'git',
+ ['-C', repoRoot, 'branch', '-D', metadata.branch],
+ { timeout: 20_000 },
+ );
+ if (isSpawnFailure(branchDeleteResult)) {
+ throw branchDeleteResult.error;
+ }
+ if (branchDeleteResult.status !== 0) {
+ throw new Error(
+ (branchDeleteResult.stderr || branchDeleteResult.stdout || 'failed to delete sandbox branch').trim(),
+ );
}
- seen.add(value);
- result.push(value);
+ result.branch = 'deleted';
+ } else {
+ result.branch = 'missing';
}
+ result.note = 'sandbox worktree pruned';
return result;
}
-function buildRepoVscodeSettings(existingSettings = {}) {
- const nextSettings = { ...existingSettings };
- const existingIgnoredFolders = Array.isArray(existingSettings[REPO_SCAN_IGNORED_FOLDERS_SETTING])
- ? existingSettings[REPO_SCAN_IGNORED_FOLDERS_SETTING]
- : [];
-
- nextSettings[REPO_SCAN_IGNORED_FOLDERS_SETTING] = uniqueStringList([
- ...existingIgnoredFolders,
- ...MANAGED_REPO_SCAN_IGNORED_FOLDERS,
- ]);
+function runSetupInSandbox(options, blocked, repoLabel = '') {
+ const startResult = startProtectedBaseSandbox(blocked, {
+ taskName: `${SHORT_TOOL_NAME}-setup`,
+ sandboxSuffix: 'gx-setup',
+ });
+ const metadata = startResult.metadata;
- return nextSettings;
-}
+ if (startResult.stdout) process.stdout.write(startResult.stdout);
+ if (startResult.stderr) process.stderr.write(startResult.stderr);
+ console.log(
+ `[${TOOL_NAME}] setup blocked on protected branch '${blocked.branch}' in an initialized repo; ` +
+ 'refreshing through a sandbox worktree and syncing managed bootstrap files back locally.',
+ );
-function ensureRepoVscodeSettings(repoRoot, dryRun) {
- const settingsPath = path.join(repoRoot, SHARED_VSCODE_SETTINGS_RELATIVE);
- const destinationExists = fs.existsSync(settingsPath);
- const existingContent = destinationExists ? fs.readFileSync(settingsPath, 'utf8') : '';
- const existingSettings = destinationExists
- ? parseJsonObjectLikeFile(existingContent, SHARED_VSCODE_SETTINGS_RELATIVE)
- : {};
- const nextContent = `${JSON.stringify(buildRepoVscodeSettings(existingSettings), null, 2)}\n`;
+ const sandboxTarget = resolveSandboxTarget(blocked.repoRoot, metadata.worktreePath, options.target);
+ const nestedResult = run(
+ process.execPath,
+ [__filename, ...buildSandboxSetupArgs(options, sandboxTarget)],
+ { cwd: metadata.worktreePath },
+ );
+ if (isSpawnFailure(nestedResult)) {
+ throw nestedResult.error;
+ }
+ if (nestedResult.status !== 0) {
+ if (nestedResult.stdout) process.stdout.write(nestedResult.stdout);
+ if (nestedResult.stderr) process.stderr.write(nestedResult.stderr);
+ throw new Error(
+ `sandboxed setup failed for protected branch '${blocked.branch}'. ` +
+ `Inspect sandbox at ${metadata.worktreePath}`,
+ );
+ }
- if (destinationExists && existingContent === nextContent) {
- return { status: 'unchanged', file: SHARED_VSCODE_SETTINGS_RELATIVE };
+ const syncOptions = {
+ ...options,
+ target: blocked.repoRoot,
+ recursive: false,
+ allowProtectedBaseWrite: true,
+ };
+ const { installPayload, fixPayload, parentWorkspace } = runSetupBootstrapInternal(syncOptions);
+ printOperations(`Setup/install${repoLabel}`, installPayload, syncOptions.dryRun);
+ printOperations(`Setup/fix${repoLabel}`, fixPayload, syncOptions.dryRun);
+ if (!syncOptions.dryRun && parentWorkspace) {
+ console.log(`[${TOOL_NAME}] Parent workspace view: ${parentWorkspace.workspacePath}`);
}
- ensureParentDir(repoRoot, settingsPath, dryRun);
- if (!dryRun) {
- fs.writeFileSync(settingsPath, nextContent, 'utf8');
+ const scanResult = runScanInternal({ target: blocked.repoRoot, json: false });
+ const currentBaseBranch = currentBranchName(scanResult.repoRoot);
+ const autoFinishSummary = doctorModule.autoFinishReadyAgentBranches(scanResult.repoRoot, {
+ baseBranch: currentBaseBranch,
+ dryRun: syncOptions.dryRun,
+ });
+ printScanResult(scanResult, false);
+ if (autoFinishSummary.enabled) {
+ console.log(
+ `[${TOOL_NAME}] Auto-finish sweep (base=${currentBaseBranch}): attempted=${autoFinishSummary.attempted}, completed=${autoFinishSummary.completed}, skipped=${autoFinishSummary.skipped}, failed=${autoFinishSummary.failed}`,
+ );
+ for (const detail of autoFinishSummary.details) {
+ console.log(`[${TOOL_NAME}] ${detail}`);
+ }
+ } else if (autoFinishSummary.details.length > 0) {
+ console.log(`[${TOOL_NAME}] ${autoFinishSummary.details[0]}`);
}
+ const cleanupResult = cleanupProtectedBaseSandbox(blocked.repoRoot, metadata);
+ console.log(
+ `[${TOOL_NAME}] Protected-base setup sandbox cleanup: ${cleanupResult.note} ` +
+ `(worktree=${cleanupResult.worktree}, branch=${cleanupResult.branch}).`,
+ );
+
return {
- status: destinationExists ? 'updated' : 'created',
- file: SHARED_VSCODE_SETTINGS_RELATIVE,
- note: 'shared VS Code repo scan ignores for Guardex worktrees',
+ scanResult,
};
}
-function configureHooks(repoRoot, dryRun) {
- if (dryRun) {
- return { status: 'would-set', key: 'core.hooksPath', value: '.githooks' };
- }
- const result = run('git', ['-C', repoRoot, 'config', 'core.hooksPath', '.githooks']);
- if (result.status !== 0) {
- throw new Error(`Failed to set git hooksPath: ${(result.stderr || '').trim()}`);
- }
+function todayDateStamp() {
+ return new Date().toISOString().slice(0, 10);
+}
+
+function inferGithubRepoFromOrigin(repoRoot) {
+ const rawOrigin = readGitConfig(repoRoot, 'remote.origin.url');
+ if (!rawOrigin) return '';
- return { status: 'set', key: 'core.hooksPath', value: '.githooks' };
+ const httpsMatch = rawOrigin.match(/github\.com[:/](.+?)(?:\.git)?$/i);
+ if (!httpsMatch) return '';
+ const slug = (httpsMatch[1] || '').replace(/^\/+/, '').trim();
+ if (!slug || !slug.includes('/')) return '';
+ return `github.com/${slug}`;
}
-function appendForceArgs(args, options) {
- if (!options.force) {
- return;
- }
- args.push('--force');
- for (const managedPath of options.forceManagedPaths || []) {
- args.push(managedPath);
- }
-}
-
-function shouldForceManagedPath(options, relativePath) {
- if (!options.force) {
- return false;
- }
- const targetedPaths = Array.isArray(options.forceManagedPaths) ? options.forceManagedPaths : [];
- if (targetedPaths.length === 0) {
- return true;
- }
- const normalized = normalizeManagedForcePath(relativePath);
- return normalized !== null && targetedPaths.includes(normalized);
-}
-
-function normalizeWorkspacePath(relativePath) {
- return String(relativePath || '.').replace(/\\/g, '/');
-}
-
-function buildParentWorkspaceView(repoRoot) {
- const parentDir = path.dirname(repoRoot);
- const workspaceFileName = `${path.basename(repoRoot)}-branches.code-workspace`;
- const workspacePath = path.join(parentDir, workspaceFileName);
- const repoRelativePath = normalizeWorkspacePath(path.relative(parentDir, repoRoot) || '.');
-
- return {
- workspacePath,
- payload: {
- folders: [
- { path: repoRelativePath },
- ...AGENT_WORKTREE_RELATIVE_DIRS.map((relativeDir) => ({
- path: normalizeWorkspacePath(path.join(repoRelativePath === '.' ? '' : repoRelativePath, relativeDir)),
- })),
- ],
- settings: {
- 'scm.alwaysShowRepositories': true,
- },
- },
- };
-}
-
-function ensureParentWorkspaceView(repoRoot, dryRun) {
- const { workspacePath, payload } = buildParentWorkspaceView(repoRoot);
- const operationFile = path.relative(repoRoot, workspacePath) || path.basename(workspacePath);
- const nextContent = `${JSON.stringify(payload, null, 2)}\n`;
- const note = 'parent VS Code workspace view';
-
- if (!fs.existsSync(workspacePath)) {
- if (!dryRun) {
- fs.writeFileSync(workspacePath, nextContent, 'utf8');
- }
- return { status: dryRun ? 'would-create' : 'created', file: operationFile, note };
- }
-
- const currentContent = fs.readFileSync(workspacePath, 'utf8');
- if (currentContent === nextContent) {
- return { status: 'unchanged', file: operationFile, note };
- }
-
- if (!dryRun) {
- fs.writeFileSync(workspacePath, nextContent, 'utf8');
- }
- return { status: dryRun ? 'would-update' : 'updated', file: operationFile, note };
-}
-
-function hasGuardexBootstrapFiles(repoRoot) {
- const required = [
- 'AGENTS.md',
- '.githooks/pre-commit',
- '.githooks/pre-push',
- LOCK_FILE_RELATIVE,
- ];
- return required.every((relativePath) => fs.existsSync(path.join(repoRoot, relativePath)));
-}
-
-function protectedBaseWriteBlock(options, { requireBootstrap = true } = {}) {
- if (options.dryRun || options.allowProtectedBaseWrite) {
- return null;
- }
-
- const repoRoot = resolveRepoRoot(options.target);
- if (requireBootstrap && !hasGuardexBootstrapFiles(repoRoot)) {
- return null;
- }
-
- const branch = currentBranchName(repoRoot);
- if (branch !== 'main') {
- return null;
- }
-
- const protectedBranches = readProtectedBranches(repoRoot);
- if (!protectedBranches.includes(branch)) {
- return null;
- }
-
- return {
- repoRoot,
- branch,
- };
-}
-
-function assertProtectedMainWriteAllowed(options, commandName) {
- return getSandboxApi().assertProtectedMainWriteAllowed(options, commandName);
-}
-
-function runSetupBootstrapInternal(options) {
- return getSandboxApi().runSetupBootstrapInternal(options);
-}
-
-function extractAgentBranchStartMetadata(output) {
- const branchMatch = String(output || '').match(/^\[agent-branch-start\] Created branch: (.+)$/m);
- const worktreeMatch = String(output || '').match(/^\[agent-branch-start\] Worktree: (.+)$/m);
- return {
- branch: branchMatch ? branchMatch[1].trim() : '',
- worktreePath: worktreeMatch ? worktreeMatch[1].trim() : '',
- };
-}
-
-function resolveSandboxTarget(repoRoot, worktreePath, targetPath) {
- const resolvedTarget = path.resolve(targetPath);
- const relativeTarget = path.relative(repoRoot, resolvedTarget);
- if (relativeTarget.startsWith('..') || path.isAbsolute(relativeTarget)) {
- throw new Error(`sandbox target must stay inside repo root: ${resolvedTarget}`);
- }
- if (!relativeTarget || relativeTarget === '.') {
- return worktreePath;
- }
- return path.join(worktreePath, relativeTarget);
-}
-
-function buildSandboxSetupArgs(options, sandboxTarget) {
- const args = ['setup', '--target', sandboxTarget, '--no-global-install', '--no-recursive'];
- appendForceArgs(args, options);
- if (options.skipAgents) args.push('--skip-agents');
- if (options.skipPackageJson) args.push('--skip-package-json');
- if (options.skipGitignore) args.push('--no-gitignore');
- if (options.dryRun) args.push('--dry-run');
- return args;
-}
-
-function isSpawnFailure(result) {
- return Boolean(result?.error) && typeof result?.status !== 'number';
-}
-
-function protectedBaseSandboxBranchPrefix() {
- const now = new Date();
- const stamp = [
- now.getUTCFullYear(),
- String(now.getUTCMonth() + 1).padStart(2, '0'),
- String(now.getUTCDate()).padStart(2, '0'),
- ].join('') + '-' + [
- String(now.getUTCHours()).padStart(2, '0'),
- String(now.getUTCMinutes()).padStart(2, '0'),
- String(now.getUTCSeconds()).padStart(2, '0'),
- ].join('');
- return `agent/gx/${stamp}`;
-}
-
-function protectedBaseSandboxWorktreePath(repoRoot, branchName) {
- return path.join(repoRoot, defaultAgentWorktreeRelativeDir(), branchName.replace(/\//g, '__'));
-}
-
-function resolveProtectedBaseSandboxStartRef(repoRoot, baseBranch) {
- run('git', ['-C', repoRoot, 'fetch', 'origin', baseBranch, '--quiet'], { timeout: 20_000 });
- if (gitRefExists(repoRoot, `refs/remotes/origin/${baseBranch}`)) {
- return `origin/${baseBranch}`;
- }
- if (gitRefExists(repoRoot, `refs/heads/${baseBranch}`)) {
- return baseBranch;
- }
- if (currentBranchName(repoRoot) === baseBranch) {
- return null;
- }
- throw new Error(`Unable to find base ref for sandbox bootstrap: ${baseBranch}`);
-}
-
-function startProtectedBaseSandboxFallback(blocked, sandboxSuffix) {
- const branchPrefix = protectedBaseSandboxBranchPrefix();
- let selectedBranch = '';
- let selectedWorktreePath = '';
-
- for (let attempt = 0; attempt < 30; attempt += 1) {
- const suffix = attempt === 0 ? sandboxSuffix : `${attempt + 1}-${sandboxSuffix}`;
- const candidateBranch = `${branchPrefix}-${suffix}`;
- const candidateWorktreePath = protectedBaseSandboxWorktreePath(blocked.repoRoot, candidateBranch);
- if (gitRefExists(blocked.repoRoot, `refs/heads/${candidateBranch}`)) {
- continue;
- }
- if (fs.existsSync(candidateWorktreePath)) {
- continue;
- }
- selectedBranch = candidateBranch;
- selectedWorktreePath = candidateWorktreePath;
- break;
- }
-
- if (!selectedBranch || !selectedWorktreePath) {
- throw new Error('Unable to allocate unique sandbox branch/worktree');
- }
-
- fs.mkdirSync(path.dirname(selectedWorktreePath), { recursive: true });
- const startRef = resolveProtectedBaseSandboxStartRef(blocked.repoRoot, blocked.branch);
- const addArgs = startRef
- ? ['-C', blocked.repoRoot, 'worktree', 'add', '-b', selectedBranch, selectedWorktreePath, startRef]
- : ['-C', blocked.repoRoot, 'worktree', 'add', '--orphan', selectedWorktreePath];
- const addResult = run('git', addArgs);
- if (isSpawnFailure(addResult)) {
- throw addResult.error;
- }
- if (addResult.status !== 0) {
- throw new Error((addResult.stderr || addResult.stdout || 'failed to create sandbox').trim());
- }
-
- if (!startRef) {
- const renameResult = run(
- 'git',
- ['-C', selectedWorktreePath, 'branch', '-m', selectedBranch],
- { timeout: 20_000 },
- );
- if (isSpawnFailure(renameResult)) {
- throw renameResult.error;
- }
- if (renameResult.status !== 0) {
- throw new Error(
- (renameResult.stderr || renameResult.stdout || 'failed to name orphan sandbox branch').trim(),
- );
- }
- }
-
- return {
- metadata: {
- branch: selectedBranch,
- worktreePath: selectedWorktreePath,
- },
- stdout:
- `[agent-branch-start] Created branch: ${selectedBranch}\n` +
- `[agent-branch-start] Worktree: ${selectedWorktreePath}\n`,
- stderr: addResult.stderr || '',
- };
-}
-
-function startProtectedBaseSandbox(blocked, { taskName, sandboxSuffix }) {
- if (sandboxSuffix === 'gx-doctor') {
- return startProtectedBaseSandboxFallback(blocked, sandboxSuffix);
- }
-
- const startResult = runPackageAsset('branchStart', [
- '--task',
- taskName,
- '--agent',
- SHORT_TOOL_NAME,
- '--base',
- blocked.branch,
- ], { cwd: blocked.repoRoot });
- if (isSpawnFailure(startResult)) {
- throw startResult.error;
- }
- if (startResult.status !== 0) {
- return startProtectedBaseSandboxFallback(blocked, sandboxSuffix);
- }
-
- const metadata = extractAgentBranchStartMetadata(startResult.stdout);
- const currentBranch = currentBranchName(blocked.repoRoot);
- const worktreePath = metadata.worktreePath ? path.resolve(metadata.worktreePath) : '';
- const repoRootPath = path.resolve(blocked.repoRoot);
- const hasSafeWorktree = Boolean(worktreePath) && worktreePath !== repoRootPath;
- const branchChanged = Boolean(currentBranch) && currentBranch !== blocked.branch;
-
- if (!hasSafeWorktree || branchChanged) {
- const restoreResult = ensureRepoBranch(blocked.repoRoot, blocked.branch);
- if (!restoreResult.ok) {
- const detail = [restoreResult.stderr, restoreResult.stdout].filter(Boolean).join('\n').trim();
- throw new Error(
- `sandbox startup switched protected base checkout and could not restore '${blocked.branch}'.` +
- (detail ? `\n${detail}` : ''),
- );
- }
- return startProtectedBaseSandboxFallback(blocked, sandboxSuffix);
- }
-
- return {
- metadata,
- stdout: startResult.stdout || '',
- stderr: startResult.stderr || '',
- };
-}
-
-function cleanupProtectedBaseSandbox(repoRoot, metadata) {
- const result = {
- worktree: 'skipped',
- branch: 'skipped',
- note: 'missing sandbox metadata',
- };
-
- if (!metadata?.worktreePath || !metadata?.branch) {
- return result;
- }
-
- if (fs.existsSync(metadata.worktreePath)) {
- const removeResult = run(
- 'git',
- ['-C', repoRoot, 'worktree', 'remove', '--force', metadata.worktreePath],
- { timeout: 30_000 },
- );
- if (isSpawnFailure(removeResult)) {
- throw removeResult.error;
- }
- if (removeResult.status !== 0) {
- throw new Error(
- (removeResult.stderr || removeResult.stdout || 'failed to remove sandbox worktree').trim(),
- );
- }
- result.worktree = 'removed';
- } else {
- result.worktree = 'missing';
- }
-
- if (gitRefExists(repoRoot, `refs/heads/${metadata.branch}`)) {
- const branchDeleteResult = run(
- 'git',
- ['-C', repoRoot, 'branch', '-D', metadata.branch],
- { timeout: 20_000 },
- );
- if (isSpawnFailure(branchDeleteResult)) {
- throw branchDeleteResult.error;
- }
- if (branchDeleteResult.status !== 0) {
- throw new Error(
- (branchDeleteResult.stderr || branchDeleteResult.stdout || 'failed to delete sandbox branch').trim(),
- );
- }
- result.branch = 'deleted';
- } else {
- result.branch = 'missing';
- }
-
- result.note = 'sandbox worktree pruned';
- return result;
-}
-
-function runSetupInSandbox(options, blocked, repoLabel = '') {
- const startResult = startProtectedBaseSandbox(blocked, {
- taskName: `${SHORT_TOOL_NAME}-setup`,
- sandboxSuffix: 'gx-setup',
- });
- const metadata = startResult.metadata;
-
- if (startResult.stdout) process.stdout.write(startResult.stdout);
- if (startResult.stderr) process.stderr.write(startResult.stderr);
- console.log(
- `[${TOOL_NAME}] setup blocked on protected branch '${blocked.branch}' in an initialized repo; ` +
- 'refreshing through a sandbox worktree and syncing managed bootstrap files back locally.',
- );
-
- const sandboxTarget = resolveSandboxTarget(blocked.repoRoot, metadata.worktreePath, options.target);
- const nestedResult = run(
- process.execPath,
- [__filename, ...buildSandboxSetupArgs(options, sandboxTarget)],
- { cwd: metadata.worktreePath },
- );
- if (isSpawnFailure(nestedResult)) {
- throw nestedResult.error;
- }
- if (nestedResult.status !== 0) {
- if (nestedResult.stdout) process.stdout.write(nestedResult.stdout);
- if (nestedResult.stderr) process.stderr.write(nestedResult.stderr);
- throw new Error(
- `sandboxed setup failed for protected branch '${blocked.branch}'. ` +
- `Inspect sandbox at ${metadata.worktreePath}`,
- );
- }
-
- const syncOptions = {
- ...options,
- target: blocked.repoRoot,
- recursive: false,
- allowProtectedBaseWrite: true,
- };
- const { installPayload, fixPayload, parentWorkspace } = runSetupBootstrapInternal(syncOptions);
- printOperations(`Setup/install${repoLabel}`, installPayload, syncOptions.dryRun);
- printOperations(`Setup/fix${repoLabel}`, fixPayload, syncOptions.dryRun);
- if (!syncOptions.dryRun && parentWorkspace) {
- console.log(`[${TOOL_NAME}] Parent workspace view: ${parentWorkspace.workspacePath}`);
- }
-
- const scanResult = runScanInternal({ target: blocked.repoRoot, json: false });
- const currentBaseBranch = currentBranchName(scanResult.repoRoot);
- const autoFinishSummary = autoFinishReadyAgentBranches(scanResult.repoRoot, {
- baseBranch: currentBaseBranch,
- dryRun: syncOptions.dryRun,
- });
- printScanResult(scanResult, false);
- if (autoFinishSummary.enabled) {
- console.log(
- `[${TOOL_NAME}] Auto-finish sweep (base=${currentBaseBranch}): attempted=${autoFinishSummary.attempted}, completed=${autoFinishSummary.completed}, skipped=${autoFinishSummary.skipped}, failed=${autoFinishSummary.failed}`,
- );
- for (const detail of autoFinishSummary.details) {
- console.log(`[${TOOL_NAME}] ${detail}`);
- }
- } else if (autoFinishSummary.details.length > 0) {
- console.log(`[${TOOL_NAME}] ${autoFinishSummary.details[0]}`);
- }
-
- const cleanupResult = cleanupProtectedBaseSandbox(blocked.repoRoot, metadata);
- console.log(
- `[${TOOL_NAME}] Protected-base setup sandbox cleanup: ${cleanupResult.note} ` +
- `(worktree=${cleanupResult.worktree}, branch=${cleanupResult.branch}).`,
- );
-
- return {
- scanResult,
- };
-}
-
-
-function todayDateStamp() {
- return new Date().toISOString().slice(0, 10);
-}
-
-function inferGithubRepoFromOrigin(repoRoot) {
- const rawOrigin = readGitConfig(repoRoot, 'remote.origin.url');
- if (!rawOrigin) return '';
-
- const httpsMatch = rawOrigin.match(/github\.com[:/](.+?)(?:\.git)?$/i);
- if (!httpsMatch) return '';
- const slug = (httpsMatch[1] || '').replace(/^\/+/, '').trim();
- if (!slug || !slug.includes('/')) return '';
- return `github.com/${slug}`;
-}
-
-function inferGithubRepoSlug(rawValue) {
- const raw = String(rawValue || '').trim();
- if (!raw) return '';
- const match = raw.match(/github\.com[:/](.+?)(?:\.git)?$/i);
- if (!match) return '';
- const slug = String(match[1] || '')
- .replace(/^\/+/, '')
- .replace(/^github\.com\//i, '')
- .trim();
- if (!slug || !slug.includes('/')) return '';
- return slug;
-}
-
-function resolveScorecardRepo(repoRoot, explicitRepo) {
- if (explicitRepo) {
- return explicitRepo.trim();
- }
- const inferred = inferGithubRepoFromOrigin(repoRoot);
- if (inferred) return inferred;
- throw new Error(
- 'Unable to infer GitHub repo from origin remote. Pass --repo github.com//.',
- );
-}
-
-function runScorecardJson(repo) {
- const result = run(SCORECARD_BIN, ['--repo', repo, '--format', 'json'], { allowFailure: true });
- if (result.status !== 0) {
- const details = (result.stderr || result.stdout || '').trim();
- throw new Error(
- `Failed to run scorecard CLI ('${SCORECARD_BIN} --repo ${repo} --format json').${details ? `\n${details}` : ''}`,
- );
- }
-
- try {
- return JSON.parse(result.stdout || '{}');
- } catch (error) {
- throw new Error(`Unable to parse scorecard JSON output: ${error.message}`);
- }
-}
-
-function readScorecardJsonFile(filePath) {
- const absolute = path.resolve(filePath);
- if (!fs.existsSync(absolute)) {
- throw new Error(`scorecard JSON file not found: ${absolute}`);
- }
- try {
- return JSON.parse(fs.readFileSync(absolute, 'utf8'));
- } catch (error) {
- throw new Error(`Unable to parse scorecard JSON file: ${error.message}`);
- }
-}
-
-function normalizeScorecardChecks(payload) {
- const rawChecks = Array.isArray(payload?.checks) ? payload.checks : [];
- return rawChecks.map((check) => {
- const name = String(check?.name || 'Unknown');
- const rawScore = Number(check?.score);
- const score = Number.isFinite(rawScore) ? rawScore : 0;
- return {
- name,
- score,
- risk: SCORECARD_RISK_BY_CHECK[name] || 'Unknown',
- };
- });
-}
-
-function renderScorecardBaselineMarkdown({ repo, score, checks, capturedAt, scorecardVersion, reportDate }) {
- const rows = checks
- .map((item) => `| ${item.name} | ${item.score} | ${item.risk} |`)
- .join('\n');
-
- return [
- '# OpenSSF Scorecard Baseline Report',
- '',
- `- **Repository:** \`${repo}\``,
- '- **Source:** generated by `gx report scorecard`',
- `- **Captured at:** ${capturedAt}`,
- `- **Scorecard version:** \`${scorecardVersion}\``,
- `- **Overall score:** **${score} / 10**`,
- '',
- '## Check breakdown',
- '',
- '| Check | Score | Risk |',
- '|---|---:|---|',
- rows || '| (none) | 0 | Unknown |',
- '',
- `## Report date`,
- '',
- `- ${reportDate}`,
- '',
- ].join('\n');
-}
-
-function renderScorecardRemediationPlanMarkdown({ baselineRelativePath, checks }) {
- const failing = checks.filter((item) => item.score < 10);
- const failingRows = failing
- .sort((a, b) => a.score - b.score || a.name.localeCompare(b.name))
- .map((item) => `| ${item.name} | ${item.score} | ${item.risk} |`)
- .join('\n');
-
- return [
- '# OpenSSF Scorecard Remediation Plan',
- '',
- `Based on baseline report: \`${baselineRelativePath}\`.`,
- '',
- '## Failing checks',
- '',
- '| Check | Score | Risk |',
- '|---|---:|---|',
- (failingRows || '| None | 10 | N/A |'),
- '',
- '## Priority order',
- '',
- '1. Fix **High** risk checks first (especially score 0 items).',
- '2. Then close **Medium** risk checks with score < 10.',
- '3. Finally address **Low** risk ecosystem/process checks.',
- '',
- '## Verification loop',
- '',
- '1. Run scorecard again.',
- '2. Re-generate baseline + remediation files.',
- '3. Compare score deltas and track improved checks.',
- '',
- ].join('\n');
-}
-
-function parseBranchList(rawValue) {
- return String(rawValue || '')
- .split(/[\s,]+/)
- .map((item) => item.trim())
- .filter(Boolean);
-}
-
-function uniquePreserveOrder(items) {
- const seen = new Set();
- const result = [];
- for (const item of items) {
- if (seen.has(item)) continue;
- seen.add(item);
- result.push(item);
- }
- return result;
-}
-
-function readConfiguredProtectedBranches(repoRoot) {
- const result = gitRun(repoRoot, ['config', '--get', GIT_PROTECTED_BRANCHES_KEY], { allowFailure: true });
- if (result.status !== 0) {
- return null;
- }
- const parsed = uniquePreserveOrder(parseBranchList(result.stdout.trim()));
- if (parsed.length === 0) {
- return null;
- }
- return parsed;
-}
-
-function listLocalUserBranches(repoRoot) {
- const result = gitRun(repoRoot, ['for-each-ref', '--format=%(refname:short)', 'refs/heads'], { allowFailure: true });
- const branchNames = result.status === 0
- ? uniquePreserveOrder(
- String(result.stdout || '')
- .split('\n')
- .map((item) => item.trim())
- .filter(Boolean),
- )
- : [];
-
- const additionalUserBranches = branchNames.filter(
- (branchName) =>
- !branchName.startsWith('agent/') &&
- !DEFAULT_PROTECTED_BRANCHES.includes(branchName),
- );
- if (additionalUserBranches.length > 0) {
- return additionalUserBranches;
- }
-
- const current = gitRun(repoRoot, ['branch', '--show-current'], { allowFailure: true });
- if (current.status !== 0) {
- return [];
- }
-
- const branchName = String(current.stdout || '').trim();
- if (
- !branchName ||
- branchName.startsWith('agent/') ||
- DEFAULT_PROTECTED_BRANCHES.includes(branchName)
- ) {
- return [];
- }
-
- return [branchName];
-}
-
-function listLocalAgentBranches(repoRoot) {
- const result = gitRun(
- repoRoot,
- ['for-each-ref', '--format=%(refname:short)', 'refs/heads/agent/'],
- { allowFailure: true },
- );
- if (result.status !== 0) {
- return [];
- }
- return uniquePreserveOrder(
- String(result.stdout || '')
- .split('\n')
- .map((item) => item.trim())
- .filter(Boolean),
- );
-}
-
-function mapWorktreePathsByBranch(repoRoot) {
- const result = gitRun(repoRoot, ['worktree', 'list', '--porcelain'], { allowFailure: true });
- const map = new Map();
- if (result.status !== 0) {
- return map;
- }
-
- const lines = String(result.stdout || '').split('\n');
- let currentWorktree = '';
- for (const line of lines) {
- if (line.startsWith('worktree ')) {
- currentWorktree = line.slice('worktree '.length).trim();
- continue;
- }
- if (line.startsWith('branch refs/heads/')) {
- const branchName = line.slice('branch refs/heads/'.length).trim();
- if (currentWorktree && branchName) {
- map.set(branchName, currentWorktree);
- }
- }
- }
- return map;
-}
-
-function hasSignificantWorkingTreeChanges(worktreePath) {
- const result = run('git', [
- '-C',
- worktreePath,
- 'status',
- '--porcelain',
- '--untracked-files=normal',
- '--',
- ]);
- if (result.status !== 0) {
- return true;
- }
-
- const lines = String(result.stdout || '')
- .split('\n')
- .map((line) => line.trimEnd())
- .filter((line) => line.length > 0);
-
- for (const line of lines) {
- const pathPart = (line.length > 3 ? line.slice(3) : '').trim();
- if (!pathPart) continue;
- if (pathPart === LOCK_FILE_RELATIVE) continue;
- if (pathPart.startsWith(`${LOCK_FILE_RELATIVE} -> `)) continue;
- if (pathPart.endsWith(` -> ${LOCK_FILE_RELATIVE}`)) continue;
- return true;
- }
- return false;
-}
-
-function originRemoteLooksLikeGithub(repoRoot) {
- const originUrl = readGitConfig(repoRoot, 'remote.origin.url');
- if (!originUrl) {
- return false;
- }
- return /github\.com[:/]/i.test(originUrl);
-}
-
-function autoFinishReadyAgentBranches(repoRoot, options = {}) {
- const baseBranch = String(options.baseBranch || '').trim();
- const dryRun = Boolean(options.dryRun);
- const waitForMerge = options.waitForMerge !== false;
- const excludedBranches = new Set(
- Array.isArray(options.excludeBranches)
- ? options.excludeBranches.map((branch) => String(branch || '').trim()).filter(Boolean)
- : [],
- );
-
- const summary = {
- enabled: true,
- baseBranch,
- attempted: 0,
- completed: 0,
- skipped: 0,
- failed: 0,
- details: [],
- };
-
- if (!baseBranch || baseBranch === 'HEAD' || baseBranch.startsWith('agent/')) {
- summary.enabled = false;
- summary.details.push('Skipped auto-finish sweep (base branch is missing or not a non-agent local branch).');
- return summary;
- }
-
- if (String(process.env.GUARDEX_DOCTOR_SANDBOX || '') === '1') {
- summary.enabled = false;
- summary.details.push('Skipped auto-finish sweep inside doctor sandbox pass.');
- return summary;
- }
-
- if (String(process.env.GUARDEX_SKIP_AUTO_FINISH_READY_BRANCHES || '') === '1') {
- summary.enabled = false;
- summary.details.push('Skipped auto-finish sweep (GUARDEX_SKIP_AUTO_FINISH_READY_BRANCHES=1).');
- return summary;
- }
-
- if (dryRun) {
- summary.enabled = false;
- summary.details.push('Skipped auto-finish sweep in dry-run mode.');
- return summary;
- }
-
- const hasOrigin = gitRun(repoRoot, ['remote', 'get-url', 'origin'], { allowFailure: true }).status === 0;
- if (!hasOrigin) {
- summary.enabled = false;
- summary.details.push('Skipped auto-finish sweep (origin remote missing).');
- return summary;
- }
- const explicitGhBin = Boolean(String(process.env.GUARDEX_GH_BIN || '').trim());
- if (!explicitGhBin && !originRemoteLooksLikeGithub(repoRoot)) {
- summary.enabled = false;
- summary.details.push('Skipped auto-finish sweep (origin remote is not GitHub).');
- return summary;
- }
-
- const ghBin = process.env.GUARDEX_GH_BIN || 'gh';
- if (run(ghBin, ['--version']).status !== 0) {
- summary.enabled = false;
- summary.details.push(`Skipped auto-finish sweep (${ghBin} not available).`);
- return summary;
- }
-
- const branchWorktrees = mapWorktreePathsByBranch(repoRoot);
- const agentBranches = listLocalAgentBranches(repoRoot);
- if (agentBranches.length === 0) {
- summary.enabled = false;
- summary.details.push('No local agent branches found for auto-finish sweep.');
- return summary;
- }
-
- for (const branch of agentBranches) {
- if (excludedBranches.has(branch)) {
- summary.skipped += 1;
- summary.details.push(`[skip] ${branch}: excluded from this auto-finish sweep.`);
- continue;
- }
-
- if (branch === baseBranch) {
- summary.skipped += 1;
- summary.details.push(`[skip] ${branch}: source branch equals base branch.`);
- continue;
- }
-
- let counts;
- try {
- counts = aheadBehind(repoRoot, branch, baseBranch);
- } catch (error) {
- summary.failed += 1;
- summary.details.push(`[fail] ${branch}: unable to compute ahead/behind (${error.message}).`);
- continue;
- }
-
- if (counts.ahead <= 0) {
- summary.skipped += 1;
- summary.details.push(`[skip] ${branch}: already merged into ${baseBranch}.`);
- continue;
- }
-
- const branchWorktree = branchWorktrees.get(branch) || '';
- if (branchWorktree && hasSignificantWorkingTreeChanges(branchWorktree)) {
- summary.skipped += 1;
- summary.details.push(`[skip] ${branch}: dirty worktree (${branchWorktree}).`);
- continue;
- }
-
- summary.attempted += 1;
- const finishArgs = [
- '--branch',
- branch,
- '--base',
- baseBranch,
- '--via-pr',
- waitForMerge ? '--wait-for-merge' : '--no-wait-for-merge',
- '--cleanup',
- ];
- const finishResult = runPackageAsset('branchFinish', finishArgs, { cwd: repoRoot });
- const combinedOutput = [finishResult.stdout || '', finishResult.stderr || ''].join('\n').trim();
-
- if (finishResult.status === 0) {
- summary.completed += 1;
- summary.details.push(`[done] ${branch}: auto-finish completed.`);
- continue;
- }
-
- const recoverableConflict = detectRecoverableAutoFinishConflict(combinedOutput);
- if (recoverableConflict) {
- summary.skipped += 1;
- const tail = combinedOutput ? ` ${combinedOutput.split('\n').slice(-2).join(' | ')}` : '';
- summary.details.push(`[skip] ${branch}: ${recoverableConflict.rawLabel}${tail}`);
- continue;
- }
-
- summary.failed += 1;
- const tail = combinedOutput ? ` ${combinedOutput.split('\n').slice(-2).join(' | ')}` : '';
- summary.details.push(`[fail] ${branch}: auto-finish failed.${tail}`);
- }
-
- return summary;
+function inferGithubRepoSlug(rawValue) {
+ const raw = String(rawValue || '').trim();
+ if (!raw) return '';
+ const match = raw.match(/github\.com[:/](.+?)(?:\.git)?$/i);
+ if (!match) return '';
+ const slug = String(match[1] || '')
+ .replace(/^\/+/, '')
+ .replace(/^github\.com\//i, '')
+ .trim();
+ if (!slug || !slug.includes('/')) return '';
+ return slug;
}
-function ensureSetupProtectedBranches(repoRoot, dryRun) {
- const localUserBranches = listLocalUserBranches(repoRoot);
- if (localUserBranches.length === 0) {
- return {
- status: 'unchanged',
- file: `git config ${GIT_PROTECTED_BRANCHES_KEY}`,
- note: 'no additional local user branches detected',
- };
- }
-
- const configured = readConfiguredProtectedBranches(repoRoot);
- const currentBranches = configured || [...DEFAULT_PROTECTED_BRANCHES];
- const missingBranches = localUserBranches.filter((branchName) => !currentBranches.includes(branchName));
- if (missingBranches.length === 0) {
- return {
- status: 'unchanged',
- file: `git config ${GIT_PROTECTED_BRANCHES_KEY}`,
- note: 'local user branches already protected',
- };
- }
-
- const nextBranches = uniquePreserveOrder([...currentBranches, ...missingBranches]);
- if (!dryRun) {
- writeProtectedBranches(repoRoot, nextBranches);
+function resolveScorecardRepo(repoRoot, explicitRepo) {
+ if (explicitRepo) {
+ return explicitRepo.trim();
}
-
- return {
- status: dryRun ? 'would-update' : 'updated',
- file: `git config ${GIT_PROTECTED_BRANCHES_KEY}`,
- note: `added local user branch(es): ${missingBranches.join(', ')}`,
- };
+ const inferred = inferGithubRepoFromOrigin(repoRoot);
+ if (inferred) return inferred;
+ throw new Error(
+ 'Unable to infer GitHub repo from origin remote. Pass --repo github.com//.',
+ );
}
-function readProtectedBranches(repoRoot) {
- const result = gitRun(repoRoot, ['config', '--get', GIT_PROTECTED_BRANCHES_KEY], { allowFailure: true });
+function runScorecardJson(repo) {
+ const result = run(SCORECARD_BIN, ['--repo', repo, '--format', 'json'], { allowFailure: true });
if (result.status !== 0) {
- return [...DEFAULT_PROTECTED_BRANCHES];
- }
-
- const parsed = uniquePreserveOrder(parseBranchList(result.stdout.trim()));
- if (parsed.length === 0) {
- return [...DEFAULT_PROTECTED_BRANCHES];
- }
- return parsed;
-}
-
-function writeProtectedBranches(repoRoot, branches) {
- if (branches.length === 0) {
- gitRun(repoRoot, ['config', '--unset-all', GIT_PROTECTED_BRANCHES_KEY], { allowFailure: true });
- return;
- }
- gitRun(repoRoot, ['config', GIT_PROTECTED_BRANCHES_KEY, branches.join(' ')]);
-}
-
-function resolveBaseBranch(repoRoot, explicitBase) {
- if (explicitBase) {
- return explicitBase;
- }
- const configured = readGitConfig(repoRoot, GIT_BASE_BRANCH_KEY);
- return configured || DEFAULT_BASE_BRANCH;
-}
-
-function resolveSyncStrategy(repoRoot, explicitStrategy) {
- const strategy = (explicitStrategy || readGitConfig(repoRoot, GIT_SYNC_STRATEGY_KEY) || DEFAULT_SYNC_STRATEGY)
- .trim()
- .toLowerCase();
- if (strategy !== 'rebase' && strategy !== 'merge') {
- throw new Error(`Invalid sync strategy '${strategy}' (expected: rebase or merge)`);
- }
- return strategy;
-}
-
-function repoHasHeadCommit(repoRoot) {
- return gitRun(repoRoot, ['rev-parse', '--verify', 'HEAD'], { allowFailure: true }).status === 0;
-}
-
-function readBranchDisplayName(repoRoot) {
- const symbolic = gitRun(repoRoot, ['symbolic-ref', '--quiet', '--short', 'HEAD'], { allowFailure: true });
- if (symbolic.status === 0) {
- const branch = String(symbolic.stdout || '').trim();
- if (!branch) {
- return '(unknown)';
- }
- return repoHasHeadCommit(repoRoot) ? branch : `${branch} (unborn; no commits yet)`;
- }
-
- const detached = gitRun(repoRoot, ['rev-parse', '--short', 'HEAD'], { allowFailure: true });
- if (detached.status === 0) {
- return `(detached at ${String(detached.stdout || '').trim()})`;
- }
- return '(unknown)';
-}
-
-function repoHasOriginRemote(repoRoot) {
- return gitRun(repoRoot, ['remote', 'get-url', 'origin'], { allowFailure: true }).status === 0;
-}
-
-function detectComposeHintFiles(repoRoot) {
- return COMPOSE_HINT_FILES.filter((relativePath) => fs.existsSync(path.join(repoRoot, relativePath)));
-}
-
-function printSetupRepoHints(repoRoot, baseBranch, repoLabel = '') {
- const branchDisplay = readBranchDisplayName(repoRoot);
- const hasHeadCommit = repoHasHeadCommit(repoRoot);
- const hasOrigin = repoHasOriginRemote(repoRoot);
- const composeFiles = detectComposeHintFiles(repoRoot);
- if (hasHeadCommit && hasOrigin && composeFiles.length === 0) {
- return;
- }
-
- const label = repoLabel ? ` ${repoLabel}` : '';
- if (!hasHeadCommit) {
- console.log(`[${TOOL_NAME}] Fresh repo onboarding${label}: current branch is ${branchDisplay}.`);
- console.log(`[${TOOL_NAME}] Bootstrap commit${label}: git add . && git commit -m "bootstrap gitguardex"`);
- console.log(
- `[${TOOL_NAME}] First agent flow${label}: ` +
- `gx branch start "" "codex" -> ` +
- `gx locks claim --branch "$(git branch --show-current)" -> ` +
- `gx branch finish --branch "$(git branch --show-current)" --base ${baseBranch} --via-pr --wait-for-merge`,
- );
- }
- if (!hasOrigin) {
- console.log(`[${TOOL_NAME}] No origin remote${label}: finish and auto-merge flows stay local until you add one.`);
- }
- if (composeFiles.length > 0) {
- console.log(
- `[${TOOL_NAME}] Docker Compose helper${label}: detected ${composeFiles.join(', ')}. ` +
- `Set GUARDEX_DOCKER_SERVICE and run 'bash scripts/guardex-docker-loader.sh -- '.`,
- );
- }
-}
-
-function ensureOriginBaseRef(repoRoot, baseBranch) {
- const fetch = gitRun(repoRoot, ['fetch', 'origin', baseBranch, '--quiet'], { allowFailure: true });
- if (fetch.status !== 0) {
+ const details = (result.stderr || result.stdout || '').trim();
throw new Error(
- `Unable to fetch origin/${baseBranch}. Ensure remote 'origin' exists and branch '${baseBranch}' is available.`,
+ `Failed to run scorecard CLI ('${SCORECARD_BIN} --repo ${repo} --format json').${details ? `\n${details}` : ''}`,
);
}
- const hasRemoteBase = gitRun(repoRoot, ['show-ref', '--verify', '--quiet', `refs/remotes/origin/${baseBranch}`], {
- allowFailure: true,
- });
- if (hasRemoteBase.status !== 0) {
- throw new Error(`Remote base branch not found: origin/${baseBranch}`);
- }
-}
-
-function lockRegistryStatus(repoRoot) {
- const result = gitRun(repoRoot, ['status', '--porcelain', '--', LOCK_FILE_RELATIVE], { allowFailure: true });
- if (result.status !== 0) {
- return { dirty: false, untracked: false };
- }
- const lines = (result.stdout || '').split('\n').filter((line) => line.length > 0);
- if (lines.length === 0) {
- return { dirty: false, untracked: false };
- }
- const untracked = lines.some((line) => line.startsWith('??'));
- return { dirty: true, untracked };
-}
-
-
-function listAgentWorktrees(repoRoot) {
- const result = gitRun(repoRoot, ['worktree', 'list', '--porcelain'], { allowFailure: true });
- if (result.status !== 0) {
- throw new Error('Unable to list git worktrees for finish command');
- }
-
- const entries = [];
- let currentPath = '';
- let currentBranchRef = '';
- const lines = String(result.stdout || '').split('\n');
- for (const line of lines) {
- if (!line.trim()) {
- if (currentPath && currentBranchRef.startsWith('refs/heads/agent/')) {
- entries.push({
- worktreePath: currentPath,
- branch: currentBranchRef.replace(/^refs\/heads\//, ''),
- });
- }
- currentPath = '';
- currentBranchRef = '';
- continue;
- }
- if (line.startsWith('worktree ')) {
- currentPath = line.slice('worktree '.length).trim();
- continue;
- }
- if (line.startsWith('branch ')) {
- currentBranchRef = line.slice('branch '.length).trim();
- continue;
- }
- }
- if (currentPath && currentBranchRef.startsWith('refs/heads/agent/')) {
- entries.push({
- worktreePath: currentPath,
- branch: currentBranchRef.replace(/^refs\/heads\//, ''),
- });
- }
-
- return entries;
-}
-function listLocalAgentBranchesForFinish(repoRoot) {
- const result = gitRun(
- repoRoot,
- ['for-each-ref', '--format=%(refname:short)', 'refs/heads/agent/'],
- { allowFailure: true },
- );
- if (result.status !== 0) {
- throw new Error('Unable to list local agent branches');
+ try {
+ return JSON.parse(result.stdout || '{}');
+ } catch (error) {
+ throw new Error(`Unable to parse scorecard JSON output: ${error.message}`);
}
- return uniquePreserveOrder(
- String(result.stdout || '')
- .split('\n')
- .map((line) => line.trim())
- .filter((line) => line.startsWith('agent/')),
- );
}
-function gitQuietChangeResult(worktreePath, args) {
- const result = run('git', ['-C', worktreePath, ...args], { stdio: 'pipe' });
- if (result.status === 0) {
- return false;
+function readScorecardJsonFile(filePath) {
+ const absolute = path.resolve(filePath);
+ if (!fs.existsSync(absolute)) {
+ throw new Error(`scorecard JSON file not found: ${absolute}`);
}
- if (result.status === 1) {
- return true;
+ try {
+ return JSON.parse(fs.readFileSync(absolute, 'utf8'));
+ } catch (error) {
+ throw new Error(`Unable to parse scorecard JSON file: ${error.message}`);
}
- throw new Error(
- `git ${args.join(' ')} failed in ${worktreePath}: ${(
- result.stderr || result.stdout || ''
- ).trim()}`,
- );
}
-function worktreeHasLocalChanges(worktreePath) {
- const hasUnstaged = gitQuietChangeResult(worktreePath, [
- 'diff',
- '--quiet',
- '--',
- '.',
- ':(exclude).omx/state/agent-file-locks.json',
- ]);
- if (hasUnstaged) {
- return true;
- }
-
- const hasStaged = gitQuietChangeResult(worktreePath, [
- 'diff',
- '--cached',
- '--quiet',
- '--',
- '.',
- ':(exclude).omx/state/agent-file-locks.json',
- ]);
- if (hasStaged) {
- return true;
- }
-
- const untracked = run('git', ['-C', worktreePath, 'ls-files', '--others', '--exclude-standard'], {
- stdio: 'pipe',
+function normalizeScorecardChecks(payload) {
+ const rawChecks = Array.isArray(payload?.checks) ? payload.checks : [];
+ return rawChecks.map((check) => {
+ const name = String(check?.name || 'Unknown');
+ const rawScore = Number(check?.score);
+ const score = Number.isFinite(rawScore) ? rawScore : 0;
+ return {
+ name,
+ score,
+ risk: SCORECARD_RISK_BY_CHECK[name] || 'Unknown',
+ };
});
- if (untracked.status !== 0) {
- throw new Error(`Unable to inspect untracked files in ${worktreePath}`);
- }
- return String(untracked.stdout || '').trim().length > 0;
-}
-
-function gitOutputLines(worktreePath, args) {
- const result = run('git', ['-C', worktreePath, ...args], { stdio: 'pipe' });
- if (result.status !== 0) {
- throw new Error(
- `git ${args.join(' ')} failed in ${worktreePath}: ${(
- result.stderr || result.stdout || ''
- ).trim()}`,
- );
- }
- return String(result.stdout || '')
- .split('\n')
- .map((line) => line.trim())
- .filter(Boolean);
}
-function claimLocksForAutoCommit(repoRoot, worktreePath, branch) {
- const changedFiles = uniquePreserveOrder([
- ...gitOutputLines(worktreePath, ['diff', '--name-only', '--', '.', ':(exclude).omx/state/agent-file-locks.json']),
- ...gitOutputLines(worktreePath, ['diff', '--cached', '--name-only', '--', '.', ':(exclude).omx/state/agent-file-locks.json']),
- ...gitOutputLines(worktreePath, ['ls-files', '--others', '--exclude-standard']),
- ]);
-
- if (changedFiles.length > 0) {
- const claim = runPackageAsset('lockTool', ['claim', '--branch', branch, ...changedFiles], {
- cwd: repoRoot,
- stdio: 'pipe',
- });
- if (claim.status !== 0) {
- throw new Error(
- `Lock claim failed for ${branch}: ${(
- claim.stderr || claim.stdout || ''
- ).trim()}`,
- );
- }
- }
+function renderScorecardBaselineMarkdown({ repo, score, checks, capturedAt, scorecardVersion, reportDate }) {
+ const rows = checks
+ .map((item) => `| ${item.name} | ${item.score} | ${item.risk} |`)
+ .join('\n');
- const deletedFiles = uniquePreserveOrder([
- ...gitOutputLines(worktreePath, [
- 'diff',
- '--name-only',
- '--diff-filter=D',
- '--',
- '.',
- ':(exclude).omx/state/agent-file-locks.json',
- ]),
- ...gitOutputLines(worktreePath, [
- 'diff',
- '--cached',
- '--name-only',
- '--diff-filter=D',
- '--',
- '.',
- ':(exclude).omx/state/agent-file-locks.json',
- ]),
- ]);
-
- if (deletedFiles.length > 0) {
- const allowDelete = runPackageAsset('lockTool', ['allow-delete', '--branch', branch, ...deletedFiles], {
- cwd: repoRoot,
- stdio: 'pipe',
- });
- if (allowDelete.status !== 0) {
- throw new Error(
- `Delete-lock grant failed for ${branch}: ${(
- allowDelete.stderr || allowDelete.stdout || ''
- ).trim()}`,
- );
- }
- }
+ return [
+ '# OpenSSF Scorecard Baseline Report',
+ '',
+ `- **Repository:** \`${repo}\``,
+ '- **Source:** generated by `gx report scorecard`',
+ `- **Captured at:** ${capturedAt}`,
+ `- **Scorecard version:** \`${scorecardVersion}\``,
+ `- **Overall score:** **${score} / 10**`,
+ '',
+ '## Check breakdown',
+ '',
+ '| Check | Score | Risk |',
+ '|---|---:|---|',
+ rows || '| (none) | 0 | Unknown |',
+ '',
+ `## Report date`,
+ '',
+ `- ${reportDate}`,
+ '',
+ ].join('\n');
}
-function resolveFinishBaseBranch(repoRoot, _sourceBranch, explicitBase) {
- if (explicitBase) {
- return explicitBase;
- }
-
- const configured = readGitConfig(repoRoot, GIT_BASE_BRANCH_KEY);
- if (configured) {
- return configured;
- }
+function renderScorecardRemediationPlanMarkdown({ baselineRelativePath, checks }) {
+ const failing = checks.filter((item) => item.score < 10);
+ const failingRows = failing
+ .sort((a, b) => a.score - b.score || a.name.localeCompare(b.name))
+ .map((item) => `| ${item.name} | ${item.score} | ${item.risk} |`)
+ .join('\n');
- return DEFAULT_BASE_BRANCH;
+ return [
+ '# OpenSSF Scorecard Remediation Plan',
+ '',
+ `Based on baseline report: \`${baselineRelativePath}\`.`,
+ '',
+ '## Failing checks',
+ '',
+ '| Check | Score | Risk |',
+ '|---|---:|---|',
+ (failingRows || '| None | 10 | N/A |'),
+ '',
+ '## Priority order',
+ '',
+ '1. Fix **High** risk checks first (especially score 0 items).',
+ '2. Then close **Medium** risk checks with score < 10.',
+ '3. Finally address **Low** risk ecosystem/process checks.',
+ '',
+ '## Verification loop',
+ '',
+ '1. Run scorecard again.',
+ '2. Re-generate baseline + remediation files.',
+ '3. Compare score deltas and track improved checks.',
+ '',
+ ].join('\n');
}
-function autoCommitWorktreeForFinish(repoRoot, worktreePath, branch, options) {
- const hasChanges = worktreeHasLocalChanges(worktreePath);
- if (!hasChanges) {
- return { changed: false, committed: false };
- }
-
- if (options.noAutoCommit) {
- throw new Error(
- `Branch '${branch}' has local changes in ${worktreePath}. Re-run without --no-auto-commit or commit manually first.`,
- );
- }
-
- if (options.dryRun) {
- return { changed: true, committed: false, dryRun: true };
- }
-
- claimLocksForAutoCommit(repoRoot, worktreePath, branch);
-
- const addResult = run('git', ['-C', worktreePath, 'add', '-A'], { stdio: 'pipe' });
- if (addResult.status !== 0) {
- throw new Error(`git add failed in ${worktreePath}: ${(addResult.stderr || addResult.stdout || '').trim()}`);
- }
-
- const stagedHasChanges = gitQuietChangeResult(worktreePath, [
- 'diff',
- '--cached',
- '--quiet',
- '--',
- '.',
- ':(exclude).omx/state/agent-file-locks.json',
- ]);
- if (!stagedHasChanges) {
- return { changed: true, committed: false };
- }
-
- const commitMessage = options.commitMessage || `Auto-finish: ${branch}`;
- const commitResult = run('git', ['-C', worktreePath, 'commit', '-m', commitMessage], { stdio: 'pipe' });
- if (commitResult.status !== 0) {
- throw new Error(
- `Auto-commit failed on '${branch}': ${(
- commitResult.stderr || commitResult.stdout || ''
- ).trim()}`,
- );
- }
-
- return { changed: true, committed: true, message: commitMessage };
+function parseBranchList(rawValue) {
+ return String(rawValue || '')
+ .split(/[\s,]+/)
+ .map((item) => item.trim())
+ .filter(Boolean);
}
-function syncOperation(repoRoot, strategy, baseRef, ffOnly) {
- if (strategy === 'rebase') {
- if (ffOnly) {
- throw new Error('--ff-only is only supported with --strategy merge');
- }
- const rebased = run('git', ['-C', repoRoot, 'rebase', baseRef], { stdio: 'pipe' });
- if (rebased.status !== 0) {
- const details = (rebased.stderr || rebased.stdout || '').trim();
- const gitDir = path.join(repoRoot, '.git');
- const rebaseActive = fs.existsSync(path.join(gitDir, 'rebase-merge')) || fs.existsSync(path.join(gitDir, 'rebase-apply'));
- const help = rebaseActive
- ? '\nResolve conflicts, then run: git rebase --continue\nOr abort: git rebase --abort'
- : '';
- throw new Error(`Sync failed during rebase onto ${baseRef}.${details ? `\n${details}` : ''}${help}`);
- }
- return;
- }
-
- const mergeArgs = ['-C', repoRoot, 'merge', '--no-edit'];
- if (ffOnly) {
- mergeArgs.push('--ff-only');
- }
- mergeArgs.push(baseRef);
- const merged = run('git', mergeArgs, { stdio: 'pipe' });
- if (merged.status !== 0) {
- const details = (merged.stderr || merged.stdout || '').trim();
- const gitDir = path.join(repoRoot, '.git');
- const mergeActive = fs.existsSync(path.join(gitDir, 'MERGE_HEAD'));
- const help = mergeActive ? '\nResolve conflicts, then run: git commit\nOr abort: git merge --abort' : '';
- throw new Error(`Sync failed during merge from ${baseRef}.${details ? `\n${details}` : ''}${help}`);
+function originRemoteLooksLikeGithub(repoRoot) {
+ const originUrl = readGitConfig(repoRoot, 'remote.origin.url');
+ if (!originUrl) {
+ return false;
}
+ return /github\.com[:/]/i.test(originUrl);
}
function isInteractiveTerminal() {
@@ -2364,7 +1067,7 @@ function printUpdateAvailableBanner(current, latest) {
}
function maybeSelfUpdateBeforeStatus() {
- return getToolchainApi().maybeSelfUpdateBeforeStatus();
+ return toolchainModule.maybeSelfUpdateBeforeStatus();
}
function readInstalledGuardexVersion() {
@@ -2499,7 +1202,7 @@ function printOpenSpecUpdateAvailableBanner(current, latest) {
}
function maybeOpenSpecUpdateBeforeStatus() {
- return getToolchainApi().maybeOpenSpecUpdateBeforeStatus();
+ return toolchainModule.maybeOpenSpecUpdateBeforeStatus();
}
function promptYesNoStrict(question) {
@@ -2678,7 +1381,7 @@ function askGlobalInstallForMissing(options, missingPackages, missingLocalTools)
}
function installGlobalToolchain(options) {
- return getToolchainApi().installGlobalToolchain(options);
+ return toolchainModule.installGlobalToolchain(options);
}
function findStaleLockPaths(repoRoot, locks) {
@@ -3038,9 +1741,9 @@ function status(rawArgs) {
json: false,
});
- const toolchain = detectGlobalToolchainPackages();
+ const toolchain = toolchainModule.detectGlobalToolchainPackages();
const npmServices = GLOBAL_TOOLCHAIN_PACKAGES.map((pkg) => {
- const service = getGlobalToolchainService(pkg);
+ const service = toolchainModule.getGlobalToolchainService(pkg);
if (!toolchain.ok) {
return {
name: service.name,
@@ -3058,12 +1761,12 @@ function status(rawArgs) {
status: toolchain.installed.includes(pkg) ? 'active' : 'inactive',
};
});
- const localCompanionServices = detectOptionalLocalCompanionTools().map((tool) => ({
+ const localCompanionServices = toolchainModule.detectOptionalLocalCompanionTools().map((tool) => ({
name: tool.name,
displayName: tool.displayName || tool.name,
status: tool.status,
}));
- const requiredSystemTools = detectRequiredSystemTools();
+ const requiredSystemTools = toolchainModule.detectRequiredSystemTools();
const services = [
...npmServices,
...localCompanionServices,
@@ -3132,7 +1835,7 @@ function status(rawArgs) {
console.log(
`[${TOOL_NAME}] Optional companion tools inactive: ${inactiveOptionalCompanions.join(', ')}`,
);
- for (const warning of describeMissingGlobalDependencyWarnings(
+ for (const warning of toolchainModule.describeMissingGlobalDependencyWarnings(
npmServices
.filter((service) => service.status === 'inactive')
.map((service) => service.packageName),
@@ -3394,7 +2097,7 @@ function doctor(rawArgs) {
cleanupProtectedBaseSandbox,
ensureOmxScaffold,
configureHooks,
- autoFinishReadyAgentBranches,
+ autoFinishReadyAgentBranches: doctorModule.autoFinishReadyAgentBranches,
});
return;
}
@@ -3412,7 +2115,7 @@ function doctor(rawArgs) {
failed: 0,
details: [],
}
- : autoFinishReadyAgentBranches(scanResult.repoRoot, {
+ : doctorModule.autoFinishReadyAgentBranches(scanResult.repoRoot, {
baseBranch: currentBaseBranch,
dryRun: singleRepoOptions.dryRun,
waitForMerge: singleRepoOptions.waitForMerge,
@@ -3868,7 +2571,7 @@ function setup(rawArgs) {
allowProtectedBaseWrite: false,
});
- const globalInstallStatus = installGlobalToolchain(options);
+ const globalInstallStatus = toolchainModule.installGlobalToolchain(options);
if (globalInstallStatus.status === 'installed') {
console.log(
`[${TOOL_NAME}] ✅ Companion tools installed (${(globalInstallStatus.packages || []).join(', ')}).`,
@@ -3876,7 +2579,7 @@ function setup(rawArgs) {
} else if (globalInstallStatus.status === 'already-installed') {
console.log(`[${TOOL_NAME}] ✅ Companion tools already installed. Skipping.`);
} else if (globalInstallStatus.status === 'failed') {
- const installCommands = describeCompanionInstallCommands(
+ const installCommands = toolchainModule.describeCompanionInstallCommands(
GLOBAL_TOOLCHAIN_PACKAGES,
OPTIONAL_LOCAL_COMPANION_TOOLS,
);
@@ -3892,13 +2595,13 @@ function setup(rawArgs) {
);
} else if (globalInstallStatus.status === 'skipped') {
console.log(`[${TOOL_NAME}] ⚠️ Companion installs skipped by user choice.`);
- for (const warning of describeMissingGlobalDependencyWarnings(
+ for (const warning of toolchainModule.describeMissingGlobalDependencyWarnings(
globalInstallStatus.missingPackages || [],
)) {
console.log(`[${TOOL_NAME}] ⚠️ ${warning}`);
}
}
- const requiredSystemTools = detectRequiredSystemTools();
+ const requiredSystemTools = toolchainModule.detectRequiredSystemTools();
const missingSystemTools = requiredSystemTools.filter((tool) => tool.status !== 'active');
if (missingSystemTools.length === 0) {
console.log(`[${TOOL_NAME}] ✅ Required system tools available (${requiredSystemTools.map((tool) => tool.name).join(', ')}).`);
@@ -3966,7 +2669,7 @@ function setup(rawArgs) {
const scanResult = runScanInternal({ target: repoPath, json: false });
const currentBaseBranch = currentBranchName(scanResult.repoRoot);
- const autoFinishSummary = autoFinishReadyAgentBranches(scanResult.repoRoot, {
+ const autoFinishSummary = doctorModule.autoFinishReadyAgentBranches(scanResult.repoRoot, {
baseBranch: currentBaseBranch,
dryRun: perRepoOptions.dryRun,
});
@@ -4471,19 +3174,19 @@ function migrate(rawArgs) {
}
function cleanup(rawArgs) {
- return getFinishApi().cleanup(rawArgs);
+ return finishCommands.cleanup(rawArgs);
}
function merge(rawArgs) {
- return getFinishApi().merge(rawArgs);
+ return finishCommands.merge(rawArgs);
}
function finish(rawArgs, defaults = {}) {
- return getFinishApi().finish(rawArgs, defaults);
+ return finishCommands.finish(rawArgs, defaults);
}
function sync(rawArgs) {
- return getFinishApi().sync(rawArgs);
+ return finishCommands.sync(rawArgs);
}
function protect(rawArgs) {
@@ -4568,8 +3271,8 @@ function main() {
const args = process.argv.slice(2);
if (args.length === 0) {
- maybeSelfUpdateBeforeStatus();
- maybeOpenSpecUpdateBeforeStatus();
+ toolchainModule.maybeSelfUpdateBeforeStatus();
+ toolchainModule.maybeOpenSpecUpdateBeforeStatus();
status([]);
return;
}
@@ -4583,7 +3286,7 @@ function main() {
}
if (command === '--version' || command === '-v' || command === 'version') {
- maybeSelfUpdateBeforeStatus();
+ toolchainModule.maybeSelfUpdateBeforeStatus();
console.log(packageJson.version);
return;
}
diff --git a/src/doctor/index.js b/src/doctor/index.js
index 70404dc..819c48b 100644
--- a/src/doctor/index.js
+++ b/src/doctor/index.js
@@ -3,46 +3,64 @@ const {
path,
TOOL_NAME,
SHORT_TOOL_NAME,
- CLI_ENTRY_PATH,
LOCK_FILE_RELATIVE,
REQUIRED_MANAGED_REPO_FILES,
- AGENT_WORKTREE_RELATIVE_DIRS,
OMX_SCAFFOLD_DIRECTORIES,
OMX_SCAFFOLD_FILES,
+ AGENT_WORKTREE_RELATIVE_DIRS,
+ defaultAgentWorktreeRelativeDir,
} = require('../context');
const { run, runPackageAsset } = require('../core/runtime');
-const { readGitConfig, ensureRepoBranch } = require('../git');
-const { printAutoFinishSummary } = require('../output');
+const {
+ currentBranchName,
+ gitRefExists,
+ readGitConfig,
+ ensureRepoBranch,
+ hasOriginRemote,
+ aheadBehind,
+ mapWorktreePathsByBranch,
+ hasSignificantWorkingTreeChanges,
+ listLocalAgentBranches,
+} = require('../git');
+const {
+ extractAgentBranchStartMetadata,
+ resolveSandboxTarget,
+ isSpawnFailure,
+ startProtectedBaseSandbox,
+ cleanupProtectedBaseSandbox,
+} = require('../sandbox');
+const { ensureOmxScaffold, configureHooks } = require('../scaffold');
+const { detectRecoverableAutoFinishConflict, printAutoFinishSummary } = require('../output');
/**
- * @typedef {Object} AutoFinishSummary
- * @property {boolean} [enabled]
- * @property {number} [attempted]
- * @property {number} [completed]
- * @property {number} [skipped]
- * @property {number} [failed]
- * @property {string[]} [details]
- * @property {string} [baseBranch]
+ * @typedef {Object} SandboxMetadata
+ * @property {string} branch
+ * @property {string} worktreePath
*/
/**
* @typedef {Object} OperationResult
* @property {string} status
- * @property {string} note
+ * @property {string} [note]
* @property {string} [stdout]
* @property {string} [stderr]
* @property {string} [prUrl]
* @property {string[]} [stagedFiles]
* @property {string} [commitMessage]
- * @property {unknown[]} [operations]
+ * @property {OperationResult[]} [operations]
* @property {OperationResult} [cleanup]
* @property {OperationResult} [hookRefresh]
*/
/**
- * @typedef {Object} SandboxMetadata
- * @property {string} branch
- * @property {string} worktreePath
+ * @typedef {Object} AutoFinishSummary
+ * @property {boolean} [enabled]
+ * @property {number} [attempted]
+ * @property {number} [completed]
+ * @property {number} [skipped]
+ * @property {number} [failed]
+ * @property {string[]} [details]
+ * @property {string} [baseBranch]
*/
/**
@@ -69,39 +87,15 @@ const { printAutoFinishSummary } = require('../output');
* @property {string | null} sandboxLockContent
*/
-function requireDoctorIntegration(name, value) {
- if (typeof value !== 'function') {
- throw new Error(`doctor integration missing: ${name}`);
- }
- return value;
-}
-
-function appendForceArgs(args, options) {
- if (!options.force) {
- return;
- }
- args.push('--force');
- if (Array.isArray(options.forceManagedPaths) && options.forceManagedPaths.length > 0) {
- args.push(...options.forceManagedPaths);
- }
-}
-
-function resolveSandboxTarget(repoRoot, worktreePath, targetPath) {
- const resolvedTarget = path.resolve(targetPath);
- const relativeTarget = path.relative(repoRoot, resolvedTarget);
- if (relativeTarget.startsWith('..') || path.isAbsolute(relativeTarget)) {
- throw new Error(`sandbox target must stay inside repo root: ${resolvedTarget}`);
- }
- if (!relativeTarget || relativeTarget === '.') {
- return worktreePath;
- }
- return path.join(worktreePath, relativeTarget);
-}
-
function buildSandboxDoctorArgs(options, sandboxTarget) {
const args = ['doctor', '--target', sandboxTarget];
if (options.dryRun) args.push('--dry-run');
- appendForceArgs(args, options);
+ if (options.force) {
+ args.push('--force');
+ for (const managedPath of options.forceManagedPaths || []) {
+ args.push(managedPath);
+ }
+ }
if (options.skipAgents) args.push('--skip-agents');
if (options.skipPackageJson) args.push('--skip-package-json');
if (options.skipGitignore) args.push('--no-gitignore');
@@ -112,8 +106,16 @@ function buildSandboxDoctorArgs(options, sandboxTarget) {
return args;
}
-function isSpawnFailure(result) {
- return Boolean(result?.error) && typeof result?.status !== 'number';
+function originRemoteLooksLikeGithub(repoRoot) {
+ const originUrl = readGitConfig(repoRoot, 'remote.origin.url');
+ if (!originUrl) {
+ return false;
+ }
+ return /github\.com[:/]/i.test(originUrl);
+}
+
+function isCommandAvailable(commandName) {
+ return run('which', [commandName]).status === 0;
}
function parseGitPathList(output) {
@@ -123,8 +125,13 @@ function parseGitPathList(output) {
.filter((line) => line && line !== LOCK_FILE_RELATIVE);
}
-function collectWorktreePaths(worktreePath, commands) {
+function collectDoctorChangedPaths(worktreePath) {
const changed = new Set();
+ const commands = [
+ ['diff', '--name-only'],
+ ['diff', '--cached', '--name-only'],
+ ['ls-files', '--others', '--exclude-standard'],
+ ];
for (const gitArgs of commands) {
const result = run('git', ['-C', worktreePath, ...gitArgs], { timeout: 20_000 });
for (const filePath of parseGitPathList(result.stdout)) {
@@ -134,27 +141,35 @@ function collectWorktreePaths(worktreePath, commands) {
return Array.from(changed);
}
-function collectDoctorChangedPaths(worktreePath) {
- return collectWorktreePaths(worktreePath, [
- ['diff', '--name-only'],
- ['diff', '--cached', '--name-only'],
- ['ls-files', '--others', '--exclude-standard'],
- ]);
-}
-
function collectDoctorDeletedPaths(worktreePath) {
- return collectWorktreePaths(worktreePath, [
+ const deleted = new Set();
+ const commands = [
['diff', '--name-only', '--diff-filter=D'],
['diff', '--cached', '--name-only', '--diff-filter=D'],
- ]);
+ ];
+ for (const gitArgs of commands) {
+ const result = run('git', ['-C', worktreePath, ...gitArgs], { timeout: 20_000 });
+ for (const filePath of parseGitPathList(result.stdout)) {
+ deleted.add(filePath);
+ }
+ }
+ return Array.from(deleted);
}
function collectWorktreeDirtyPaths(worktreePath) {
- return collectWorktreePaths(worktreePath, [
+ const dirty = new Set();
+ const commands = [
['diff', '--name-only'],
['diff', '--cached', '--name-only'],
['ls-files', '--others', '--exclude-standard'],
- ]);
+ ];
+ for (const gitArgs of commands) {
+ const result = run('git', ['-C', worktreePath, ...gitArgs], { timeout: 20_000 });
+ for (const filePath of parseGitPathList(result.stdout)) {
+ dirty.add(filePath);
+ }
+ }
+ return Array.from(dirty);
}
function collectDoctorForceAddPaths(worktreePath) {
@@ -286,22 +301,6 @@ function autoCommitDoctorSandboxChanges(metadata) {
};
}
-function hasOriginRemote(repoRoot) {
- return run('git', ['-C', repoRoot, 'remote', 'get-url', 'origin']).status === 0;
-}
-
-function originRemoteLooksLikeGithub(repoRoot) {
- const originUrl = readGitConfig(repoRoot, 'remote.origin.url');
- if (!originUrl) {
- return false;
- }
- return /github\.com[:/]/i.test(originUrl);
-}
-
-function isCommandAvailable(commandName) {
- return run('which', [commandName]).status === 0;
-}
-
function extractAgentBranchFinishPrUrl(output) {
const match = String(output || '').match(/\[agent-branch-finish\] PR:\s*(\S+)/);
return match ? match[1] : '';
@@ -393,21 +392,7 @@ function finishDoctorSandboxBranch(blocked, metadata, options = {}) {
};
}
-function applyStash(repoRoot, stashRef) {
- if (!stashRef) {
- return;
- }
- run('git', ['-C', repoRoot, 'stash', 'apply', stashRef], { timeout: 30_000 });
-}
-
-function dropStash(repoRoot, stashRef) {
- if (!stashRef) {
- return;
- }
- run('git', ['-C', repoRoot, 'stash', 'drop', stashRef], { timeout: 20_000 });
-}
-
-function mergeDoctorSandboxRepairsBackToProtectedBase(options, blocked, metadata, autoCommitResult, finishResult, integrations) {
+function mergeDoctorSandboxRepairsBackToProtectedBase(options, blocked, metadata, autoCommitResult, finishResult) {
if (options.dryRun) {
return {
status: autoCommitResult.status === 'committed' ? 'would-merge' : 'skipped',
@@ -447,132 +432,133 @@ function mergeDoctorSandboxRepairsBackToProtectedBase(options, blocked, metadata
]);
const dirtyPaths = collectWorktreeDirtyPaths(blocked.repoRoot);
let stashRef = '';
- let mergeSucceeded = false;
-
- try {
- if (dirtyPaths.length > 0) {
- const unexpectedPaths = dirtyPaths.filter((filePath) => {
- if (allowedPaths.has(filePath)) {
- return false;
- }
- return !AGENT_WORKTREE_RELATIVE_DIRS.some(
- (relativeDir) => filePath === relativeDir || filePath.startsWith(`${relativeDir}/`),
- );
- });
- if (unexpectedPaths.length > 0) {
- return {
- status: 'failed',
- note: `protected branch workspace has unrelated local changes: ${unexpectedPaths.join(', ')}`,
- };
+ if (dirtyPaths.length > 0) {
+ const unexpectedPaths = dirtyPaths.filter((filePath) => {
+ if (allowedPaths.has(filePath)) {
+ return false;
}
-
- const stashMessage = `guardex-doctor-merge-${Date.now()}`;
- const stashResult = run(
- 'git',
- ['-C', blocked.repoRoot, 'stash', 'push', '--all', '--message', stashMessage],
- { timeout: 30_000 },
+ return !AGENT_WORKTREE_RELATIVE_DIRS.some(
+ (relativeDir) => filePath === relativeDir || filePath.startsWith(`${relativeDir}/`),
);
- if (isSpawnFailure(stashResult)) {
- return {
- status: 'failed',
- note: 'could not stash protected branch doctor drift before merge',
- stdout: stashResult.stdout || '',
- stderr: stashResult.stderr || '',
- };
- }
- if (stashResult.status !== 0) {
- return {
- status: 'failed',
- note: 'stashing protected branch doctor drift failed',
- stdout: stashResult.stdout || '',
- stderr: stashResult.stderr || '',
- };
- }
-
- const stashLookup = run(
- 'git',
- ['-C', blocked.repoRoot, 'stash', 'list'],
- { timeout: 20_000 },
- );
- stashRef = String(stashLookup.stdout || '')
- .split('\n')
- .find((line) => line.includes(stashMessage))
- ?.split(':')[0]
- ?.trim() || '';
- }
-
- const restoreResult = ensureRepoBranch(blocked.repoRoot, blocked.branch);
- if (!restoreResult.ok) {
+ });
+ if (unexpectedPaths.length > 0) {
return {
status: 'failed',
- note: `could not restore protected branch '${blocked.branch}' before applying sandbox repairs`,
- stdout: restoreResult.stdout || '',
- stderr: restoreResult.stderr || '',
+ note: `protected branch workspace has unrelated local changes: ${unexpectedPaths.join(', ')}`,
};
}
-
- const mergeResult = run(
+ const stashMessage = `guardex-doctor-merge-${Date.now()}`;
+ const stashResult = run(
'git',
- ['-C', blocked.repoRoot, 'merge', '--ff-only', metadata.branch],
+ ['-C', blocked.repoRoot, 'stash', 'push', '--all', '--message', stashMessage],
{ timeout: 30_000 },
);
- if (isSpawnFailure(mergeResult)) {
+ if (isSpawnFailure(stashResult)) {
return {
status: 'failed',
- note: 'tracked doctor repair merge errored',
- stdout: mergeResult.stdout || '',
- stderr: mergeResult.stderr || '',
+ note: 'could not stash protected branch doctor drift before merge',
+ stdout: stashResult.stdout || '',
+ stderr: stashResult.stderr || '',
};
}
- if (mergeResult.status !== 0) {
+ if (stashResult.status !== 0) {
return {
status: 'failed',
- note: 'tracked doctor repair merge failed',
- stdout: mergeResult.stdout || '',
- stderr: mergeResult.stderr || '',
+ note: 'stashing protected branch doctor drift failed',
+ stdout: stashResult.stdout || '',
+ stderr: stashResult.stderr || '',
};
}
- mergeSucceeded = true;
- let cleanupResult;
- try {
- cleanupResult = integrations.cleanupProtectedBaseSandbox(blocked.repoRoot, metadata);
- } catch (error) {
- return {
- status: 'failed',
- note: `tracked doctor repair merge succeeded but sandbox cleanup failed: ${error.message}`,
- stdout: mergeResult.stdout || '',
- stderr: mergeResult.stderr || '',
- };
- }
+ const stashLookup = run(
+ 'git',
+ ['-C', blocked.repoRoot, 'stash', 'list'],
+ { timeout: 20_000 },
+ );
+ stashRef = String(stashLookup.stdout || '')
+ .split('\n')
+ .find((line) => line.includes(stashMessage))
+ ?.split(':')[0]
+ ?.trim() || '';
+ }
- let hookRefreshResult;
- try {
- hookRefreshResult = integrations.configureHooks(blocked.repoRoot, false);
- } catch (error) {
- return {
- status: 'failed',
- note: `tracked doctor repair merge succeeded but local hook refresh failed: ${error.message}`,
- stdout: mergeResult.stdout || '',
- stderr: mergeResult.stderr || '',
- };
+ const restoreResult = ensureRepoBranch(blocked.repoRoot, blocked.branch);
+ if (!restoreResult.ok) {
+ if (stashRef) {
+ run('git', ['-C', blocked.repoRoot, 'stash', 'apply', stashRef], { timeout: 30_000 });
}
+ return {
+ status: 'failed',
+ note: `could not restore protected branch '${blocked.branch}' before applying sandbox repairs`,
+ stdout: restoreResult.stdout || '',
+ stderr: restoreResult.stderr || '',
+ };
+ }
+ const mergeResult = run(
+ 'git',
+ ['-C', blocked.repoRoot, 'merge', '--ff-only', metadata.branch],
+ { timeout: 30_000 },
+ );
+ if (isSpawnFailure(mergeResult)) {
+ if (stashRef) {
+ run('git', ['-C', blocked.repoRoot, 'stash', 'apply', stashRef], { timeout: 30_000 });
+ }
return {
- status: 'merged',
- note: 'fast-forwarded tracked doctor repairs into the protected base workspace',
+ status: 'failed',
+ note: 'tracked doctor repair merge errored',
stdout: mergeResult.stdout || '',
stderr: mergeResult.stderr || '',
- cleanup: cleanupResult,
- hookRefresh: hookRefreshResult,
};
- } finally {
- if (mergeSucceeded) {
- dropStash(blocked.repoRoot, stashRef);
- } else {
- applyStash(blocked.repoRoot, stashRef);
+ }
+ if (mergeResult.status !== 0) {
+ if (stashRef) {
+ run('git', ['-C', blocked.repoRoot, 'stash', 'apply', stashRef], { timeout: 30_000 });
}
+ return {
+ status: 'failed',
+ note: 'tracked doctor repair merge failed',
+ stdout: mergeResult.stdout || '',
+ stderr: mergeResult.stderr || '',
+ };
+ }
+
+ let cleanupResult;
+ try {
+ cleanupResult = cleanupProtectedBaseSandbox(blocked.repoRoot, metadata);
+ } catch (error) {
+ return {
+ status: 'failed',
+ note: `tracked doctor repair merge succeeded but sandbox cleanup failed: ${error.message}`,
+ stdout: mergeResult.stdout || '',
+ stderr: mergeResult.stderr || '',
+ };
}
+
+ let hookRefreshResult;
+ try {
+ hookRefreshResult = configureHooks(blocked.repoRoot, false);
+ } catch (error) {
+ return {
+ status: 'failed',
+ note: `tracked doctor repair merge succeeded but local hook refresh failed: ${error.message}`,
+ stdout: mergeResult.stdout || '',
+ stderr: mergeResult.stderr || '',
+ };
+ }
+
+ if (stashRef) {
+ run('git', ['-C', blocked.repoRoot, 'stash', 'drop', stashRef], { timeout: 20_000 });
+ }
+
+ return {
+ status: 'merged',
+ note: 'fast-forwarded tracked doctor repairs into the protected base workspace',
+ stdout: mergeResult.stdout || '',
+ stderr: mergeResult.stderr || '',
+ cleanup: cleanupResult,
+ hookRefresh: hookRefreshResult,
+ };
}
function createDoctorSkippedOperation(note = 'sandbox doctor did not complete successfully') {
@@ -605,7 +591,7 @@ function createDoctorSandboxExecutionState(note = 'sandbox doctor did not comple
};
}
-function summarizeDoctorOmxScaffoldSync(repoRoot, dryRun, ensureOmxScaffold) {
+function summarizeDoctorOmxScaffoldSync(repoRoot, dryRun) {
const omxScaffoldOps = ensureOmxScaffold(repoRoot, dryRun);
const changedOmxPaths = omxScaffoldOps.filter((operation) => operation.status !== 'unchanged');
if (changedOmxPaths.length === 0) {
@@ -704,15 +690,155 @@ function syncDoctorLockRegistryAfterMerge(repoRoot, sandboxLockContent) {
};
}
+function autoFinishReadyAgentBranches(repoRoot, options = {}) {
+ const baseBranch = String(options.baseBranch || '').trim();
+ const dryRun = Boolean(options.dryRun);
+ const waitForMerge = options.waitForMerge !== false;
+ const excludedBranches = new Set(
+ Array.isArray(options.excludeBranches)
+ ? options.excludeBranches.map((branch) => String(branch || '').trim()).filter(Boolean)
+ : [],
+ );
+
+ const summary = {
+ enabled: true,
+ baseBranch,
+ attempted: 0,
+ completed: 0,
+ skipped: 0,
+ failed: 0,
+ details: [],
+ };
+
+ if (!baseBranch || baseBranch === 'HEAD' || baseBranch.startsWith('agent/')) {
+ summary.enabled = false;
+ summary.details.push('Skipped auto-finish sweep (base branch is missing or not a non-agent local branch).');
+ return summary;
+ }
+
+ if (String(process.env.GUARDEX_DOCTOR_SANDBOX || '') === '1') {
+ summary.enabled = false;
+ summary.details.push('Skipped auto-finish sweep inside doctor sandbox pass.');
+ return summary;
+ }
+
+ if (String(process.env.GUARDEX_SKIP_AUTO_FINISH_READY_BRANCHES || '') === '1') {
+ summary.enabled = false;
+ summary.details.push('Skipped auto-finish sweep (GUARDEX_SKIP_AUTO_FINISH_READY_BRANCHES=1).');
+ return summary;
+ }
+
+ if (dryRun) {
+ summary.enabled = false;
+ summary.details.push('Skipped auto-finish sweep in dry-run mode.');
+ return summary;
+ }
+
+ if (!hasOriginRemote(repoRoot)) {
+ summary.enabled = false;
+ summary.details.push('Skipped auto-finish sweep (origin remote missing).');
+ return summary;
+ }
+ const explicitGhBin = Boolean(String(process.env.GUARDEX_GH_BIN || '').trim());
+ if (!explicitGhBin && !originRemoteLooksLikeGithub(repoRoot)) {
+ summary.enabled = false;
+ summary.details.push('Skipped auto-finish sweep (origin remote is not GitHub).');
+ return summary;
+ }
+
+ const ghBin = process.env.GUARDEX_GH_BIN || 'gh';
+ if (run(ghBin, ['--version']).status !== 0) {
+ summary.enabled = false;
+ summary.details.push(`Skipped auto-finish sweep (${ghBin} not available).`);
+ return summary;
+ }
+
+ const branchWorktrees = mapWorktreePathsByBranch(repoRoot);
+ const agentBranches = listLocalAgentBranches(repoRoot);
+ if (agentBranches.length === 0) {
+ summary.enabled = false;
+ summary.details.push('No local agent branches found for auto-finish sweep.');
+ return summary;
+ }
+
+ for (const branch of agentBranches) {
+ if (excludedBranches.has(branch)) {
+ summary.skipped += 1;
+ summary.details.push(`[skip] ${branch}: excluded from this auto-finish sweep.`);
+ continue;
+ }
+
+ if (branch === baseBranch) {
+ summary.skipped += 1;
+ summary.details.push(`[skip] ${branch}: source branch equals base branch.`);
+ continue;
+ }
+
+ let counts;
+ try {
+ counts = aheadBehind(repoRoot, branch, baseBranch);
+ } catch (error) {
+ summary.failed += 1;
+ summary.details.push(`[fail] ${branch}: unable to compute ahead/behind (${error.message}).`);
+ continue;
+ }
+
+ if (counts.ahead <= 0) {
+ summary.skipped += 1;
+ summary.details.push(`[skip] ${branch}: already merged into ${baseBranch}.`);
+ continue;
+ }
+
+ const branchWorktree = branchWorktrees.get(branch) || '';
+ if (branchWorktree && hasSignificantWorkingTreeChanges(branchWorktree)) {
+ summary.skipped += 1;
+ summary.details.push(`[skip] ${branch}: dirty worktree (${branchWorktree}).`);
+ continue;
+ }
+
+ summary.attempted += 1;
+ const finishArgs = [
+ '--branch',
+ branch,
+ '--base',
+ baseBranch,
+ '--via-pr',
+ waitForMerge ? '--wait-for-merge' : '--no-wait-for-merge',
+ '--cleanup',
+ ];
+ const finishResult = runPackageAsset('branchFinish', finishArgs, { cwd: repoRoot });
+ const combinedOutput = [finishResult.stdout || '', finishResult.stderr || ''].join('\n').trim();
+
+ if (finishResult.status === 0) {
+ summary.completed += 1;
+ summary.details.push(`[done] ${branch}: auto-finish completed.`);
+ continue;
+ }
+
+ const recoverableConflict = detectRecoverableAutoFinishConflict(combinedOutput);
+ if (recoverableConflict) {
+ summary.skipped += 1;
+ const tail = combinedOutput ? ` ${combinedOutput.split('\n').slice(-2).join(' | ')}` : '';
+ summary.details.push(`[skip] ${branch}: ${recoverableConflict.rawLabel}${tail}`);
+ continue;
+ }
+
+ summary.failed += 1;
+ const tail = combinedOutput ? ` ${combinedOutput.split('\n').slice(-2).join(' | ')}` : '';
+ summary.details.push(`[fail] ${branch}: auto-finish failed.${tail}`);
+ }
+
+ return summary;
+}
+
function executeDoctorSandboxLifecycle(options, blocked, metadata, integrations) {
const execution = createDoctorSandboxExecutionState();
const dryRun = Boolean(options.dryRun);
+ const resolvedIntegrations = integrations && typeof integrations === 'object' ? integrations : {};
+ const autoFinishRunner =
+ resolvedIntegrations.autoFinishReadyAgentBranches || autoFinishReadyAgentBranches;
- execution.omxScaffoldSync = summarizeDoctorOmxScaffoldSync(
- blocked.repoRoot,
- dryRun,
- integrations.ensureOmxScaffold,
- );
+ execution.omxScaffoldSync = summarizeDoctorOmxScaffoldSync(blocked.repoRoot, dryRun);
if (!dryRun) {
execution.autoCommit = autoCommitDoctorSandboxChanges(metadata);
@@ -738,19 +864,14 @@ function executeDoctorSandboxLifecycle(options, blocked, metadata, integrations)
metadata,
execution.autoCommit,
execution.finish,
- integrations,
);
- execution.omxScaffoldSync = summarizeDoctorOmxScaffoldSync(
- blocked.repoRoot,
- dryRun,
- integrations.ensureOmxScaffold,
- );
+ execution.omxScaffoldSync = summarizeDoctorOmxScaffoldSync(blocked.repoRoot, dryRun);
execution.lockSync = syncDoctorLockRegistryAfterMerge(
blocked.repoRoot,
execution.sandboxLockContent,
);
- execution.autoFinish = integrations.autoFinishReadyAgentBranches(blocked.repoRoot, {
+ execution.autoFinish = autoFinishRunner(blocked.repoRoot, {
baseBranch: blocked.branch,
dryRun: options.dryRun,
waitForMerge: options.waitForMerge,
@@ -898,25 +1019,9 @@ function setDoctorSandboxExitCode(nestedResult, execution) {
}
function runDoctorInSandbox(options, blocked, rawIntegrations = {}) {
- const integrations = {
- startProtectedBaseSandbox: requireDoctorIntegration(
- 'startProtectedBaseSandbox',
- rawIntegrations.startProtectedBaseSandbox,
- ),
- cleanupProtectedBaseSandbox: requireDoctorIntegration(
- 'cleanupProtectedBaseSandbox',
- rawIntegrations.cleanupProtectedBaseSandbox,
- ),
- ensureOmxScaffold: requireDoctorIntegration('ensureOmxScaffold', rawIntegrations.ensureOmxScaffold),
- configureHooks: requireDoctorIntegration('configureHooks', rawIntegrations.configureHooks),
- autoFinishReadyAgentBranches: requireDoctorIntegration(
- 'autoFinishReadyAgentBranches',
- rawIntegrations.autoFinishReadyAgentBranches,
- ),
- };
-
- /** @type {SandboxStartResult} */
- const startResult = integrations.startProtectedBaseSandbox(blocked, {
+ const integrations = rawIntegrations && typeof rawIntegrations === 'object' ? rawIntegrations : {};
+ const startSandbox = integrations.startProtectedBaseSandbox || startProtectedBaseSandbox;
+ const startResult = startSandbox(blocked, {
taskName: `${SHORT_TOOL_NAME}-doctor`,
sandboxSuffix: 'gx-doctor',
});
@@ -925,7 +1030,7 @@ function runDoctorInSandbox(options, blocked, rawIntegrations = {}) {
const sandboxTarget = resolveSandboxTarget(blocked.repoRoot, metadata.worktreePath, options.target);
const nestedResult = run(
process.execPath,
- [CLI_ENTRY_PATH, ...buildSandboxDoctorArgs(options, sandboxTarget)],
+ [require.main?.filename || process.argv[1], ...buildSandboxDoctorArgs(options, sandboxTarget)],
{ cwd: metadata.worktreePath },
);
if (isSpawnFailure(nestedResult)) {
@@ -946,5 +1051,21 @@ function runDoctorInSandbox(options, blocked, rawIntegrations = {}) {
}
module.exports = {
+ extractAgentBranchStartMetadata,
+ resolveSandboxTarget,
+ buildSandboxDoctorArgs,
+ isSpawnFailure,
+ startProtectedBaseSandbox,
+ cleanupProtectedBaseSandbox,
+ claimDoctorChangedLocks,
+ autoCommitDoctorSandboxChanges,
+ finishDoctorSandboxBranch,
+ mergeDoctorSandboxRepairsBackToProtectedBase,
+ syncDoctorLockRegistryBeforeMerge,
+ syncDoctorLockRegistryAfterMerge,
+ executeDoctorSandboxLifecycle,
+ emitDoctorSandboxJsonOutput,
+ emitDoctorSandboxConsoleOutput,
+ autoFinishReadyAgentBranches,
runDoctorInSandbox,
};
diff --git a/src/finish/index.js b/src/finish/index.js
index 9ff9eec..2fcb426 100644
--- a/src/finish/index.js
+++ b/src/finish/index.js
@@ -1,425 +1,523 @@
-function createFinishApi(deps) {
- const {
- TOOL_NAME,
- LOCK_FILE_RELATIVE,
- path,
- fs,
- run,
- runPackageAsset,
- resolveRepoRoot,
- parseCleanupArgs,
- parseMergeArgs,
- parseFinishArgs,
- parseSyncArgs,
- listAgentWorktrees,
- listLocalAgentBranchesForFinish,
- uniquePreserveOrder,
- branchExists,
- resolveFinishBaseBranch,
- worktreeHasLocalChanges,
- branchMergedIntoBase,
- autoCommitWorktreeForFinish,
- resolveBaseBranch,
- resolveSyncStrategy,
- ensureOriginBaseRef,
- gitRun,
- currentBranchName,
- workingTreeIsDirty,
- aheadBehind,
- lockRegistryStatus,
- syncOperation,
- } = deps;
-
- function cleanup(rawArgs) {
- const options = parseCleanupArgs(rawArgs);
- const repoRoot = resolveRepoRoot(options.target);
-
- const args = [];
- if (options.base) {
- args.push('--base', options.base);
- }
- if (options.branch) {
- args.push('--branch', options.branch);
- }
- if (options.forceDirty) {
- args.push('--force-dirty');
- }
- if (options.dryRun) {
- args.push('--dry-run');
- }
- if (!options.keepCleanWorktrees) {
- args.push('--only-dirty-worktrees');
- }
- if (options.includePrMerged) {
- args.push('--include-pr-merged');
- }
- if (options.idleMinutes > 0) {
- args.push('--idle-minutes', String(options.idleMinutes));
- }
- if (options.maxBranches > 0) {
- args.push('--max-branches', String(options.maxBranches));
- }
- args.push('--delete-branches');
- if (!options.keepRemote) {
- args.push('--delete-remote-branches');
+const { TOOL_NAME, LOCK_FILE_RELATIVE, path, fs } = require('../context');
+const { run, runPackageAsset } = require('../core/runtime');
+const {
+ resolveRepoRoot,
+ uniquePreserveOrder,
+ listAgentWorktrees,
+ listLocalAgentBranchesForFinish,
+ branchExists,
+ resolveFinishBaseBranch,
+ worktreeHasLocalChanges,
+ branchMergedIntoBase,
+ resolveBaseBranch,
+ resolveSyncStrategy,
+ ensureOriginBaseRef,
+ gitRun,
+ currentBranchName,
+ workingTreeIsDirty,
+ aheadBehind,
+ lockRegistryStatus,
+ syncOperation,
+ gitOutputLines,
+} = require('../git');
+const {
+ parseCleanupArgs,
+ parseMergeArgs,
+ parseFinishArgs,
+ parseSyncArgs,
+} = require('../cli/args');
+
+function claimLocksForAutoCommit(repoRoot, worktreePath, branch) {
+ const changedFiles = uniquePreserveOrder([
+ ...gitOutputLines(worktreePath, ['diff', '--name-only', '--', '.', ':(exclude).omx/state/agent-file-locks.json']),
+ ...gitOutputLines(worktreePath, ['diff', '--cached', '--name-only', '--', '.', ':(exclude).omx/state/agent-file-locks.json']),
+ ...gitOutputLines(worktreePath, ['ls-files', '--others', '--exclude-standard']),
+ ]);
+
+ if (changedFiles.length > 0) {
+ const claim = runPackageAsset('lockTool', ['claim', '--branch', branch, ...changedFiles], {
+ cwd: repoRoot,
+ stdio: 'pipe',
+ });
+ if (claim.status !== 0) {
+ throw new Error(
+ `Lock claim failed for ${branch}: ${(
+ claim.stderr || claim.stdout || ''
+ ).trim()}`,
+ );
}
+ }
- const runCleanupCycle = () => {
- const runResult = runPackageAsset('worktreePrune', args, { cwd: repoRoot, stdio: 'inherit' });
- if (runResult.status !== 0) {
- throw new Error('Cleanup command failed');
- }
- };
-
- if (options.watch) {
- let cycle = 0;
- while (true) {
- cycle += 1;
- console.log(
- `[${TOOL_NAME}] Cleanup watch cycle=${cycle} (interval=${options.intervalSeconds}s, idleMinutes=${options.idleMinutes}, maxBranches=${options.maxBranches > 0 ? options.maxBranches : 'unbounded'}).`,
- );
- runCleanupCycle();
- if (options.once) {
- break;
- }
- const sleepResult = run('sleep', [String(options.intervalSeconds)], { cwd: repoRoot });
- if (sleepResult.status !== 0) {
- throw new Error(`Cleanup watch sleep failed (interval=${options.intervalSeconds}s)`);
- }
- }
- process.exitCode = 0;
- return;
+ const deletedFiles = uniquePreserveOrder([
+ ...gitOutputLines(worktreePath, [
+ 'diff',
+ '--name-only',
+ '--diff-filter=D',
+ '--',
+ '.',
+ ':(exclude).omx/state/agent-file-locks.json',
+ ]),
+ ...gitOutputLines(worktreePath, [
+ 'diff',
+ '--cached',
+ '--name-only',
+ '--diff-filter=D',
+ '--',
+ '.',
+ ':(exclude).omx/state/agent-file-locks.json',
+ ]),
+ ]);
+
+ if (deletedFiles.length > 0) {
+ const allowDelete = runPackageAsset('lockTool', ['allow-delete', '--branch', branch, ...deletedFiles], {
+ cwd: repoRoot,
+ stdio: 'pipe',
+ });
+ if (allowDelete.status !== 0) {
+ throw new Error(
+ `Delete-lock grant failed for ${branch}: ${(
+ allowDelete.stderr || allowDelete.stdout || ''
+ ).trim()}`,
+ );
}
+ }
+}
- runCleanupCycle();
- process.exitCode = 0;
+function autoCommitWorktreeForFinish(repoRoot, worktreePath, branch, options) {
+ const hasChanges = worktreeHasLocalChanges(worktreePath);
+ if (!hasChanges) {
+ return { changed: false, committed: false };
}
- function merge(rawArgs) {
- const options = parseMergeArgs(rawArgs);
- const repoRoot = resolveRepoRoot(options.target);
+ if (options.noAutoCommit) {
+ throw new Error(
+ `Branch '${branch}' has local changes in ${worktreePath}. Re-run without --no-auto-commit or commit manually first.`,
+ );
+ }
- const args = [];
- if (options.base) {
- args.push('--base', options.base);
- }
- if (options.into) {
- args.push('--into', options.into);
- }
- if (options.task) {
- args.push('--task', options.task);
- }
- if (options.agent) {
- args.push('--agent', options.agent);
- }
- for (const branch of options.branches) {
- args.push('--branch', branch);
- }
+ if (options.dryRun) {
+ return { changed: true, committed: false, dryRun: true };
+ }
- const mergeResult = runPackageAsset('branchMerge', args, { cwd: repoRoot, stdio: 'pipe' });
- if (mergeResult.stdout) {
- process.stdout.write(mergeResult.stdout);
- }
- if (mergeResult.stderr) {
- process.stderr.write(mergeResult.stderr);
- }
- if (mergeResult.status !== 0) {
- throw new Error(`merge command failed with status ${mergeResult.status}`);
- }
+ claimLocksForAutoCommit(repoRoot, worktreePath, branch);
- process.exitCode = 0;
+ const addResult = run('git', ['-C', worktreePath, 'add', '-A'], { stdio: 'pipe' });
+ if (addResult.status !== 0) {
+ throw new Error(`git add failed in ${worktreePath}: ${(addResult.stderr || addResult.stdout || '').trim()}`);
}
- function finish(rawArgs, defaults = {}) {
- const options = parseFinishArgs(rawArgs, defaults);
- const repoRoot = resolveRepoRoot(options.target);
+ const stagedHasChanges = run('git', [
+ '-C',
+ worktreePath,
+ 'diff',
+ '--cached',
+ '--quiet',
+ '--',
+ '.',
+ ':(exclude).omx/state/agent-file-locks.json',
+ ], { stdio: 'pipe' }).status === 1;
+ if (!stagedHasChanges) {
+ return { changed: true, committed: false };
+ }
- const worktreeEntries = listAgentWorktrees(repoRoot);
- const worktreeByBranch = new Map(worktreeEntries.map((entry) => [entry.branch, entry.worktreePath]));
+ const commitMessage = options.commitMessage || `Auto-finish: ${branch}`;
+ const commitResult = run('git', ['-C', worktreePath, 'commit', '-m', commitMessage], { stdio: 'pipe' });
+ if (commitResult.status !== 0) {
+ throw new Error(
+ `Auto-commit failed on '${branch}': ${(
+ commitResult.stderr || commitResult.stdout || ''
+ ).trim()}`,
+ );
+ }
- let candidateBranches = [];
- if (options.branch) {
- if (!branchExists(repoRoot, options.branch)) {
- throw new Error(`Local branch not found: ${options.branch}`);
- }
- candidateBranches = [options.branch];
- } else {
- candidateBranches = uniquePreserveOrder([
- ...listLocalAgentBranchesForFinish(repoRoot),
- ...worktreeEntries.map((entry) => entry.branch),
- ]);
- }
+ return { changed: true, committed: true, message: commitMessage };
+}
- const candidates = [];
- for (const branch of candidateBranches) {
- const worktreePath = worktreeByBranch.get(branch) || '';
- const baseBranch = resolveFinishBaseBranch(repoRoot, branch, options.base);
- const hasChanges = worktreePath ? worktreeHasLocalChanges(worktreePath) : false;
- const alreadyMerged = branchMergedIntoBase(repoRoot, branch, baseBranch);
- if (options.all || options.branch || hasChanges || !alreadyMerged) {
- candidates.push({
- branch,
- baseBranch,
- worktreePath,
- hasChanges,
- alreadyMerged,
- });
- }
- }
+function cleanup(rawArgs) {
+ const options = parseCleanupArgs(rawArgs);
+ const repoRoot = resolveRepoRoot(options.target);
- if (candidates.length === 0) {
- console.log(`[${TOOL_NAME}] No pending agent branches to finish.`);
- process.exitCode = 0;
- return;
- }
+ const args = [];
+ if (options.base) {
+ args.push('--base', options.base);
+ }
+ if (options.branch) {
+ args.push('--branch', options.branch);
+ }
+ if (options.forceDirty) {
+ args.push('--force-dirty');
+ }
+ if (options.dryRun) {
+ args.push('--dry-run');
+ }
+ if (!options.keepCleanWorktrees) {
+ args.push('--only-dirty-worktrees');
+ }
+ if (options.includePrMerged) {
+ args.push('--include-pr-merged');
+ }
+ if (options.idleMinutes > 0) {
+ args.push('--idle-minutes', String(options.idleMinutes));
+ }
+ if (options.maxBranches > 0) {
+ args.push('--max-branches', String(options.maxBranches));
+ }
+ args.push('--delete-branches');
+ if (!options.keepRemote) {
+ args.push('--delete-remote-branches');
+ }
- let succeeded = 0;
- let failed = 0;
- let autoCommitted = 0;
+ const runCleanupCycle = () => {
+ const runResult = runPackageAsset('worktreePrune', args, { cwd: repoRoot, stdio: 'inherit' });
+ if (runResult.status !== 0) {
+ throw new Error('Cleanup command failed');
+ }
+ };
- for (const candidate of candidates) {
- const { branch, baseBranch, worktreePath } = candidate;
+ if (options.watch) {
+ let cycle = 0;
+ while (true) {
+ cycle += 1;
console.log(
- `[${TOOL_NAME}] Finishing '${branch}' -> '${baseBranch}'${worktreePath ? ` (${worktreePath})` : ''}...`,
+ `[${TOOL_NAME}] Cleanup watch cycle=${cycle} (interval=${options.intervalSeconds}s, idleMinutes=${options.idleMinutes}, maxBranches=${options.maxBranches > 0 ? options.maxBranches : 'unbounded'}).`,
);
+ runCleanupCycle();
+ if (options.once) {
+ break;
+ }
+ const sleepResult = run('sleep', [String(options.intervalSeconds)], { cwd: repoRoot });
+ if (sleepResult.status !== 0) {
+ throw new Error(`Cleanup watch sleep failed (interval=${options.intervalSeconds}s)`);
+ }
+ }
+ process.exitCode = 0;
+ return;
+ }
- try {
- let commitState = { changed: false, committed: false };
- if (worktreePath) {
- commitState = autoCommitWorktreeForFinish(repoRoot, worktreePath, branch, options);
- }
+ runCleanupCycle();
+ process.exitCode = 0;
+}
- if (commitState.committed) {
- autoCommitted += 1;
- console.log(`[${TOOL_NAME}] Auto-committed '${branch}' before finish.`);
- } else if (commitState.changed && commitState.dryRun) {
- console.log(`[${TOOL_NAME}] [dry-run] Would auto-commit pending changes on '${branch}'.`);
- }
+function merge(rawArgs) {
+ const options = parseMergeArgs(rawArgs);
+ const repoRoot = resolveRepoRoot(options.target);
- const finishArgs = [
- '--branch',
- branch,
- '--base',
- baseBranch,
- options.waitForMerge ? '--wait-for-merge' : '--no-wait-for-merge',
- options.cleanup ? '--cleanup' : '--no-cleanup',
- ];
- if (options.mergeMode === 'pr') {
- finishArgs.push('--via-pr');
- } else if (options.mergeMode === 'direct') {
- finishArgs.push('--direct-only');
- } else {
- finishArgs.push('--mode', 'auto');
- }
- if (options.keepRemote) {
- finishArgs.push('--keep-remote-branch');
- }
+ const args = [];
+ if (options.base) {
+ args.push('--base', options.base);
+ }
+ if (options.into) {
+ args.push('--into', options.into);
+ }
+ if (options.task) {
+ args.push('--task', options.task);
+ }
+ if (options.agent) {
+ args.push('--agent', options.agent);
+ }
+ for (const branch of options.branches) {
+ args.push('--branch', branch);
+ }
- if (options.dryRun) {
- console.log(`[${TOOL_NAME}] [dry-run] Would run: gx branch finish ${finishArgs.join(' ')}`);
- succeeded += 1;
- continue;
- }
+ const mergeResult = runPackageAsset('branchMerge', args, { cwd: repoRoot, stdio: 'pipe' });
+ if (mergeResult.stdout) {
+ process.stdout.write(mergeResult.stdout);
+ }
+ if (mergeResult.stderr) {
+ process.stderr.write(mergeResult.stderr);
+ }
+ if (mergeResult.status !== 0) {
+ throw new Error(`merge command failed with status ${mergeResult.status}`);
+ }
- const finishResult = runPackageAsset('branchFinish', finishArgs, { cwd: repoRoot, stdio: 'pipe' });
- if (finishResult.stdout) {
- process.stdout.write(finishResult.stdout);
- }
- if (finishResult.stderr) {
- process.stderr.write(finishResult.stderr);
- }
- if (finishResult.status !== 0) {
- throw new Error(`agent-branch-finish exited with status ${finishResult.status}`);
- }
+ process.exitCode = 0;
+}
- succeeded += 1;
- } catch (error) {
- failed += 1;
- console.error(`[${TOOL_NAME}] Finish failed for '${branch}': ${error.message}`);
- if (options.failFast) {
- break;
- }
- }
- }
+function finish(rawArgs, defaults = {}) {
+ const options = parseFinishArgs(rawArgs, defaults);
+ const repoRoot = resolveRepoRoot(options.target);
- console.log(
- `[${TOOL_NAME}] Finish summary: total=${candidates.length}, success=${succeeded}, failed=${failed}, autoCommitted=${autoCommitted}`,
- );
+ const worktreeEntries = listAgentWorktrees(repoRoot);
+ const worktreeByBranch = new Map(worktreeEntries.map((entry) => [entry.branch, entry.worktreePath]));
+
+ let candidateBranches = [];
+ if (options.branch) {
+ if (!branchExists(repoRoot, options.branch)) {
+ throw new Error(`Local branch not found: ${options.branch}`);
+ }
+ candidateBranches = [options.branch];
+ } else {
+ candidateBranches = uniquePreserveOrder([
+ ...listLocalAgentBranchesForFinish(repoRoot),
+ ...worktreeEntries.map((entry) => entry.branch),
+ ]);
+ }
- if (failed > 0) {
- throw new Error('finish command failed for one or more agent branches');
+ const candidates = [];
+ for (const branch of candidateBranches) {
+ const worktreePath = worktreeByBranch.get(branch) || '';
+ const baseBranch = resolveFinishBaseBranch(repoRoot, branch, options.base);
+ const hasChanges = worktreePath ? worktreeHasLocalChanges(worktreePath) : false;
+ const alreadyMerged = branchMergedIntoBase(repoRoot, branch, baseBranch);
+ if (options.all || options.branch || hasChanges || !alreadyMerged) {
+ candidates.push({
+ branch,
+ baseBranch,
+ worktreePath,
+ hasChanges,
+ alreadyMerged,
+ });
}
+ }
+ if (candidates.length === 0) {
+ console.log(`[${TOOL_NAME}] No pending agent branches to finish.`);
process.exitCode = 0;
+ return;
}
- function sync(rawArgs) {
- const options = parseSyncArgs(rawArgs);
- const repoRoot = resolveRepoRoot(options.target);
- const baseBranch = resolveBaseBranch(repoRoot, options.base);
- const strategy = resolveSyncStrategy(repoRoot, options.strategy);
- const baseRef = `origin/${baseBranch}`;
+ let succeeded = 0;
+ let failed = 0;
+ let autoCommitted = 0;
- ensureOriginBaseRef(repoRoot, baseBranch);
+ for (const candidate of candidates) {
+ const { branch, baseBranch, worktreePath } = candidate;
+ console.log(
+ `[${TOOL_NAME}] Finishing '${branch}' -> '${baseBranch}'${worktreePath ? ` (${worktreePath})` : ''}...`,
+ );
- if (options.allAgentBranches) {
- const refs = gitRun(repoRoot, ['for-each-ref', '--format=%(refname:short)', 'refs/heads/agent/*'], { allowFailure: true });
- if (refs.status !== 0) {
- throw new Error('Unable to list local agent branches');
+ try {
+ let commitState = { changed: false, committed: false };
+ if (worktreePath) {
+ commitState = autoCommitWorktreeForFinish(repoRoot, worktreePath, branch, options);
}
- const branches = (refs.stdout || '').split('\n').map((item) => item.trim()).filter(Boolean);
- const rows = branches.map((branch) => {
- const counts = aheadBehind(repoRoot, branch, baseRef);
- return {
- branch,
- base: baseRef,
- ahead: counts.ahead,
- behind: counts.behind,
- syncRequired: counts.behind > 0,
- };
- });
- if (options.json) {
- process.stdout.write(`${JSON.stringify({
- repoRoot,
- base: baseRef,
- branchCount: rows.length,
- rows,
- }, null, 2)}\n`);
- } else {
- console.log(`[${TOOL_NAME}] Sync report target: ${repoRoot}`);
- console.log(`[${TOOL_NAME}] Base: ${baseRef}`);
- if (rows.length === 0) {
- console.log(`[${TOOL_NAME}] No local agent branches found.`);
- } else {
- for (const row of rows) {
- console.log(` - ${row.branch} | ahead ${row.ahead} | behind ${row.behind} | syncRequired=${row.syncRequired}`);
- }
- }
+ if (commitState.committed) {
+ autoCommitted += 1;
+ console.log(`[${TOOL_NAME}] Auto-committed '${branch}' before finish.`);
+ } else if (commitState.changed && commitState.dryRun) {
+ console.log(`[${TOOL_NAME}] [dry-run] Would auto-commit pending changes on '${branch}'.`);
}
- const hasBehind = rows.some((row) => row.behind > 0);
- process.exitCode = options.check && hasBehind ? 1 : 0;
- return;
- }
-
- const branch = currentBranchName(repoRoot);
- if (!options.allowNonAgent && !branch.startsWith('agent/')) {
- throw new Error(`sync is limited to agent/* branches by default (current: ${branch}). Use --allow-non-agent to override.`);
- }
-
- const dirty = workingTreeIsDirty(repoRoot);
- if (!options.check && !options.allowDirty && dirty) {
- throw new Error('Sync blocked: working tree is not clean. Commit or stash changes first, or pass --allow-dirty.');
- }
-
- const before = aheadBehind(repoRoot, branch, baseRef);
-
- const payload = {
- repoRoot,
- branch,
- base: baseRef,
- strategy,
- dirty,
- aheadBefore: before.ahead,
- behindBefore: before.behind,
- syncRequired: before.behind > 0,
- status: 'checked',
- };
-
- if (options.check) {
- if (options.json) {
- process.stdout.write(`${JSON.stringify(payload, null, 2)}\n`);
+ const finishArgs = [
+ '--branch',
+ branch,
+ '--base',
+ baseBranch,
+ options.waitForMerge ? '--wait-for-merge' : '--no-wait-for-merge',
+ options.cleanup ? '--cleanup' : '--no-cleanup',
+ ];
+ if (options.mergeMode === 'pr') {
+ finishArgs.push('--via-pr');
+ } else if (options.mergeMode === 'direct') {
+ finishArgs.push('--direct-only');
} else {
- console.log(`[${TOOL_NAME}] Sync check target: ${repoRoot}`);
- console.log(`[${TOOL_NAME}] Branch: ${branch}`);
- console.log(`[${TOOL_NAME}] Base: ${baseRef}`);
- console.log(`[${TOOL_NAME}] Ahead: ${before.ahead}`);
- console.log(`[${TOOL_NAME}] Behind: ${before.behind}`);
- console.log(`[${TOOL_NAME}] Sync required: ${before.behind > 0 ? 'yes' : 'no'}`);
+ finishArgs.push('--mode', 'auto');
+ }
+ if (options.keepRemote) {
+ finishArgs.push('--keep-remote-branch');
}
- process.exitCode = before.behind > 0 ? 1 : 0;
- return;
- }
- if (before.behind === 0) {
- const result = { ...payload, status: 'no-op', aheadAfter: before.ahead, behindAfter: before.behind };
- if (options.json) {
- process.stdout.write(`${JSON.stringify(result, null, 2)}\n`);
- } else {
- console.log(`[${TOOL_NAME}] Branch '${branch}' is already up to date with ${baseRef}.`);
+ if (options.dryRun) {
+ console.log(`[${TOOL_NAME}] [dry-run] Would run: gx branch finish ${finishArgs.join(' ')}`);
+ succeeded += 1;
+ continue;
}
- process.exitCode = 0;
- return;
- }
- if (options.dryRun) {
- const result = { ...payload, status: 'dry-run' };
- if (options.json) {
- process.stdout.write(`${JSON.stringify(result, null, 2)}\n`);
- } else {
- console.log(`[${TOOL_NAME}] Dry run: would sync '${branch}' onto ${baseRef} via ${strategy}.`);
+ const finishResult = runPackageAsset('branchFinish', finishArgs, { cwd: repoRoot, stdio: 'pipe' });
+ if (finishResult.stdout) {
+ process.stdout.write(finishResult.stdout);
+ }
+ if (finishResult.stderr) {
+ process.stderr.write(finishResult.stderr);
+ }
+ if (finishResult.status !== 0) {
+ throw new Error(`agent-branch-finish exited with status ${finishResult.status}`);
}
- process.exitCode = 0;
- return;
- }
- const lockPath = path.join(repoRoot, LOCK_FILE_RELATIVE);
- const lockState = lockRegistryStatus(repoRoot);
- let lockBackup = null;
- if (lockState.dirty && fs.existsSync(lockPath)) {
- lockBackup = fs.readFileSync(lockPath, 'utf8');
+ succeeded += 1;
+ } catch (error) {
+ failed += 1;
+ console.error(`[${TOOL_NAME}] Finish failed for '${branch}': ${error.message}`);
+ if (options.failFast) {
+ break;
+ }
}
+ }
+
+ console.log(
+ `[${TOOL_NAME}] Finish summary: total=${candidates.length}, success=${succeeded}, failed=${failed}, autoCommitted=${autoCommitted}`,
+ );
+
+ if (failed > 0) {
+ throw new Error('finish command failed for one or more agent branches');
+ }
+
+ process.exitCode = 0;
+}
+
+function sync(rawArgs) {
+ const options = parseSyncArgs(rawArgs);
+ const repoRoot = resolveRepoRoot(options.target);
+ const baseBranch = resolveBaseBranch(repoRoot, options.base);
+ const strategy = resolveSyncStrategy(repoRoot, options.strategy);
+ const baseRef = `origin/${baseBranch}`;
+
+ ensureOriginBaseRef(repoRoot, baseBranch);
+
+ if (options.allAgentBranches) {
+ const refs = gitRun(repoRoot, ['for-each-ref', '--format=%(refname:short)', 'refs/heads/agent/*'], { allowFailure: true });
+ if (refs.status !== 0) {
+ throw new Error('Unable to list local agent branches');
+ }
+ const branches = (refs.stdout || '').split('\n').map((item) => item.trim()).filter(Boolean);
+ const rows = branches.map((branch) => {
+ const counts = aheadBehind(repoRoot, branch, baseRef);
+ return {
+ branch,
+ base: baseRef,
+ ahead: counts.ahead,
+ behind: counts.behind,
+ syncRequired: counts.behind > 0,
+ };
+ });
- if (lockState.dirty) {
- if (lockState.untracked) {
- fs.rmSync(lockPath, { force: true });
+ if (options.json) {
+ process.stdout.write(`${JSON.stringify({
+ repoRoot,
+ base: baseRef,
+ branchCount: rows.length,
+ rows,
+ }, null, 2)}\n`);
+ } else {
+ console.log(`[${TOOL_NAME}] Sync report target: ${repoRoot}`);
+ console.log(`[${TOOL_NAME}] Base: ${baseRef}`);
+ if (rows.length === 0) {
+ console.log(`[${TOOL_NAME}] No local agent branches found.`);
} else {
- const resetLock = gitRun(repoRoot, ['checkout', '--', LOCK_FILE_RELATIVE], { allowFailure: true });
- if (resetLock.status !== 0) {
- throw new Error(`Unable to temporarily reset ${LOCK_FILE_RELATIVE} before sync`);
+ for (const row of rows) {
+ console.log(` - ${row.branch} | ahead ${row.ahead} | behind ${row.behind} | syncRequired=${row.syncRequired}`);
}
}
}
- try {
- syncOperation(repoRoot, strategy, baseRef, options.ffOnly);
- } finally {
- if (lockBackup !== null) {
- fs.mkdirSync(path.dirname(lockPath), { recursive: true });
- fs.writeFileSync(lockPath, lockBackup, 'utf8');
- }
- }
- const after = aheadBehind(repoRoot, branch, baseRef);
- const result = {
- ...payload,
- status: 'success',
- aheadAfter: after.ahead,
- behindAfter: after.behind,
- };
+ const hasBehind = rows.some((row) => row.behind > 0);
+ process.exitCode = options.check && hasBehind ? 1 : 0;
+ return;
+ }
+ const branch = currentBranchName(repoRoot);
+ if (!options.allowNonAgent && !branch.startsWith('agent/')) {
+ throw new Error(`sync is limited to agent/* branches by default (current: ${branch}). Use --allow-non-agent to override.`);
+ }
+
+ const dirty = workingTreeIsDirty(repoRoot);
+ if (!options.check && !options.allowDirty && dirty) {
+ throw new Error('Sync blocked: working tree is not clean. Commit or stash changes first, or pass --allow-dirty.');
+ }
+
+ const before = aheadBehind(repoRoot, branch, baseRef);
+
+ const payload = {
+ repoRoot,
+ branch,
+ base: baseRef,
+ strategy,
+ dirty,
+ aheadBefore: before.ahead,
+ behindBefore: before.behind,
+ syncRequired: before.behind > 0,
+ status: 'checked',
+ };
+
+ if (options.check) {
if (options.json) {
- process.stdout.write(`${JSON.stringify(result, null, 2)}\n`);
+ process.stdout.write(`${JSON.stringify(payload, null, 2)}\n`);
} else {
- console.log(`[${TOOL_NAME}] Sync target: ${repoRoot}`);
+ console.log(`[${TOOL_NAME}] Sync check target: ${repoRoot}`);
console.log(`[${TOOL_NAME}] Branch: ${branch}`);
console.log(`[${TOOL_NAME}] Base: ${baseRef}`);
- console.log(`[${TOOL_NAME}] Strategy: ${strategy}`);
- console.log(`[${TOOL_NAME}] Behind before sync: ${before.behind}`);
- console.log(`[${TOOL_NAME}] Result: success (behind now: ${after.behind})`);
+ console.log(`[${TOOL_NAME}] Ahead: ${before.ahead}`);
+ console.log(`[${TOOL_NAME}] Behind: ${before.behind}`);
+ console.log(`[${TOOL_NAME}] Sync required: ${before.behind > 0 ? 'yes' : 'no'}`);
+ }
+ process.exitCode = before.behind > 0 ? 1 : 0;
+ return;
+ }
+
+ if (before.behind === 0) {
+ const result = { ...payload, status: 'no-op', aheadAfter: before.ahead, behindAfter: before.behind };
+ if (options.json) {
+ process.stdout.write(`${JSON.stringify(result, null, 2)}\n`);
+ } else {
+ console.log(`[${TOOL_NAME}] Branch '${branch}' is already up to date with ${baseRef}.`);
}
+ process.exitCode = 0;
+ return;
+ }
+ if (options.dryRun) {
+ const result = { ...payload, status: 'dry-run' };
+ if (options.json) {
+ process.stdout.write(`${JSON.stringify(result, null, 2)}\n`);
+ } else {
+ console.log(`[${TOOL_NAME}] Dry run: would sync '${branch}' onto ${baseRef} via ${strategy}.`);
+ }
process.exitCode = 0;
+ return;
}
- return {
- cleanup,
- merge,
- finish,
- sync,
+ const lockPath = path.join(repoRoot, LOCK_FILE_RELATIVE);
+ const lockState = lockRegistryStatus(repoRoot);
+ let lockBackup = null;
+ if (lockState.dirty && fs.existsSync(lockPath)) {
+ lockBackup = fs.readFileSync(lockPath, 'utf8');
+ }
+
+ if (lockState.dirty) {
+ if (lockState.untracked) {
+ fs.rmSync(lockPath, { force: true });
+ } else {
+ const resetLock = gitRun(repoRoot, ['checkout', '--', LOCK_FILE_RELATIVE], { allowFailure: true });
+ if (resetLock.status !== 0) {
+ throw new Error(`Unable to temporarily reset ${LOCK_FILE_RELATIVE} before sync`);
+ }
+ }
+ }
+
+ try {
+ syncOperation(repoRoot, strategy, baseRef, options.ffOnly);
+ } finally {
+ if (lockBackup !== null) {
+ fs.mkdirSync(path.dirname(lockPath), { recursive: true });
+ fs.writeFileSync(lockPath, lockBackup, 'utf8');
+ }
+ }
+ const after = aheadBehind(repoRoot, branch, baseRef);
+ const result = {
+ ...payload,
+ status: 'success',
+ aheadAfter: after.ahead,
+ behindAfter: after.behind,
};
+
+ if (options.json) {
+ process.stdout.write(`${JSON.stringify(result, null, 2)}\n`);
+ } else {
+ console.log(`[${TOOL_NAME}] Sync target: ${repoRoot}`);
+ console.log(`[${TOOL_NAME}] Branch: ${branch}`);
+ console.log(`[${TOOL_NAME}] Base: ${baseRef}`);
+ console.log(`[${TOOL_NAME}] Strategy: ${strategy}`);
+ console.log(`[${TOOL_NAME}] Behind before sync: ${before.behind}`);
+ console.log(`[${TOOL_NAME}] Result: success (behind now: ${after.behind})`);
+ }
+
+ process.exitCode = 0;
}
module.exports = {
- createFinishApi,
+ cleanup,
+ merge,
+ finish,
+ sync,
};
diff --git a/src/git/index.js b/src/git/index.js
index e2bc231..cea003c 100644
--- a/src/git/index.js
+++ b/src/git/index.js
@@ -1,6 +1,7 @@
const fs = require('node:fs');
const {
path,
+ TOOL_NAME,
GIT_PROTECTED_BRANCHES_KEY,
GIT_BASE_BRANCH_KEY,
GIT_SYNC_STRATEGY_KEY,
@@ -277,6 +278,39 @@ function readProtectedBranches(repoRoot) {
return parsed;
}
+function ensureSetupProtectedBranches(repoRoot, dryRun) {
+ const localUserBranches = listLocalUserBranches(repoRoot);
+ if (localUserBranches.length === 0) {
+ return {
+ status: 'unchanged',
+ file: `git config ${GIT_PROTECTED_BRANCHES_KEY}`,
+ note: 'no additional local user branches detected',
+ };
+ }
+
+ const configured = readConfiguredProtectedBranches(repoRoot);
+ const currentBranches = configured || [...DEFAULT_PROTECTED_BRANCHES];
+ const missingBranches = localUserBranches.filter((branchName) => !currentBranches.includes(branchName));
+ if (missingBranches.length === 0) {
+ return {
+ status: 'unchanged',
+ file: `git config ${GIT_PROTECTED_BRANCHES_KEY}`,
+ note: 'local user branches already protected',
+ };
+ }
+
+ const nextBranches = uniquePreserveOrder([...currentBranches, ...missingBranches]);
+ if (!dryRun) {
+ writeProtectedBranches(repoRoot, nextBranches);
+ }
+
+ return {
+ status: dryRun ? 'would-update' : 'updated',
+ file: `git config ${GIT_PROTECTED_BRANCHES_KEY}`,
+ note: `added local user branch(es): ${missingBranches.join(', ')}`,
+ };
+}
+
function writeProtectedBranches(repoRoot, branches) {
if (branches.length === 0) {
gitRun(repoRoot, ['config', '--unset-all', GIT_PROTECTED_BRANCHES_KEY], { allowFailure: true });
@@ -352,6 +386,37 @@ function detectComposeHintFiles(repoRoot) {
return COMPOSE_HINT_FILES.filter((relativePath) => fs.existsSync(path.join(repoRoot, relativePath)));
}
+function printSetupRepoHints(repoRoot, baseBranch, repoLabel = '') {
+ const branchDisplay = readBranchDisplayName(repoRoot);
+ const hasHeadCommit = repoHasHeadCommit(repoRoot);
+ const hasOrigin = hasOriginRemote(repoRoot);
+ const composeFiles = detectComposeHintFiles(repoRoot);
+ if (hasHeadCommit && hasOrigin && composeFiles.length === 0) {
+ return;
+ }
+
+ const label = repoLabel ? ` ${repoLabel}` : '';
+ if (!hasHeadCommit) {
+ console.log(`[${TOOL_NAME}] Fresh repo onboarding${label}: current branch is ${branchDisplay}.`);
+ console.log(`[${TOOL_NAME}] Bootstrap commit${label}: git add . && git commit -m "bootstrap gitguardex"`);
+ console.log(
+ `[${TOOL_NAME}] First agent flow${label}: ` +
+ `gx branch start "" "codex" -> ` +
+ `gx locks claim --branch "$(git branch --show-current)" -> ` +
+ `gx branch finish --branch "$(git branch --show-current)" --base ${baseBranch} --via-pr --wait-for-merge`,
+ );
+ }
+ if (!hasOrigin) {
+ console.log(`[${TOOL_NAME}] No origin remote${label}: finish and auto-merge flows stay local until you add one.`);
+ }
+ if (composeFiles.length > 0) {
+ console.log(
+ `[${TOOL_NAME}] Docker Compose helper${label}: detected ${composeFiles.join(', ')}. ` +
+ `Set GUARDEX_DOCKER_SERVICE and run 'bash scripts/guardex-docker-loader.sh -- '.`,
+ );
+ }
+}
+
function workingTreeIsDirty(repoRoot) {
const result = gitRun(repoRoot, ['status', '--porcelain'], { allowFailure: true });
if (result.status !== 0) {
@@ -631,6 +696,7 @@ module.exports = {
gitRefExists,
hasSignificantWorkingTreeChanges,
readProtectedBranches,
+ ensureSetupProtectedBranches,
writeProtectedBranches,
readGitConfig,
resolveBaseBranch,
@@ -641,6 +707,7 @@ module.exports = {
hasOriginRemote,
repoHasOriginRemote: hasOriginRemote,
detectComposeHintFiles,
+ printSetupRepoHints,
workingTreeIsDirty,
ensureRepoBranch,
ensureOriginBaseRef,
diff --git a/src/sandbox/index.js b/src/sandbox/index.js
index 66a7726..3fe15fd 100644
--- a/src/sandbox/index.js
+++ b/src/sandbox/index.js
@@ -1,68 +1,317 @@
-function createSandboxApi(deps) {
- const {
- protectedBaseWriteBlock,
- runInstallInternal,
- ensureSetupProtectedBranches,
- ensureParentWorkspaceView,
- buildParentWorkspaceView,
- runFixInternal,
- } = deps;
-
- function assertProtectedMainWriteAllowed(options, commandName) {
- const blocked = protectedBaseWriteBlock(options);
- if (!blocked) {
- return;
+const {
+ fs,
+ path,
+ SHORT_TOOL_NAME,
+ LOCK_FILE_RELATIVE,
+ defaultAgentWorktreeRelativeDir,
+} = require('../context');
+const { run, runPackageAsset } = require('../core/runtime');
+const {
+ resolveRepoRoot,
+ currentBranchName,
+ readProtectedBranches,
+ gitRefExists,
+ ensureRepoBranch,
+} = require('../git');
+
+function hasGuardexBootstrapFiles(repoRoot) {
+ const required = [
+ 'AGENTS.md',
+ '.githooks/pre-commit',
+ '.githooks/pre-push',
+ LOCK_FILE_RELATIVE,
+ ];
+ return required.every((relativePath) => require('../context').fs.existsSync(path.join(repoRoot, relativePath)));
+}
+
+function protectedBaseWriteBlock(options, { requireBootstrap = true } = {}) {
+ if (options.dryRun || options.allowProtectedBaseWrite) {
+ return null;
+ }
+
+ const repoRoot = resolveRepoRoot(options.target);
+ if (requireBootstrap && !hasGuardexBootstrapFiles(repoRoot)) {
+ return null;
+ }
+
+ const branch = currentBranchName(repoRoot);
+ if (branch !== 'main') {
+ return null;
+ }
+
+ const protectedBranches = readProtectedBranches(repoRoot);
+ if (!protectedBranches.includes(branch)) {
+ return null;
+ }
+
+ return {
+ repoRoot,
+ branch,
+ };
+}
+
+function assertProtectedMainWriteAllowed(options, commandName) {
+ const blocked = protectedBaseWriteBlock(options);
+ if (!blocked) {
+ return;
+ }
+
+ throw new Error(
+ `${commandName} blocked on protected branch '${blocked.branch}' in an initialized repo.\n` +
+ `Keep local '${blocked.branch}' pull-only: start an agent branch/worktree first:\n` +
+ ` gx branch start "" "codex"\n` +
+ `Override once only when intentional: --allow-protected-base-write`,
+ );
+}
+
+function extractAgentBranchStartMetadata(output) {
+ const branchMatch = String(output || '').match(/^\[agent-branch-start\] Created branch: (.+)$/m);
+ const worktreeMatch = String(output || '').match(/^\[agent-branch-start\] Worktree: (.+)$/m);
+ return {
+ branch: branchMatch ? branchMatch[1].trim() : '',
+ worktreePath: worktreeMatch ? worktreeMatch[1].trim() : '',
+ };
+}
+
+function resolveSandboxTarget(repoRoot, worktreePath, targetPath) {
+ const resolvedTarget = path.resolve(targetPath);
+ const relativeTarget = path.relative(repoRoot, resolvedTarget);
+ if (relativeTarget.startsWith('..') || path.isAbsolute(relativeTarget)) {
+ throw new Error(`sandbox target must stay inside repo root: ${resolvedTarget}`);
+ }
+ if (!relativeTarget || relativeTarget === '.') {
+ return worktreePath;
+ }
+ return path.join(worktreePath, relativeTarget);
+}
+
+function appendManagedForceArgs(args, options) {
+ if (!options.force) {
+ return;
+ }
+ args.push('--force');
+ for (const managedPath of options.forceManagedPaths || []) {
+ args.push(managedPath);
+ }
+}
+
+function buildSandboxSetupArgs(options, sandboxTarget) {
+ const args = ['setup', '--target', sandboxTarget, '--no-global-install', '--no-recursive'];
+ appendManagedForceArgs(args, options);
+ if (options.skipAgents) args.push('--skip-agents');
+ if (options.skipPackageJson) args.push('--skip-package-json');
+ if (options.skipGitignore) args.push('--no-gitignore');
+ if (options.dryRun) args.push('--dry-run');
+ return args;
+}
+
+function isSpawnFailure(result) {
+ return Boolean(result?.error) && typeof result?.status !== 'number';
+}
+
+function protectedBaseSandboxBranchPrefix() {
+ const now = new Date();
+ const stamp = [
+ now.getUTCFullYear(),
+ String(now.getUTCMonth() + 1).padStart(2, '0'),
+ String(now.getUTCDate()).padStart(2, '0'),
+ ].join('') + '-' + [
+ String(now.getUTCHours()).padStart(2, '0'),
+ String(now.getUTCMinutes()).padStart(2, '0'),
+ String(now.getUTCSeconds()).padStart(2, '0'),
+ ].join('');
+ return `agent/gx/${stamp}`;
+}
+
+function protectedBaseSandboxWorktreePath(repoRoot, branchName) {
+ return path.join(repoRoot, defaultAgentWorktreeRelativeDir(), branchName.replace(/\//g, '__'));
+}
+
+function resolveProtectedBaseSandboxStartRef(repoRoot, baseBranch) {
+ run('git', ['-C', repoRoot, 'fetch', 'origin', baseBranch, '--quiet'], { timeout: 20_000 });
+ if (gitRefExists(repoRoot, `refs/remotes/origin/${baseBranch}`)) {
+ return `origin/${baseBranch}`;
+ }
+ if (gitRefExists(repoRoot, `refs/heads/${baseBranch}`)) {
+ return baseBranch;
+ }
+ if (currentBranchName(repoRoot) === baseBranch) {
+ return null;
+ }
+ throw new Error(`Unable to find base ref for sandbox bootstrap: ${baseBranch}`);
+}
+
+function startProtectedBaseSandboxFallback(blocked, sandboxSuffix) {
+ const branchPrefix = protectedBaseSandboxBranchPrefix();
+ let selectedBranch = '';
+ let selectedWorktreePath = '';
+
+ for (let attempt = 0; attempt < 30; attempt += 1) {
+ const suffix = attempt === 0 ? sandboxSuffix : `${attempt + 1}-${sandboxSuffix}`;
+ const candidateBranch = `${branchPrefix}-${suffix}`;
+ const candidateWorktreePath = protectedBaseSandboxWorktreePath(blocked.repoRoot, candidateBranch);
+ if (gitRefExists(blocked.repoRoot, `refs/heads/${candidateBranch}`)) {
+ continue;
}
+ if (fs.existsSync(candidateWorktreePath)) {
+ continue;
+ }
+ selectedBranch = candidateBranch;
+ selectedWorktreePath = candidateWorktreePath;
+ break;
+ }
- throw new Error(
- `${commandName} blocked on protected branch '${blocked.branch}' in an initialized repo.\n` +
- `Keep local '${blocked.branch}' pull-only: start an agent branch/worktree first:\n` +
- ` gx branch start "" "codex"\n` +
- `Override once only when intentional: --allow-protected-base-write`,
- );
+ if (!selectedBranch || !selectedWorktreePath) {
+ throw new Error('Unable to allocate unique sandbox branch/worktree');
}
- function runSetupBootstrapInternal(options) {
- const installPayload = runInstallInternal(options);
- installPayload.operations.push(
- ensureSetupProtectedBranches(installPayload.repoRoot, Boolean(options.dryRun)),
- );
+ fs.mkdirSync(path.dirname(selectedWorktreePath), { recursive: true });
+ const startRef = resolveProtectedBaseSandboxStartRef(blocked.repoRoot, blocked.branch);
+ const addArgs = startRef
+ ? ['-C', blocked.repoRoot, 'worktree', 'add', '-b', selectedBranch, selectedWorktreePath, startRef]
+ : ['-C', blocked.repoRoot, 'worktree', 'add', '--orphan', selectedWorktreePath];
+ const addResult = run('git', addArgs);
+ if (isSpawnFailure(addResult)) {
+ throw addResult.error;
+ }
+ if (addResult.status !== 0) {
+ throw new Error((addResult.stderr || addResult.stdout || 'failed to create sandbox').trim());
+ }
- let parentWorkspace = null;
- if (options.parentWorkspaceView) {
- installPayload.operations.push(
- ensureParentWorkspaceView(installPayload.repoRoot, Boolean(options.dryRun)),
+ if (!startRef) {
+ const renameResult = run(
+ 'git',
+ ['-C', selectedWorktreePath, 'branch', '-m', selectedBranch],
+ { timeout: 20_000 },
+ );
+ if (isSpawnFailure(renameResult)) {
+ throw renameResult.error;
+ }
+ if (renameResult.status !== 0) {
+ throw new Error(
+ (renameResult.stderr || renameResult.stdout || 'failed to name orphan sandbox branch').trim(),
);
- if (!options.dryRun) {
- parentWorkspace = buildParentWorkspaceView(installPayload.repoRoot);
- }
}
+ }
+
+ return {
+ metadata: {
+ branch: selectedBranch,
+ worktreePath: selectedWorktreePath,
+ },
+ stdout:
+ `[agent-branch-start] Created branch: ${selectedBranch}\n` +
+ `[agent-branch-start] Worktree: ${selectedWorktreePath}\n`,
+ stderr: addResult.stderr || '',
+ };
+}
+
+function startProtectedBaseSandbox(blocked, { taskName, sandboxSuffix }) {
+ if (sandboxSuffix === 'gx-doctor') {
+ return startProtectedBaseSandboxFallback(blocked, sandboxSuffix);
+ }
+
+ const startResult = runPackageAsset('branchStart', [
+ '--task',
+ taskName,
+ '--agent',
+ SHORT_TOOL_NAME,
+ '--base',
+ blocked.branch,
+ ], { cwd: blocked.repoRoot });
+ if (isSpawnFailure(startResult)) {
+ throw startResult.error;
+ }
+ if (startResult.status !== 0) {
+ return startProtectedBaseSandboxFallback(blocked, sandboxSuffix);
+ }
- const fixPayload = runFixInternal({
- target: installPayload.repoRoot,
- dryRun: options.dryRun,
- force: options.force,
- forceManagedPaths: options.forceManagedPaths,
- dropStaleLocks: true,
- skipAgents: options.skipAgents,
- skipPackageJson: options.skipPackageJson,
- skipGitignore: options.skipGitignore,
- allowProtectedBaseWrite: options.allowProtectedBaseWrite,
- });
-
- return {
- installPayload,
- fixPayload,
- parentWorkspace,
- };
+ const metadata = extractAgentBranchStartMetadata(startResult.stdout);
+ const currentBranch = currentBranchName(blocked.repoRoot);
+ const worktreePath = metadata.worktreePath ? path.resolve(metadata.worktreePath) : '';
+ const repoRootPath = path.resolve(blocked.repoRoot);
+ const hasSafeWorktree = Boolean(worktreePath) && worktreePath !== repoRootPath;
+ const branchChanged = Boolean(currentBranch) && currentBranch !== blocked.branch;
+
+ if (!hasSafeWorktree || branchChanged) {
+ const restoreResult = ensureRepoBranch(blocked.repoRoot, blocked.branch);
+ if (!restoreResult.ok) {
+ const detail = [restoreResult.stderr, restoreResult.stdout].filter(Boolean).join('\n').trim();
+ throw new Error(
+ `sandbox startup switched protected base checkout and could not restore '${blocked.branch}'.` +
+ (detail ? `\n${detail}` : ''),
+ );
+ }
+ return startProtectedBaseSandboxFallback(blocked, sandboxSuffix);
}
return {
- assertProtectedMainWriteAllowed,
- runSetupBootstrapInternal,
+ metadata,
+ stdout: startResult.stdout || '',
+ stderr: startResult.stderr || '',
+ };
+}
+
+function cleanupProtectedBaseSandbox(repoRoot, metadata) {
+ const result = {
+ worktree: 'skipped',
+ branch: 'skipped',
+ note: 'missing sandbox metadata',
};
+
+ if (!metadata?.worktreePath || !metadata?.branch) {
+ return result;
+ }
+
+ if (fs.existsSync(metadata.worktreePath)) {
+ const removeResult = run(
+ 'git',
+ ['-C', repoRoot, 'worktree', 'remove', '--force', metadata.worktreePath],
+ { timeout: 30_000 },
+ );
+ if (isSpawnFailure(removeResult)) {
+ throw removeResult.error;
+ }
+ if (removeResult.status !== 0) {
+ throw new Error(
+ (removeResult.stderr || removeResult.stdout || 'failed to remove sandbox worktree').trim(),
+ );
+ }
+ result.worktree = 'removed';
+ } else {
+ result.worktree = 'missing';
+ }
+
+ if (gitRefExists(repoRoot, `refs/heads/${metadata.branch}`)) {
+ const branchDeleteResult = run(
+ 'git',
+ ['-C', repoRoot, 'branch', '-D', metadata.branch],
+ { timeout: 20_000 },
+ );
+ if (isSpawnFailure(branchDeleteResult)) {
+ throw branchDeleteResult.error;
+ }
+ if (branchDeleteResult.status !== 0) {
+ throw new Error(
+ (branchDeleteResult.stderr || branchDeleteResult.stdout || 'failed to delete sandbox branch').trim(),
+ );
+ }
+ result.branch = 'deleted';
+ } else {
+ result.branch = 'missing';
+ }
+
+ result.note = 'sandbox worktree pruned';
+ return result;
}
module.exports = {
- createSandboxApi,
+ protectedBaseWriteBlock,
+ assertProtectedMainWriteAllowed,
+ extractAgentBranchStartMetadata,
+ resolveSandboxTarget,
+ buildSandboxSetupArgs,
+ isSpawnFailure,
+ startProtectedBaseSandbox,
+ cleanupProtectedBaseSandbox,
};
diff --git a/src/scaffold/index.js b/src/scaffold/index.js
index e1f3424..86ec941 100644
--- a/src/scaffold/index.js
+++ b/src/scaffold/index.js
@@ -3,10 +3,28 @@ const {
path,
TOOL_NAME,
SHORT_TOOL_NAME,
+ GUARDEX_HOME_DIR,
+ AGENT_WORKTREE_RELATIVE_DIRS,
+ TEMPLATE_ROOT,
+ HOOK_NAMES,
+ LOCK_FILE_RELATIVE,
+ LEGACY_MANAGED_PACKAGE_SCRIPTS,
+ USER_LEVEL_SKILL_ASSETS,
+ AGENTS_MARKER_START,
+ AGENTS_MARKER_END,
+ GITIGNORE_MARKER_START,
+ GITIGNORE_MARKER_END,
+ SHARED_VSCODE_SETTINGS_RELATIVE,
+ REPO_SCAN_IGNORED_FOLDERS_SETTING,
+ MANAGED_REPO_SCAN_IGNORED_FOLDERS,
+ REPO_SCAFFOLD_DIRECTORIES,
+ OMX_SCAFFOLD_DIRECTORIES,
+ OMX_SCAFFOLD_FILES,
toDestinationPath,
EXECUTABLE_RELATIVE_PATHS,
CRITICAL_GUARDRAIL_PATHS,
} = require('../context');
+const { run } = require('../core/runtime');
function ensureParentDir(repoRoot, filePath, dryRun) {
if (dryRun) return;
@@ -108,6 +126,644 @@ function managedForceConflictMessage(relativePath) {
);
}
+function renderManagedFile(repoRoot, relativePath, content, options = {}) {
+ const destinationPath = path.join(repoRoot, relativePath);
+ const destinationExists = fs.existsSync(destinationPath);
+ const force = Boolean(options.force);
+ const dryRun = Boolean(options.dryRun);
+
+ if (destinationExists) {
+ const existingContent = fs.readFileSync(destinationPath, 'utf8');
+ if (existingContent === content) {
+ ensureExecutable(destinationPath, relativePath, dryRun);
+ return { status: 'unchanged', file: relativePath };
+ }
+ if (!force && !isCriticalGuardrailPath(relativePath)) {
+ throw new Error(managedForceConflictMessage(relativePath));
+ }
+ }
+
+ ensureParentDir(repoRoot, destinationPath, dryRun);
+ if (!dryRun) {
+ fs.writeFileSync(destinationPath, content, 'utf8');
+ ensureExecutable(destinationPath, relativePath, dryRun);
+ }
+
+ if (destinationExists && !force && isCriticalGuardrailPath(relativePath)) {
+ return { status: dryRun ? 'would-repair-critical' : 'repaired-critical', file: relativePath };
+ }
+
+ return { status: destinationExists ? 'overwritten' : 'created', file: relativePath };
+}
+
+function ensureGeneratedScriptShim(repoRoot, spec, options = {}) {
+ const content = spec.kind === 'python'
+ ? renderPythonDispatchShim(spec.command)
+ : renderShellDispatchShim(spec.command);
+ return renderManagedFile(repoRoot, spec.relativePath, content, options);
+}
+
+function ensureHookShim(repoRoot, hookName, options = {}) {
+ return renderManagedFile(
+ repoRoot,
+ path.posix.join('.githooks', hookName),
+ renderShellDispatchShim(['hook', 'run', hookName]),
+ options,
+ );
+}
+
+function copyTemplateFile(repoRoot, relativeTemplatePath, force, dryRun) {
+ const sourcePath = path.join(TEMPLATE_ROOT, relativeTemplatePath);
+ const destinationRelativePath = toDestinationPath(relativeTemplatePath);
+ const destinationPath = path.join(repoRoot, destinationRelativePath);
+
+ const sourceContent = fs.readFileSync(sourcePath, 'utf8');
+ const destinationExists = fs.existsSync(destinationPath);
+
+ if (destinationExists) {
+ const existingContent = fs.readFileSync(destinationPath, 'utf8');
+ if (existingContent === sourceContent) {
+ ensureExecutable(destinationPath, destinationRelativePath, dryRun);
+ return { status: 'unchanged', file: destinationRelativePath };
+ }
+ if (!force && !isCriticalGuardrailPath(destinationRelativePath)) {
+ throw new Error(managedForceConflictMessage(destinationRelativePath));
+ }
+ }
+
+ ensureParentDir(repoRoot, destinationPath, dryRun);
+ if (!dryRun) {
+ fs.writeFileSync(destinationPath, sourceContent, 'utf8');
+ ensureExecutable(destinationPath, destinationRelativePath, dryRun);
+ }
+
+ if (destinationExists && !force && isCriticalGuardrailPath(destinationRelativePath)) {
+ return { status: dryRun ? 'would-repair-critical' : 'repaired-critical', file: destinationRelativePath };
+ }
+
+ return { status: destinationExists ? 'overwritten' : 'created', file: destinationRelativePath };
+}
+
+function ensureTemplateFilePresent(repoRoot, relativeTemplatePath, dryRun) {
+ const sourcePath = path.join(TEMPLATE_ROOT, relativeTemplatePath);
+ const destinationRelativePath = toDestinationPath(relativeTemplatePath);
+ const destinationPath = path.join(repoRoot, destinationRelativePath);
+ const sourceContent = fs.readFileSync(sourcePath, 'utf8');
+
+ if (fs.existsSync(destinationPath)) {
+ const existingContent = fs.readFileSync(destinationPath, 'utf8');
+ if (existingContent === sourceContent) {
+ ensureExecutable(destinationPath, destinationRelativePath, dryRun);
+ return { status: 'unchanged', file: destinationRelativePath };
+ }
+
+ if (isCriticalGuardrailPath(destinationRelativePath)) {
+ if (!dryRun) {
+ fs.writeFileSync(destinationPath, sourceContent, 'utf8');
+ ensureExecutable(destinationPath, destinationRelativePath, dryRun);
+ }
+ return { status: dryRun ? 'would-repair-critical' : 'repaired-critical', file: destinationRelativePath };
+ }
+
+ return { status: 'skipped-conflict', file: destinationRelativePath };
+ }
+
+ ensureParentDir(repoRoot, destinationPath, dryRun);
+ if (!dryRun) {
+ fs.writeFileSync(destinationPath, sourceContent, 'utf8');
+ ensureExecutable(destinationPath, destinationRelativePath, dryRun);
+ }
+
+ return { status: 'created', file: destinationRelativePath };
+}
+
+function lockFilePath(repoRoot) {
+ return path.join(repoRoot, LOCK_FILE_RELATIVE);
+}
+
+function ensureOmxScaffold(repoRoot, dryRun) {
+ const operations = [];
+
+ for (const relativeDir of REPO_SCAFFOLD_DIRECTORIES) {
+ const absoluteDir = path.join(repoRoot, relativeDir);
+ if (fs.existsSync(absoluteDir)) {
+ if (!fs.statSync(absoluteDir).isDirectory()) {
+ throw new Error(`Expected directory at ${relativeDir} but found a file.`);
+ }
+ operations.push({ status: 'unchanged', file: relativeDir });
+ continue;
+ }
+
+ if (!dryRun) {
+ fs.mkdirSync(absoluteDir, { recursive: true });
+ }
+ operations.push({ status: 'created', file: relativeDir });
+ }
+
+ for (const relativeDir of OMX_SCAFFOLD_DIRECTORIES) {
+ const absoluteDir = path.join(repoRoot, relativeDir);
+ if (fs.existsSync(absoluteDir)) {
+ if (!fs.statSync(absoluteDir).isDirectory()) {
+ throw new Error(`Expected directory at ${relativeDir} but found a file.`);
+ }
+ operations.push({ status: 'unchanged', file: relativeDir });
+ continue;
+ }
+
+ if (!dryRun) {
+ fs.mkdirSync(absoluteDir, { recursive: true });
+ }
+ operations.push({ status: 'created', file: relativeDir });
+ }
+
+ for (const [relativeFile, defaultContent] of OMX_SCAFFOLD_FILES.entries()) {
+ const absoluteFile = path.join(repoRoot, relativeFile);
+ if (fs.existsSync(absoluteFile)) {
+ if (!fs.statSync(absoluteFile).isFile()) {
+ throw new Error(`Expected file at ${relativeFile} but found a directory.`);
+ }
+ operations.push({ status: 'unchanged', file: relativeFile });
+ continue;
+ }
+
+ if (!dryRun) {
+ fs.mkdirSync(path.dirname(absoluteFile), { recursive: true });
+ fs.writeFileSync(absoluteFile, defaultContent, 'utf8');
+ }
+ operations.push({ status: 'created', file: relativeFile });
+ }
+
+ return operations;
+}
+
+function ensureLockRegistry(repoRoot, dryRun) {
+ const absolutePath = lockFilePath(repoRoot);
+ if (fs.existsSync(absolutePath)) {
+ return { status: 'unchanged', file: LOCK_FILE_RELATIVE };
+ }
+
+ if (!dryRun) {
+ fs.mkdirSync(path.dirname(absolutePath), { recursive: true });
+ fs.writeFileSync(absolutePath, JSON.stringify({ locks: {} }, null, 2) + '\n', 'utf8');
+ }
+
+ return { status: 'created', file: LOCK_FILE_RELATIVE };
+}
+
+function lockStateOrError(repoRoot) {
+ const lockPath = lockFilePath(repoRoot);
+ if (!fs.existsSync(lockPath)) {
+ return { ok: false, error: `${LOCK_FILE_RELATIVE} is missing` };
+ }
+
+ try {
+ const parsed = JSON.parse(fs.readFileSync(lockPath, 'utf8'));
+ if (!parsed || typeof parsed !== 'object' || typeof parsed.locks !== 'object' || parsed.locks === null) {
+ return { ok: false, error: `${LOCK_FILE_RELATIVE} has invalid schema (expected { locks: {} })` };
+ }
+
+ for (const [filePath, entry] of Object.entries(parsed.locks)) {
+ if (!entry || typeof entry !== 'object') {
+ parsed.locks[filePath] = { branch: '', claimed_at: '', allow_delete: false };
+ continue;
+ }
+ if (!Object.prototype.hasOwnProperty.call(entry, 'allow_delete')) {
+ entry.allow_delete = false;
+ }
+ }
+
+ return { ok: true, raw: parsed, locks: parsed.locks };
+ } catch (error) {
+ return { ok: false, error: `${LOCK_FILE_RELATIVE} is invalid JSON: ${error.message}` };
+ }
+}
+
+function writeLockState(repoRoot, payload, dryRun) {
+ if (dryRun) return;
+ const lockPath = lockFilePath(repoRoot);
+ fs.mkdirSync(path.dirname(lockPath), { recursive: true });
+ fs.writeFileSync(lockPath, JSON.stringify(payload, null, 2) + '\n', 'utf8');
+}
+
+function removeLegacyPackageScripts(repoRoot, dryRun) {
+ const packagePath = path.join(repoRoot, 'package.json');
+ if (!fs.existsSync(packagePath)) {
+ return { status: 'skipped', file: 'package.json', note: 'package.json not found' };
+ }
+
+ let pkg;
+ try {
+ pkg = JSON.parse(fs.readFileSync(packagePath, 'utf8'));
+ } catch (error) {
+ throw new Error(`Unable to parse package.json in target repo: ${error.message}`);
+ }
+
+ const existingScripts = pkg.scripts && typeof pkg.scripts === 'object'
+ ? pkg.scripts
+ : {};
+ pkg.scripts = existingScripts;
+ let changed = false;
+ for (const [key, value] of Object.entries(LEGACY_MANAGED_PACKAGE_SCRIPTS)) {
+ if (existingScripts[key] === value) {
+ delete existingScripts[key];
+ changed = true;
+ }
+ }
+
+ if (!changed) {
+ return { status: 'unchanged', file: 'package.json', note: 'no Guardex-managed agent:* scripts found' };
+ }
+
+ if (!dryRun) {
+ fs.writeFileSync(packagePath, JSON.stringify(pkg, null, 2) + '\n', 'utf8');
+ }
+
+ return { status: dryRun ? 'would-update' : 'updated', file: 'package.json', note: 'removed Guardex-managed agent:* scripts' };
+}
+
+function installUserLevelAsset(asset, options = {}) {
+ const dryRun = Boolean(options.dryRun);
+ const force = Boolean(options.force);
+ const destinationPath = path.join(GUARDEX_HOME_DIR, asset.destination);
+ const sourceContent = fs.readFileSync(asset.source, 'utf8');
+ const destinationExists = fs.existsSync(destinationPath);
+
+ if (destinationExists) {
+ const existingContent = fs.readFileSync(destinationPath, 'utf8');
+ if (existingContent === sourceContent) {
+ return { status: 'unchanged', file: asset.destination };
+ }
+ if (!force) {
+ return { status: 'skipped-conflict', file: asset.destination };
+ }
+ }
+
+ if (!dryRun) {
+ fs.mkdirSync(path.dirname(destinationPath), { recursive: true });
+ fs.writeFileSync(destinationPath, sourceContent, 'utf8');
+ }
+ return { status: destinationExists ? (dryRun ? 'would-update' : 'updated') : 'created', file: asset.destination };
+}
+
+function removeLegacyManagedRepoFile(repoRoot, relativePath, options = {}) {
+ const dryRun = Boolean(options.dryRun);
+ const force = Boolean(options.force);
+ const absolutePath = path.join(repoRoot, relativePath);
+ if (!fs.existsSync(absolutePath)) {
+ return { status: 'unchanged', file: relativePath, note: 'not present' };
+ }
+ if (!fs.statSync(absolutePath).isFile()) {
+ return { status: 'skipped-conflict', file: relativePath, note: 'not a regular file' };
+ }
+
+ const skillAsset = USER_LEVEL_SKILL_ASSETS.find((asset) => asset.destination === relativePath);
+ if (skillAsset) {
+ const userLevelPath = path.join(GUARDEX_HOME_DIR, skillAsset.destination);
+ if (!fs.existsSync(userLevelPath)) {
+ return { status: 'skipped', file: relativePath, note: 'user-level replacement not installed' };
+ }
+ }
+
+ const templateRelative = skillAsset
+ ? skillAsset.source.slice(TEMPLATE_ROOT.length + 1)
+ : relativePath.replace(/^\./, '');
+ const sourcePath = path.join(TEMPLATE_ROOT, templateRelative);
+ if (!fs.existsSync(sourcePath)) {
+ return { status: 'skipped', file: relativePath, note: 'template source missing' };
+ }
+
+ const sourceContent = fs.readFileSync(sourcePath, 'utf8');
+ const existingContent = fs.readFileSync(absolutePath, 'utf8');
+ if (existingContent !== sourceContent && !force) {
+ return { status: 'skipped-conflict', file: relativePath, note: 'local edits differ from managed template' };
+ }
+
+ if (!dryRun) {
+ fs.rmSync(absolutePath, { force: true });
+ }
+ return { status: dryRun ? 'would-remove' : 'removed', file: relativePath };
+}
+
+function ensureAgentsSnippet(repoRoot, dryRun) {
+ const agentsPath = path.join(repoRoot, 'AGENTS.md');
+ const snippet = fs.readFileSync(path.join(TEMPLATE_ROOT, 'AGENTS.multiagent-safety.md'), 'utf8').trimEnd();
+ const managedRegex = new RegExp(
+ `${AGENTS_MARKER_START.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}[\\s\\S]*?${AGENTS_MARKER_END.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}`,
+ 'm',
+ );
+
+ if (!fs.existsSync(agentsPath)) {
+ if (!dryRun) {
+ fs.writeFileSync(agentsPath, `# AGENTS\n\n${snippet}\n`, 'utf8');
+ }
+ return { status: 'created', file: 'AGENTS.md' };
+ }
+
+ const existing = fs.readFileSync(agentsPath, 'utf8');
+ if (managedRegex.test(existing)) {
+ const next = existing.replace(managedRegex, snippet);
+ if (next === existing) {
+ return { status: 'unchanged', file: 'AGENTS.md' };
+ }
+ if (!dryRun) {
+ fs.writeFileSync(agentsPath, next, 'utf8');
+ }
+ return { status: 'updated', file: 'AGENTS.md', note: 'refreshed gitguardex-managed block' };
+ }
+
+ if (existing.includes(AGENTS_MARKER_START)) {
+ return { status: 'unchanged', file: 'AGENTS.md', note: 'existing marker found without managed end marker' };
+ }
+
+ const separator = existing.endsWith('\n') ? '\n' : '\n\n';
+ if (!dryRun) {
+ fs.writeFileSync(agentsPath, `${existing}${separator}${snippet}\n`, 'utf8');
+ }
+
+ return { status: 'updated', file: 'AGENTS.md' };
+}
+
+function ensureManagedGitignore(repoRoot, dryRun) {
+ const gitignorePath = path.join(repoRoot, '.gitignore');
+ const managedBlock = [
+ GITIGNORE_MARKER_START,
+ ...require('../context').MANAGED_GITIGNORE_PATHS,
+ GITIGNORE_MARKER_END,
+ ].join('\n');
+ const managedRegex = new RegExp(
+ `${GITIGNORE_MARKER_START.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}[\\s\\S]*?${GITIGNORE_MARKER_END.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}`,
+ 'm',
+ );
+
+ if (!fs.existsSync(gitignorePath)) {
+ if (!dryRun) {
+ fs.writeFileSync(gitignorePath, `${managedBlock}\n`, 'utf8');
+ }
+ return { status: 'created', file: '.gitignore', note: 'added gitguardex-managed entries' };
+ }
+
+ const existing = fs.readFileSync(gitignorePath, 'utf8');
+ if (managedRegex.test(existing)) {
+ const next = existing.replace(managedRegex, managedBlock);
+ if (next === existing) {
+ return { status: 'unchanged', file: '.gitignore' };
+ }
+ if (!dryRun) {
+ fs.writeFileSync(gitignorePath, next, 'utf8');
+ }
+ return { status: 'updated', file: '.gitignore', note: 'refreshed gitguardex-managed entries' };
+ }
+
+ const separator = existing.endsWith('\n') ? '\n' : '\n\n';
+ if (!dryRun) {
+ fs.writeFileSync(gitignorePath, `${existing}${separator}${managedBlock}\n`, 'utf8');
+ }
+ return { status: 'updated', file: '.gitignore', note: 'appended gitguardex-managed entries' };
+}
+
+function stripJsonComments(source) {
+ let result = '';
+ let inString = false;
+ let escapeNext = false;
+ let inLineComment = false;
+ let inBlockComment = false;
+
+ for (let index = 0; index < source.length; index += 1) {
+ const current = source[index];
+ const next = source[index + 1];
+
+ if (inLineComment) {
+ if (current === '\n' || current === '\r') {
+ inLineComment = false;
+ result += current;
+ }
+ continue;
+ }
+
+ if (inBlockComment) {
+ if (current === '*' && next === '/') {
+ inBlockComment = false;
+ index += 1;
+ continue;
+ }
+ if (current === '\n' || current === '\r') {
+ result += current;
+ }
+ continue;
+ }
+
+ if (inString) {
+ result += current;
+ if (escapeNext) {
+ escapeNext = false;
+ } else if (current === '\\') {
+ escapeNext = true;
+ } else if (current === '"') {
+ inString = false;
+ }
+ continue;
+ }
+
+ if (current === '"') {
+ inString = true;
+ result += current;
+ continue;
+ }
+
+ if (current === '/' && next === '/') {
+ inLineComment = true;
+ index += 1;
+ continue;
+ }
+
+ if (current === '/' && next === '*') {
+ inBlockComment = true;
+ index += 1;
+ continue;
+ }
+
+ result += current;
+ }
+
+ return result;
+}
+
+function stripJsonTrailingCommas(source) {
+ let result = '';
+ let inString = false;
+ let escapeNext = false;
+
+ for (let index = 0; index < source.length; index += 1) {
+ const current = source[index];
+
+ if (inString) {
+ result += current;
+ if (escapeNext) {
+ escapeNext = false;
+ } else if (current === '\\') {
+ escapeNext = true;
+ } else if (current === '"') {
+ inString = false;
+ }
+ continue;
+ }
+
+ if (current === '"') {
+ inString = true;
+ result += current;
+ continue;
+ }
+
+ if (current === ',') {
+ let lookahead = index + 1;
+ while (lookahead < source.length && /\s/.test(source[lookahead])) {
+ lookahead += 1;
+ }
+ if (source[lookahead] === '}' || source[lookahead] === ']') {
+ continue;
+ }
+ }
+
+ result += current;
+ }
+
+ return result;
+}
+
+function parseJsonObjectLikeFile(source, relativePath) {
+ let parsed;
+ try {
+ parsed = JSON.parse(stripJsonTrailingCommas(stripJsonComments(source)));
+ } catch (error) {
+ throw new Error(`Unable to parse ${relativePath} as JSON or JSONC: ${error.message}`);
+ }
+
+ if (!parsed || typeof parsed !== 'object' || Array.isArray(parsed)) {
+ throw new Error(`${relativePath} must contain a top-level object.`);
+ }
+
+ return parsed;
+}
+
+function uniqueStringList(values) {
+ const seen = new Set();
+ const result = [];
+
+ for (const value of values) {
+ if (typeof value !== 'string' || seen.has(value)) {
+ continue;
+ }
+ seen.add(value);
+ result.push(value);
+ }
+
+ return result;
+}
+
+function buildRepoVscodeSettings(existingSettings = {}) {
+ const nextSettings = { ...existingSettings };
+ const existingIgnoredFolders = Array.isArray(existingSettings[REPO_SCAN_IGNORED_FOLDERS_SETTING])
+ ? existingSettings[REPO_SCAN_IGNORED_FOLDERS_SETTING]
+ : [];
+
+ nextSettings[REPO_SCAN_IGNORED_FOLDERS_SETTING] = uniqueStringList([
+ ...existingIgnoredFolders,
+ ...MANAGED_REPO_SCAN_IGNORED_FOLDERS,
+ ]);
+
+ return nextSettings;
+}
+
+function ensureRepoVscodeSettings(repoRoot, dryRun) {
+ const settingsPath = path.join(repoRoot, SHARED_VSCODE_SETTINGS_RELATIVE);
+ const destinationExists = fs.existsSync(settingsPath);
+ const existingContent = destinationExists ? fs.readFileSync(settingsPath, 'utf8') : '';
+ const existingSettings = destinationExists
+ ? parseJsonObjectLikeFile(existingContent, SHARED_VSCODE_SETTINGS_RELATIVE)
+ : {};
+ const nextContent = `${JSON.stringify(buildRepoVscodeSettings(existingSettings), null, 2)}\n`;
+
+ if (destinationExists && existingContent === nextContent) {
+ return { status: 'unchanged', file: SHARED_VSCODE_SETTINGS_RELATIVE };
+ }
+
+ ensureParentDir(repoRoot, settingsPath, dryRun);
+ if (!dryRun) {
+ fs.writeFileSync(settingsPath, nextContent, 'utf8');
+ }
+
+ return {
+ status: destinationExists ? 'updated' : 'created',
+ file: SHARED_VSCODE_SETTINGS_RELATIVE,
+ note: 'shared VS Code repo scan ignores for Guardex worktrees',
+ };
+}
+
+function normalizeWorkspacePath(relativePath) {
+ return String(relativePath || '.').replace(/\\/g, '/');
+}
+
+function buildParentWorkspaceView(repoRoot) {
+ const parentDir = path.dirname(repoRoot);
+ const workspaceFileName = `${path.basename(repoRoot)}-branches.code-workspace`;
+ const workspacePath = path.join(parentDir, workspaceFileName);
+ const repoRelativePath = normalizeWorkspacePath(path.relative(parentDir, repoRoot) || '.');
+
+ return {
+ workspacePath,
+ payload: {
+ folders: [
+ { path: repoRelativePath },
+ ...AGENT_WORKTREE_RELATIVE_DIRS.map((relativeDir) => ({
+ path: normalizeWorkspacePath(
+ path.join(repoRelativePath === '.' ? '' : repoRelativePath, relativeDir),
+ ),
+ })),
+ ],
+ settings: {
+ 'scm.alwaysShowRepositories': true,
+ },
+ },
+ };
+}
+
+function ensureParentWorkspaceView(repoRoot, dryRun) {
+ const { workspacePath, payload } = buildParentWorkspaceView(repoRoot);
+ const operationFile = path.relative(repoRoot, workspacePath) || path.basename(workspacePath);
+ const nextContent = `${JSON.stringify(payload, null, 2)}\n`;
+ const note = 'parent VS Code workspace view';
+
+ if (!fs.existsSync(workspacePath)) {
+ if (!dryRun) {
+ fs.writeFileSync(workspacePath, nextContent, 'utf8');
+ }
+ return { status: dryRun ? 'would-create' : 'created', file: operationFile, note };
+ }
+
+ const currentContent = fs.readFileSync(workspacePath, 'utf8');
+ if (currentContent === nextContent) {
+ return { status: 'unchanged', file: operationFile, note };
+ }
+
+ if (!dryRun) {
+ fs.writeFileSync(workspacePath, nextContent, 'utf8');
+ }
+ return { status: dryRun ? 'would-update' : 'updated', file: operationFile, note };
+}
+
+function configureHooks(repoRoot, dryRun) {
+ if (dryRun) {
+ return { status: 'would-set', key: 'core.hooksPath', value: '.githooks' };
+ }
+
+ const result = run('git', ['-C', repoRoot, 'config', 'core.hooksPath', '.githooks']);
+ if (result.status !== 0) {
+ throw new Error(`Failed to set git hooksPath: ${(result.stderr || '').trim()}`);
+ }
+
+ return { status: 'set', key: 'core.hooksPath', value: '.githooks' };
+}
+
function printOperations(title, payload, dryRun = false) {
console.log(`[${TOOL_NAME}] ${title}: ${payload.repoRoot}`);
for (const operation of payload.operations) {
@@ -135,6 +791,8 @@ function printStandaloneOperations(title, rootLabel, operations, dryRun = false)
}
module.exports = {
+ HOOK_NAMES,
+ LOCK_FILE_RELATIVE,
toDestinationPath,
ensureParentDir,
ensureExecutable,
@@ -143,6 +801,28 @@ module.exports = {
renderShellDispatchShim,
renderPythonDispatchShim,
managedForceConflictMessage,
+ renderManagedFile,
+ ensureGeneratedScriptShim,
+ ensureHookShim,
+ copyTemplateFile,
+ ensureTemplateFilePresent,
+ ensureOmxScaffold,
+ ensureLockRegistry,
+ lockStateOrError,
+ writeLockState,
+ removeLegacyPackageScripts,
+ installUserLevelAsset,
+ removeLegacyManagedRepoFile,
+ ensureAgentsSnippet,
+ ensureManagedGitignore,
+ stripJsonComments,
+ stripJsonTrailingCommas,
+ parseJsonObjectLikeFile,
+ buildRepoVscodeSettings,
+ ensureRepoVscodeSettings,
+ buildParentWorkspaceView,
+ ensureParentWorkspaceView,
+ configureHooks,
printOperations,
printStandaloneOperations,
};
diff --git a/src/toolchain/index.js b/src/toolchain/index.js
index ea746ec..0a6f871 100644
--- a/src/toolchain/index.js
+++ b/src/toolchain/index.js
@@ -1,223 +1,667 @@
-function createToolchainApi(deps) {
- const {
- TOOL_NAME,
- NPM_BIN,
- NPX_BIN,
- packageJson,
- OPENSPEC_PACKAGE,
- OPENSPEC_BIN,
- GLOBAL_TOOLCHAIN_PACKAGES,
- parseAutoApproval,
- isInteractiveTerminal,
- promptYesNoStrict,
- run,
- checkForGuardexUpdate,
- printUpdateAvailableBanner,
- readInstalledGuardexVersion,
- restartIntoUpdatedGuardex,
- checkForOpenSpecPackageUpdate,
- printOpenSpecUpdateAvailableBanner,
- resolveGlobalInstallApproval,
- detectGlobalToolchainPackages,
- detectOptionalLocalCompanionTools,
- formatGlobalToolchainServiceName,
- askGlobalInstallForMissing,
- } = deps;
-
- function maybeSelfUpdateBeforeStatus() {
- const check = checkForGuardexUpdate();
- if (!check.checked || !check.updateAvailable) {
- return;
- }
+const {
+ fs,
+ path,
+ cp,
+ packageJson,
+ TOOL_NAME,
+ SHORT_TOOL_NAME,
+ OPENSPEC_PACKAGE,
+ NPX_BIN,
+ GUARDEX_HOME_DIR,
+ GLOBAL_TOOLCHAIN_SERVICES,
+ GLOBAL_TOOLCHAIN_PACKAGES,
+ OPTIONAL_LOCAL_COMPANION_TOOLS,
+ REQUIRED_SYSTEM_TOOLS,
+ NPM_BIN,
+ OPENSPEC_BIN,
+ envFlagIsTruthy,
+} = require('../context');
+const { run } = require('../core/runtime');
+const { colorize } = require('../output');
+
+function isInteractiveTerminal() {
+ return Boolean(process.stdin.isTTY && process.stdout.isTTY);
+}
- printUpdateAvailableBanner(check.current, check.latest);
+const stdinWaitArray = new Int32Array(new SharedArrayBuffer(4));
- const autoApproval = parseAutoApproval('GUARDEX_AUTO_UPDATE_APPROVAL');
- const interactive = isInteractiveTerminal();
+function sleepSyncMs(milliseconds) {
+ Atomics.wait(stdinWaitArray, 0, 0, milliseconds);
+}
- if (!interactive && autoApproval == null) {
- console.log(`[${TOOL_NAME}] Non-interactive shell; skipping auto-update prompt.`);
- return;
+function readSingleLineFromStdin() {
+ let input = '';
+ const buffer = Buffer.alloc(1);
+
+ while (true) {
+ let bytesRead = 0;
+ try {
+ bytesRead = fs.readSync(process.stdin.fd, buffer, 0, 1);
+ } catch (error) {
+ if (error && ['EAGAIN', 'EWOULDBLOCK', 'EINTR'].includes(error.code)) {
+ sleepSyncMs(15);
+ continue;
+ }
+ return input;
}
- const shouldUpdate = interactive
- ? promptYesNoStrict(
- `Update now? (${NPM_BIN} i -g ${packageJson.name}@latest)`,
- )
- : autoApproval;
+ if (bytesRead === 0) {
+ if (process.stdin.isTTY) {
+ sleepSyncMs(15);
+ continue;
+ }
+ return input;
+ }
- if (!shouldUpdate) {
- console.log(`[${TOOL_NAME}] Skipped update.`);
- return;
+ const char = buffer.toString('utf8', 0, bytesRead);
+ if (char === '\n' || char === '\r') {
+ return input;
}
+ input += char;
+ }
+}
- const installResult = run(NPM_BIN, ['i', '-g', `${packageJson.name}@latest`], { stdio: 'inherit' });
- if (installResult.status !== 0) {
- console.log(`[${TOOL_NAME}] ⚠️ Update failed. You can retry manually.`);
- return;
+function parseAutoApproval(name) {
+ const raw = process.env[name];
+ if (raw == null) return null;
+ const normalized = String(raw).trim().toLowerCase();
+ if (['1', 'true', 'yes', 'y', 'on'].includes(normalized)) return true;
+ if (['0', 'false', 'no', 'n', 'off'].includes(normalized)) return false;
+ return null;
+}
+
+function parseVersionString(version) {
+ const match = String(version || '').trim().match(/^v?(\d+)\.(\d+)\.(\d+)/);
+ if (!match) return null;
+ return [
+ Number.parseInt(match[1], 10),
+ Number.parseInt(match[2], 10),
+ Number.parseInt(match[3], 10),
+ ];
+}
+
+function compareParsedVersions(left, right) {
+ if (!left || !right) return 0;
+ for (let index = 0; index < Math.max(left.length, right.length); index += 1) {
+ const leftValue = left[index] || 0;
+ const rightValue = right[index] || 0;
+ if (leftValue > rightValue) return 1;
+ if (leftValue < rightValue) return -1;
+ }
+ return 0;
+}
+
+function isNewerVersion(latest, current) {
+ const latestParts = parseVersionString(latest);
+ const currentParts = parseVersionString(current);
+
+ if (!latestParts || !currentParts) {
+ return String(latest || '').trim() !== String(current || '').trim();
+ }
+
+ return compareParsedVersions(latestParts, currentParts) > 0;
+}
+
+function parseNpmVersionOutput(stdout) {
+ const trimmed = String(stdout || '').trim();
+ if (!trimmed) return '';
+
+ try {
+ const parsed = JSON.parse(trimmed);
+ if (Array.isArray(parsed)) {
+ return String(parsed[parsed.length - 1] || '').trim();
}
+ return String(parsed || '').trim();
+ } catch {
+ const firstLine = trimmed.split('\n').map((line) => line.trim()).find(Boolean);
+ return firstLine || '';
+ }
+}
- const postInstallVersion = readInstalledGuardexVersion();
- if (postInstallVersion != null && postInstallVersion !== check.latest) {
- console.log(
- `[${TOOL_NAME}] Installed version is still ${postInstallVersion} (expected ${check.latest}). ` +
- `Retrying with pinned version ${check.latest}…`,
- );
- const pinnedResult = run(
- NPM_BIN,
- ['i', '-g', `${packageJson.name}@${check.latest}`],
- { stdio: 'inherit' },
- );
- if (pinnedResult.status !== 0) {
- console.log(
- `[${TOOL_NAME}] ⚠️ Pinned retry failed. Run manually: ${NPM_BIN} i -g ${packageJson.name}@${check.latest}`,
- );
- return;
- }
- const pinnedVersion = readInstalledGuardexVersion();
- if (pinnedVersion != null && pinnedVersion !== check.latest) {
- console.log(
- `[${TOOL_NAME}] ⚠️ On-disk version still ${pinnedVersion} after pinned retry. ` +
- `Investigate: ${NPM_BIN} root -g && ${NPM_BIN} cache verify`,
- );
- return;
+function checkForGuardexUpdate() {
+ if (envFlagIsTruthy(process.env.GUARDEX_SKIP_UPDATE_CHECK)) {
+ return { checked: false, reason: 'disabled' };
+ }
+
+ const forceCheck = envFlagIsTruthy(process.env.GUARDEX_FORCE_UPDATE_CHECK);
+ if (!forceCheck && !isInteractiveTerminal()) {
+ return { checked: false, reason: 'non-interactive' };
+ }
+
+ const result = run(NPM_BIN, ['view', packageJson.name, 'version', '--json'], { timeout: 5000 });
+ if (result.status !== 0) {
+ return { checked: false, reason: 'lookup-failed' };
+ }
+
+ const latest = parseNpmVersionOutput(result.stdout);
+ if (!latest) {
+ return { checked: false, reason: 'invalid-latest-version' };
+ }
+
+ return {
+ checked: true,
+ current: packageJson.version,
+ latest,
+ updateAvailable: isNewerVersion(latest, packageJson.version),
+ };
+}
+
+function printUpdateAvailableBanner(current, latest) {
+ const title = colorize('UPDATE AVAILABLE', '1;33');
+ console.log(`[${TOOL_NAME}] ${title}`);
+ console.log(`[${TOOL_NAME}] Current: ${current}`);
+ console.log(`[${TOOL_NAME}] Latest : ${latest}`);
+ console.log(`[${TOOL_NAME}] Command: ${NPM_BIN} i -g ${packageJson.name}@latest`);
+}
+
+function readInstalledGuardexVersion() {
+ const installInfo = readInstalledGuardexInstallInfo();
+ return installInfo ? installInfo.version : null;
+}
+
+function readInstalledGuardexInstallInfo() {
+ try {
+ const rootResult = run(NPM_BIN, ['root', '-g'], { timeout: 5000 });
+ if (rootResult.status !== 0) {
+ return null;
+ }
+ const globalRoot = String(rootResult.stdout || '').trim();
+ if (!globalRoot) {
+ return null;
+ }
+ const installedPkgPath = path.join(globalRoot, packageJson.name, 'package.json');
+ if (!fs.existsSync(installedPkgPath)) {
+ return null;
+ }
+ const parsed = JSON.parse(fs.readFileSync(installedPkgPath, 'utf8'));
+ if (parsed && typeof parsed.version === 'string') {
+ let binRelative = null;
+ if (typeof parsed.bin === 'string') {
+ binRelative = parsed.bin;
+ } else if (parsed.bin && typeof parsed.bin === 'object') {
+ const invokedName = path.basename(process.argv[1] || '');
+ binRelative =
+ parsed.bin[invokedName] ||
+ parsed.bin[SHORT_TOOL_NAME] ||
+ Object.values(parsed.bin).find((value) => typeof value === 'string') ||
+ null;
}
+ const packageRoot = path.dirname(installedPkgPath);
+ const binPath = binRelative ? path.join(packageRoot, binRelative) : null;
+ return {
+ version: parsed.version,
+ packageRoot,
+ binPath,
+ };
}
+ } catch {
+ return null;
+ }
+ return null;
+}
- console.log(`[${TOOL_NAME}] ✅ Updated to latest published version.`);
- restartIntoUpdatedGuardex(check.latest);
+function restartIntoUpdatedGuardex(expectedVersion) {
+ const installInfo = readInstalledGuardexInstallInfo();
+ if (!installInfo || installInfo.version !== expectedVersion || installInfo.version === packageJson.version) {
+ return;
+ }
+ if (!installInfo.binPath || !fs.existsSync(installInfo.binPath)) {
+ console.log(`[${TOOL_NAME}] Restart required to use ${installInfo.version}. Rerun ${SHORT_TOOL_NAME}.`);
+ return;
}
- function maybeOpenSpecUpdateBeforeStatus() {
- const check = checkForOpenSpecPackageUpdate();
- if (!check.checked || !check.updateAvailable) {
- return;
- }
+ console.log(`[${TOOL_NAME}] Restarting into ${installInfo.version}…`);
+ const restartResult = cp.spawnSync(
+ process.execPath,
+ [installInfo.binPath, ...process.argv.slice(2)],
+ {
+ cwd: process.cwd(),
+ env: {
+ ...process.env,
+ GUARDEX_SKIP_UPDATE_CHECK: '1',
+ },
+ stdio: 'inherit',
+ },
+ );
+ if (restartResult.error) {
+ console.log(
+ `[${TOOL_NAME}] Restart into ${installInfo.version} failed. Rerun ${SHORT_TOOL_NAME}.`,
+ );
+ return;
+ }
+ process.exit(restartResult.status == null ? 0 : restartResult.status);
+}
- printOpenSpecUpdateAvailableBanner(check.current, check.latest);
+function checkForOpenSpecPackageUpdate() {
+ if (envFlagIsTruthy(process.env.GUARDEX_SKIP_OPENSPEC_UPDATE_CHECK)) {
+ return { checked: false, reason: 'disabled' };
+ }
- const autoApproval = parseAutoApproval('GUARDEX_AUTO_OPENSPEC_UPDATE_APPROVAL');
- const interactive = isInteractiveTerminal();
+ const forceCheck = envFlagIsTruthy(process.env.GUARDEX_FORCE_OPENSPEC_UPDATE_CHECK);
+ if (!forceCheck && !isInteractiveTerminal()) {
+ return { checked: false, reason: 'non-interactive' };
+ }
- if (!interactive && autoApproval == null) {
- console.log(`[${TOOL_NAME}] Non-interactive shell; skipping OpenSpec update prompt.`);
- return;
+ const detection = detectGlobalToolchainPackages();
+ if (!detection.ok) {
+ return { checked: false, reason: 'package-detect-failed' };
+ }
+
+ const current = String((detection.installedVersions || {})[OPENSPEC_PACKAGE] || '').trim();
+ if (!current) {
+ return { checked: false, reason: 'not-installed' };
+ }
+
+ const latestResult = run(NPM_BIN, ['view', OPENSPEC_PACKAGE, 'version', '--json'], { timeout: 5000 });
+ if (latestResult.status !== 0) {
+ return { checked: false, reason: 'lookup-failed' };
+ }
+
+ const latest = parseNpmVersionOutput(latestResult.stdout);
+ if (!latest) {
+ return { checked: false, reason: 'invalid-latest-version' };
+ }
+
+ return {
+ checked: true,
+ current,
+ latest,
+ updateAvailable: isNewerVersion(latest, current),
+ };
+}
+
+function printOpenSpecUpdateAvailableBanner(current, latest) {
+ const title = colorize('OPENSPEC UPDATE AVAILABLE', '1;33');
+ console.log(`[${TOOL_NAME}] ${title}`);
+ console.log(`[${TOOL_NAME}] Current: ${current}`);
+ console.log(`[${TOOL_NAME}] Latest : ${latest}`);
+ console.log(`[${TOOL_NAME}] Command: ${NPM_BIN} i -g ${OPENSPEC_PACKAGE}@latest`);
+ console.log(`[${TOOL_NAME}] Then : ${OPENSPEC_BIN} update`);
+}
+
+function promptYesNoStrict(question) {
+ while (true) {
+ process.stdout.write(`${question} [y/n] `);
+ const answer = readSingleLineFromStdin().trim().toLowerCase();
+
+ if (answer === 'y' || answer === 'yes') {
+ process.stdout.write('\n');
+ return true;
+ }
+ if (answer === 'n' || answer === 'no') {
+ process.stdout.write('\n');
+ return false;
}
- const shouldUpdate = interactive
- ? promptYesNoStrict(
- `Update OpenSpec now? (${NPM_BIN} i -g ${OPENSPEC_PACKAGE}@latest && ${OPENSPEC_BIN} update)`,
- )
- : autoApproval;
+ process.stdout.write('Please answer with y or n.\n');
+ }
+}
- if (!shouldUpdate) {
- console.log(`[${TOOL_NAME}] Skipped OpenSpec update.`);
- return;
+function resolveGlobalInstallApproval(options) {
+ if (options.yesGlobalInstall && options.noGlobalInstall) {
+ throw new Error('Cannot use both --yes-global-install and --no-global-install');
+ }
+
+ if (options.yesGlobalInstall) {
+ return { approved: true, source: 'flag' };
+ }
+
+ if (options.noGlobalInstall) {
+ return { approved: false, source: 'flag' };
+ }
+
+ if (!isInteractiveTerminal()) {
+ return { approved: false, source: 'non-interactive-default' };
+ }
+ return { approved: true, source: 'prompt' };
+}
+
+function getGlobalToolchainService(packageName) {
+ const service = GLOBAL_TOOLCHAIN_SERVICES.find(
+ (candidate) => candidate.packageName === packageName,
+ );
+ return service || { name: packageName, packageName };
+}
+
+function formatGlobalToolchainServiceName(packageName) {
+ return getGlobalToolchainService(packageName).name;
+}
+
+function describeMissingGlobalDependencyWarnings(packageNames) {
+ return packageNames
+ .map((packageName) => getGlobalToolchainService(packageName))
+ .filter((service) => service.dependencyUrl)
+ .map(
+ (service) =>
+ `Guardex needs ${service.name} as a dependency: ${service.dependencyUrl}`,
+ );
+}
+
+function describeCompanionInstallCommands(missingPackages, missingLocalTools) {
+ const commands = [];
+ if (missingPackages.length > 0) {
+ commands.push(`${NPM_BIN} i -g ${missingPackages.join(' ')}`);
+ }
+ for (const tool of missingLocalTools) {
+ commands.push(tool.installCommand);
+ }
+ return commands;
+}
+
+function buildMissingCompanionInstallPrompt(missingPackages, missingLocalTools) {
+ const dependencyWarnings = describeMissingGlobalDependencyWarnings(missingPackages);
+ const installCommands = describeCompanionInstallCommands(missingPackages, missingLocalTools);
+ const dependencyPrefix = dependencyWarnings.length > 0
+ ? `${dependencyWarnings.join(' ')} `
+ : '';
+ return `${dependencyPrefix}Install missing companion tools now? (${installCommands.join(' && ')})`;
+}
+
+function detectGlobalToolchainPackages() {
+ const result = run(NPM_BIN, ['list', '-g', '--depth=0', '--json']);
+ if (result.status !== 0) {
+ const stderr = (result.stderr || '').trim();
+ return {
+ ok: false,
+ error: stderr || 'Unable to detect globally installed npm packages',
+ };
+ }
+
+ let parsed;
+ try {
+ parsed = JSON.parse(result.stdout || '{}');
+ } catch (error) {
+ return {
+ ok: false,
+ error: `Failed to parse npm list output: ${error.message}`,
+ };
+ }
+
+ const dependencyMap = parsed && parsed.dependencies && typeof parsed.dependencies === 'object'
+ ? parsed.dependencies
+ : {};
+ const installedSet = new Set(Object.keys(dependencyMap));
+
+ const installed = [];
+ const missing = [];
+ const installedVersions = {};
+ for (const pkg of GLOBAL_TOOLCHAIN_PACKAGES) {
+ if (installedSet.has(pkg)) {
+ installed.push(pkg);
+ const rawVersion = dependencyMap[pkg] && dependencyMap[pkg].version;
+ const version = String(rawVersion || '').trim();
+ if (version) {
+ installedVersions[pkg] = version;
+ }
+ } else {
+ missing.push(pkg);
}
+ }
+
+ return { ok: true, installed, missing, installedVersions };
+}
+
+function detectRequiredSystemTools() {
+ const services = [];
+ for (const tool of REQUIRED_SYSTEM_TOOLS) {
+ const result = run(tool.command, ['--version']);
+ const active = result.status === 0;
+ const rawReason = result.error && result.error.code
+ ? result.error.code
+ : (result.stderr || '').trim();
+ const reason = rawReason.split('\n')[0] || '';
+ services.push({
+ name: tool.name,
+ displayName: tool.displayName || tool.name,
+ command: tool.command,
+ installHint: tool.installHint,
+ status: active ? 'active' : 'inactive',
+ reason,
+ });
+ }
+ return services;
+}
+
+function detectOptionalLocalCompanionTools() {
+ return OPTIONAL_LOCAL_COMPANION_TOOLS.map((tool) => {
+ const detectedPath = tool.candidatePaths
+ .map((relativePath) => path.join(GUARDEX_HOME_DIR, relativePath))
+ .find((candidatePath) => fs.existsSync(candidatePath));
+ return {
+ name: tool.name,
+ displayName: tool.displayName || tool.name,
+ installCommand: tool.installCommand,
+ installArgs: [...tool.installArgs],
+ status: detectedPath ? 'active' : 'inactive',
+ detectedPath: detectedPath || null,
+ };
+ });
+}
+
+function askGlobalInstallForMissing(options, missingPackages, missingLocalTools) {
+ const approval = resolveGlobalInstallApproval(options);
+ if (!approval.approved) {
+ return approval;
+ }
+
+ if (approval.source === 'prompt') {
+ const approved = promptYesNoStrict(
+ buildMissingCompanionInstallPrompt(missingPackages, missingLocalTools),
+ );
+ return { approved, source: 'prompt' };
+ }
+
+ return approval;
+}
+
+function maybeSelfUpdateBeforeStatus() {
+ const check = checkForGuardexUpdate();
+ if (!check.checked || !check.updateAvailable) {
+ return;
+ }
- const installResult = run(NPM_BIN, ['i', '-g', `${OPENSPEC_PACKAGE}@latest`], { stdio: 'inherit' });
- if (installResult.status !== 0) {
- console.log(`[${TOOL_NAME}] ⚠️ OpenSpec npm install failed. You can retry manually.`);
+ printUpdateAvailableBanner(check.current, check.latest);
+
+ const autoApproval = parseAutoApproval('GUARDEX_AUTO_UPDATE_APPROVAL');
+ const interactive = isInteractiveTerminal();
+
+ if (!interactive && autoApproval == null) {
+ console.log(`[${TOOL_NAME}] Non-interactive shell; skipping auto-update prompt.`);
+ return;
+ }
+
+ const shouldUpdate = interactive
+ ? promptYesNoStrict(
+ `Update now? (${NPM_BIN} i -g ${packageJson.name}@latest)`,
+ )
+ : autoApproval;
+
+ if (!shouldUpdate) {
+ console.log(`[${TOOL_NAME}] Skipped update.`);
+ return;
+ }
+
+ const installResult = run(NPM_BIN, ['i', '-g', `${packageJson.name}@latest`], { stdio: 'inherit' });
+ if (installResult.status !== 0) {
+ console.log(`[${TOOL_NAME}] Update failed. You can retry manually.`);
+ return;
+ }
+
+ const postInstallVersion = readInstalledGuardexVersion();
+ if (postInstallVersion != null && postInstallVersion !== check.latest) {
+ console.log(
+ `[${TOOL_NAME}] Installed version is still ${postInstallVersion} (expected ${check.latest}). ` +
+ `Retrying with pinned version ${check.latest}...`,
+ );
+ const pinnedResult = run(
+ NPM_BIN,
+ ['i', '-g', `${packageJson.name}@${check.latest}`],
+ { stdio: 'inherit' },
+ );
+ if (pinnedResult.status !== 0) {
+ console.log(
+ `[${TOOL_NAME}] Pinned retry failed. Run manually: ${NPM_BIN} i -g ${packageJson.name}@${check.latest}`,
+ );
return;
}
-
- const toolUpdateResult = run(OPENSPEC_BIN, ['update'], { stdio: 'inherit' });
- if (toolUpdateResult.status !== 0) {
- console.log(`[${TOOL_NAME}] ⚠️ OpenSpec tool update failed. Run '${OPENSPEC_BIN} update' manually.`);
+ const pinnedVersion = readInstalledGuardexVersion();
+ if (pinnedVersion != null && pinnedVersion !== check.latest) {
+ console.log(
+ `[${TOOL_NAME}] On-disk version still ${pinnedVersion} after pinned retry. ` +
+ `Investigate: ${NPM_BIN} root -g && ${NPM_BIN} cache verify`,
+ );
return;
}
+ }
+
+ console.log(`[${TOOL_NAME}] Updated to latest published version.`);
+ restartIntoUpdatedGuardex(check.latest);
+}
- console.log(`[${TOOL_NAME}] ✅ OpenSpec updated to latest package and tool plugins refreshed.`);
+function maybeOpenSpecUpdateBeforeStatus() {
+ const check = checkForOpenSpecPackageUpdate();
+ if (!check.checked || !check.updateAvailable) {
+ return;
}
- function installGlobalToolchain(options) {
- const approval = resolveGlobalInstallApproval(options);
- if (approval.source === 'flag' && !approval.approved) {
- return {
- status: 'skipped',
- reason: approval.source,
- missingPackages: [],
- missingLocalTools: [],
- };
- }
+ printOpenSpecUpdateAvailableBanner(check.current, check.latest);
- if (options.dryRun) {
- return { status: 'dry-run-skip' };
- }
+ const autoApproval = parseAutoApproval('GUARDEX_AUTO_OPENSPEC_UPDATE_APPROVAL');
+ const interactive = isInteractiveTerminal();
- const detection = detectGlobalToolchainPackages();
- const localCompanionTools = detectOptionalLocalCompanionTools();
- if (!detection.ok) {
- console.log(`[${TOOL_NAME}] ⚠️ Could not detect global packages: ${detection.error}`);
- } else {
- if (detection.installed.length > 0) {
- console.log(
- `[${TOOL_NAME}] Already installed globally: ` +
- `${detection.installed.map((pkg) => formatGlobalToolchainServiceName(pkg)).join(', ')}`,
- );
- }
- const installedLocalTools = localCompanionTools
- .filter((tool) => tool.status === 'active')
- .map((tool) => tool.name);
- if (installedLocalTools.length > 0) {
- console.log(`[${TOOL_NAME}] Already installed locally: ${installedLocalTools.join(', ')}`);
- }
- if (detection.missing.length === 0 && localCompanionTools.every((tool) => tool.status === 'active')) {
- return { status: 'already-installed' };
- }
+ if (!interactive && autoApproval == null) {
+ console.log(`[${TOOL_NAME}] Non-interactive shell; skipping OpenSpec update prompt.`);
+ return;
+ }
+
+ const shouldUpdate = interactive
+ ? promptYesNoStrict(
+ `Update OpenSpec now? (${NPM_BIN} i -g ${OPENSPEC_PACKAGE}@latest && ${OPENSPEC_BIN} update)`,
+ )
+ : autoApproval;
+
+ if (!shouldUpdate) {
+ console.log(`[${TOOL_NAME}] Skipped OpenSpec update.`);
+ return;
+ }
+
+ const installResult = run(NPM_BIN, ['i', '-g', `${OPENSPEC_PACKAGE}@latest`], { stdio: 'inherit' });
+ if (installResult.status !== 0) {
+ console.log(`[${TOOL_NAME}] OpenSpec npm install failed. You can retry manually.`);
+ return;
+ }
+
+ const toolUpdateResult = run(OPENSPEC_BIN, ['update'], { stdio: 'inherit' });
+ if (toolUpdateResult.status !== 0) {
+ console.log(`[${TOOL_NAME}] OpenSpec tool update failed. Run '${OPENSPEC_BIN} update' manually.`);
+ return;
+ }
+
+ console.log(`[${TOOL_NAME}] OpenSpec updated to latest package and tool plugins refreshed.`);
+}
+
+function installGlobalToolchain(options) {
+ const approval = resolveGlobalInstallApproval(options);
+ if (approval.source === 'flag' && !approval.approved) {
+ return {
+ status: 'skipped',
+ reason: approval.source,
+ missingPackages: [],
+ missingLocalTools: [],
+ };
+ }
+
+ if (options.dryRun) {
+ return { status: 'dry-run-skip' };
+ }
+
+ const detection = detectGlobalToolchainPackages();
+ const localCompanionTools = detectOptionalLocalCompanionTools();
+ if (!detection.ok) {
+ console.log(`[${TOOL_NAME}] Could not detect global packages: ${detection.error}`);
+ } else {
+ if (detection.installed.length > 0) {
+ console.log(
+ `[${TOOL_NAME}] Already installed globally: ` +
+ `${detection.installed.map((pkg) => formatGlobalToolchainServiceName(pkg)).join(', ')}`,
+ );
+ }
+ const installedLocalTools = localCompanionTools
+ .filter((tool) => tool.status === 'active')
+ .map((tool) => tool.name);
+ if (installedLocalTools.length > 0) {
+ console.log(`[${TOOL_NAME}] Already installed locally: ${installedLocalTools.join(', ')}`);
}
+ if (detection.missing.length === 0 && localCompanionTools.every((tool) => tool.status === 'active')) {
+ return { status: 'already-installed' };
+ }
+ }
- const missingPackages = detection.ok ? detection.missing : [...GLOBAL_TOOLCHAIN_PACKAGES];
- const missingLocalTools = localCompanionTools.filter((tool) => tool.status !== 'active');
- const installApproval = askGlobalInstallForMissing(options, missingPackages, missingLocalTools);
- if (!installApproval.approved) {
+ const missingPackages = detection.ok ? detection.missing : [...GLOBAL_TOOLCHAIN_PACKAGES];
+ const missingLocalTools = localCompanionTools.filter((tool) => tool.status !== 'active');
+ const installApproval = askGlobalInstallForMissing(options, missingPackages, missingLocalTools);
+ if (!installApproval.approved) {
+ return {
+ status: 'skipped',
+ reason: installApproval.source,
+ missingPackages,
+ missingLocalTools,
+ };
+ }
+
+ const installed = [];
+ if (missingPackages.length > 0) {
+ console.log(
+ `[${TOOL_NAME}] Installing global toolchain: npm i -g ${missingPackages.join(' ')}`,
+ );
+ const result = run(NPM_BIN, ['i', '-g', ...missingPackages], { stdio: 'inherit' });
+ if (result.status !== 0) {
+ const stderr = (result.stderr || '').trim();
return {
- status: 'skipped',
- reason: installApproval.source,
- missingPackages,
- missingLocalTools,
+ status: 'failed',
+ reason: stderr || 'npm global install failed',
};
}
+ installed.push(...missingPackages);
+ }
- const installed = [];
- if (missingPackages.length > 0) {
- console.log(
- `[${TOOL_NAME}] Installing global toolchain: npm i -g ${missingPackages.join(' ')}`,
- );
- const result = run(NPM_BIN, ['i', '-g', ...missingPackages], { stdio: 'inherit' });
- if (result.status !== 0) {
- const stderr = (result.stderr || '').trim();
- return {
- status: 'failed',
- reason: stderr || 'npm global install failed',
- };
- }
- installed.push(...missingPackages);
- }
-
- for (const tool of missingLocalTools) {
- console.log(`[${TOOL_NAME}] Installing local companion tool: ${tool.installCommand}`);
- const result = run(NPX_BIN, tool.installArgs, { stdio: 'inherit' });
- if (result.status !== 0) {
- const stderr = (result.stderr || '').trim();
- return {
- status: 'failed',
- reason: stderr || `${tool.name} install failed`,
- };
- }
- installed.push(tool.name);
+ for (const tool of missingLocalTools) {
+ console.log(`[${TOOL_NAME}] Installing local companion tool: ${tool.installCommand}`);
+ const result = run(NPX_BIN, tool.installArgs, { stdio: 'inherit' });
+ if (result.status !== 0) {
+ const stderr = (result.stderr || '').trim();
+ return {
+ status: 'failed',
+ reason: stderr || `${tool.name} install failed`,
+ };
}
-
- return { status: 'installed', packages: installed };
+ installed.push(tool.name);
}
- return {
- maybeSelfUpdateBeforeStatus,
- maybeOpenSpecUpdateBeforeStatus,
- installGlobalToolchain,
- };
+ return { status: 'installed', packages: installed };
}
module.exports = {
- createToolchainApi,
+ isInteractiveTerminal,
+ parseAutoApproval,
+ checkForGuardexUpdate,
+ printUpdateAvailableBanner,
+ readInstalledGuardexVersion,
+ readInstalledGuardexInstallInfo,
+ restartIntoUpdatedGuardex,
+ checkForOpenSpecPackageUpdate,
+ printOpenSpecUpdateAvailableBanner,
+ promptYesNoStrict,
+ resolveGlobalInstallApproval,
+ getGlobalToolchainService,
+ formatGlobalToolchainServiceName,
+ describeMissingGlobalDependencyWarnings,
+ describeCompanionInstallCommands,
+ detectGlobalToolchainPackages,
+ detectRequiredSystemTools,
+ detectOptionalLocalCompanionTools,
+ askGlobalInstallForMissing,
+ maybeSelfUpdateBeforeStatus,
+ maybeOpenSpecUpdateBeforeStatus,
+ installGlobalToolchain,
};
diff --git a/test/metadata.test.js b/test/metadata.test.js
index 397da9e..e824895 100644
--- a/test/metadata.test.js
+++ b/test/metadata.test.js
@@ -172,11 +172,12 @@ test('cli main delegates extracted seams and keeps doctor single-source', () =>
assert.equal(doctorDefs.length, 1, 'doctor() must not be duplicated');
assert.doesNotMatch(cliSource, /function parseSetupArgs\(/);
assert.doesNotMatch(cliSource, /function parseDoctorArgs\(/);
- assert.match(cliSource, /function assertProtectedMainWriteAllowed\(options, commandName\)\s*{\s*return getSandboxApi\(\)\.assertProtectedMainWriteAllowed\(options, commandName\);\s*}/s);
- assert.match(cliSource, /function maybeSelfUpdateBeforeStatus\(\)\s*{\s*return getToolchainApi\(\)\.maybeSelfUpdateBeforeStatus\(\);\s*}/s);
+ assert.doesNotMatch(cliSource, /getSandboxApi|getToolchainApi|getFinishApi/);
+ assert.match(cliSource, /function assertProtectedMainWriteAllowed\(options, commandName\)\s*{\s*return sandboxModule\.assertProtectedMainWriteAllowed\(options, commandName\);\s*}/s);
+ assert.match(cliSource, /function maybeSelfUpdateBeforeStatus\(\)\s*{\s*return toolchainModule\.maybeSelfUpdateBeforeStatus\(\);\s*}/s);
assert.match(cliSource, /function hook\(rawArgs\)\s*{\s*return hooksModule\.hook\(rawArgs, \{/s);
assert.match(cliSource, /function internal\(rawArgs\)\s*{\s*return hooksModule\.internal\(rawArgs, \{/s);
- assert.match(cliSource, /function finish\(rawArgs, defaults = \{\}\)\s*{\s*return getFinishApi\(\)\.finish\(rawArgs, defaults\);\s*}/s);
+ assert.match(cliSource, /function finish\(rawArgs, defaults = \{\}\)\s*{\s*return finishCommands\.finish\(rawArgs, defaults\);\s*}/s);
assert.match(cliSource, /printOperations\('Doctor\/fix', fixPayload, (?:singleRepoOptions|options)\.dryRun\);/);
});
@@ -193,6 +194,6 @@ test('cli main module loads after extracted arg and dispatch seams move out', ()
});
test('worktree-change detection uses normal untracked-file mode', () => {
- const cliSource = fs.readFileSync(path.join(repoRoot, 'src', 'cli', 'main.js'), 'utf8');
- assert.match(cliSource, /'status',\s*'--porcelain',\s*'--untracked-files=normal',\s*'--'/s);
+ const gitSource = fs.readFileSync(path.join(repoRoot, 'src', 'git', 'index.js'), 'utf8');
+ assert.match(gitSource, /'status',\s*'--porcelain',\s*'--untracked-files=normal',\s*'--'/s);
});