diff --git a/.task-memory.json b/.task-memory.json new file mode 100644 index 0000000..9957245 --- /dev/null +++ b/.task-memory.json @@ -0,0 +1,6 @@ +{ + "planning_dir": "docs/planning", + "task_prefix": "TASK", + "min_engagements_to_block": 3, + "session_state_max_age_hours": 24 +} diff --git a/CHANGELOG.md b/CHANGELOG.md index 65919d9..ef24d73 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -25,6 +25,28 @@ Format inspired by [Keep a Changelog](https://keepachangelog.com/en/1.1.0/). RPC just to discard them locally. `console` also accepts `errors: true` via RPC opts so the daemon drops non-error levels before serialising; the Rust CLI uses this on the hot path. +- **Bucket A payload-reduction sprint** — six flags and one new verb + to cut context cost for LLM operators driving ghax (sourced from + the 2026-04-20 jnremache field report): + - `screenshot --full-page` — kebab-case alias for the v0.1 + `--fullPage`, matching the rest of the CLI convention. Both + forms accepted. + - `tabs --filter --fields ` — server-side regex + filter (case-insensitive, matched against url + title) and + field projection (id, title, url, active). Cuts ~200 bytes per + google-product tab. + - `eval --max-bytes ` — caps the stringified eval result at + N utf-8 bytes. On trip, returns `{value, truncated: true, + originalBytes}`; when under cap the shape is unchanged. + - `text --selector --length --skip ` — scoped, + paged page-text dumps. Replaces hand-rolled + `document.body.innerText.substring(...)`. + - `upload <@ref|selector> [,…]` — first-class file + upload verb wrapping Playwright's `locator.setInputFiles`. + Comma-separated paths trigger multi-file mode. + - `snapshot --compact` now suppresses the cursor-interactive + pass when paired with `-i`. Explicit `-C` still forces it on. + Large SPAs shrink measurably in compact mode. 
### Fixed - **Invariant enforcement**: `ctx.refs` is now cleared when the active diff --git a/CLAUDE.md b/CLAUDE.md index b4ad995..c5ded28 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -233,3 +233,42 @@ but "why is it this way" documentation. If a test fails that you can't explain, check the "Daemon restart required" invariant above. It's the single most common source of confusion. + +## Task Memory Integration + +This project uses task-memory for context-preserving task management. + +**Planning Location:** docs/planning/tasks.md +**Notes Location:** docs/planning/notes/TASK-XXX.md (auto-created on SessionStart for every in-progress task) + +### Session Start Protocol + +At the start of EVERY session: +1. The SessionStart hook auto-displays current task + notes summary. +2. If you see "⚠️ CONTEXT GAP DETECTED", recreate findings from the operations log BEFORE coding. +3. For full verification, run `/task-status` — it computes a Context Health Score (0-5). + +### Task vs. Question Triage + +- **TASK** (implement, fix, build, refactor, migrate): Create task in docs/planning/tasks.md FIRST, then work. +- **QUESTION** (what, how, why, explain): Answer directly — no task needed. +- **AMBIGUOUS** ("help me with X", "I'm stuck"): Ask one clarifying question before deciding. 
+ +### Context Preservation Protocol + +The hook handles these automatically — you don't need to remember: +- Logs every WebFetch/WebSearch with response snippet to `**Visual Operations Log**` +- Auto-creates `docs/planning/notes/TASK-XXX.md` skeleton on SessionStart for every in-progress task +- Saves pre-compaction snapshot and appends ops log to notes file at PreCompact +- Blocks Stop if research ops ≥ 2 and notes file is empty/skeleton + +What YOU must still do: +- Fill in Patterns / Gotchas / Decisions in the notes file (hook creates, you synthesize) +- Check off subtasks as you complete them +- Run self-critique before marking Status: done + +### Commit Convention + +Reference the task ID in every commit: `feat: description (TASK-XXX)` + +**Skills:** `/tm-init` (setup) | `/task-memory` (full workflow) | `/task-status` (5-question + health score) diff --git a/README.md b/README.md index 68889ba..fc945e3 100644 --- a/README.md +++ b/README.md @@ -190,25 +190,31 @@ attach [--port N] [--browser edge|chrome|chromium|brave|arc] [--launch] status [--json] detach restart -tabs +tabs [--filter ] [--fields ] + # --filter: case-insensitive regex on url+title + # --fields: id,title,url,active — project only these tab [--quiet] # --quiet = don't bringToFront (agent mode) find # list matching tabs (pipe into `tab`) new-window [url] # new background window, same profile goto back | forward | reload -eval +eval [--max-bytes N] # --max-bytes caps result size to N utf-8 bytes try [] [--css ] [--selector ] [--measure ] [--shot ] -text +text [--selector ] [--length N] [--skip M] + # scoped, paged page-text dumps html [] -screenshot [<@ref|selector>] [--path p] [--fullPage] +screenshot [<@ref|selector>] [--path p] [--full-page] snapshot [-i] [-c] [-d N] [-s ] [-C] [-a] [-o ] + # --compact (-c) also suppresses cursor-interactive + # when combined with -i; use -C to force it on click <@ref|selector> fill <@ref|selector> +upload <@ref|selector> [,…] # wraps setInputFiles press type 
wait viewport -responsive [prefix] [--fullPage] +responsive [prefix] [--full-page] diff is <@ref|selector> xpath [--limit N] # list matching elements with text + box diff --git a/crates/cli/src/dispatch.rs b/crates/cli/src/dispatch.rs index ec5ff1f..b49c2ec 100644 --- a/crates/cli/src/dispatch.rs +++ b/crates/cli/src/dispatch.rs @@ -93,6 +93,15 @@ fn dispatch_inner(cfg: &Config, verb: &str, rest: &[String]) -> Result { simple(cfg, "fill", parsed) } + "upload" => { + let parsed = args::parse(rest); + if parsed.positional.len() < 2 { + eprintln!("Usage: ghax upload <@ref|selector> [,…]"); + return Ok(EXIT_USAGE); + } + simple(cfg, "upload", parsed) + } + "is" => { let parsed = args::parse(rest); let port = state::require_daemon(cfg)?; diff --git a/crates/cli/src/help.rs b/crates/cli/src/help.rs index 506fb21..c01162c 100644 --- a/crates/cli/src/help.rs +++ b/crates/cli/src/help.rs @@ -26,12 +26,13 @@ Tab: try [] [--css ] [--selector ] [--measure ] [--shot ] text html [] - screenshot [<@ref|selector>] [--path

] [--fullPage] + screenshot [<@ref|selector>] [--path

] [--full-page] Snapshot & interact: snapshot [-i] [-c] [-d ] [-s ] [-C] [-a] [-o ] click <@ref|selector> fill <@ref|selector> + upload <@ref|selector> [,…] # wraps setInputFiles press type wait diff --git a/docs/planning/archive.md b/docs/planning/archive.md new file mode 100644 index 0000000..d39648f --- /dev/null +++ b/docs/planning/archive.md @@ -0,0 +1,5 @@ +# Archived Tasks + +Tasks that have been completed and archived. + +--- diff --git a/docs/planning/sprint-bucket-a.md b/docs/planning/sprint-bucket-a.md new file mode 100644 index 0000000..f9db5d4 --- /dev/null +++ b/docs/planning/sprint-bucket-a.md @@ -0,0 +1,57 @@ +# Sprint: Bucket A — payload reduction + first-class upload + +## Goal + +Ship the six "high-ROI, same theme as item 10" items from the +2026-04-20 jnremache field report (see `plan.md` follow-up sprint +section, Bucket A). Every item cuts payload size sent to an LLM +operator, or removes a papercut the operator hand-rolled in +JavaScript during the field session. + +All six are narrow, backend-facing changes: new daemon options + +thin CLI wiring. Each is verifiable with one smoke assertion. + +## Tasks + +1. [x] `screenshot --full-page` kebab alias (GHAX-FR-06). Currently + only `--fullPage` works; the kebab form is the convention + everywhere else in the CLI. Add the alias in the daemon's + `screenshot` handler. Trivial. +2. [x] `tabs --filter --fields ` (TOK-04). Server-side + regex filter on URL + title, field projection on the returned + objects. Cuts ~200 bytes per google-product tab when filtering. +3. [x] `eval --max-bytes ` (TOK-02). Server-side truncation on + the stringified result. Protects LLM operators from accidental + context blow-outs. Returns `{value, truncated: true, originalBytes}` + when it trips. +4. [x] `text --selector --length --skip ` (TOK-10). + Scoped, paged page-text dumps. Replaces hand-rolled + `document.body.innerText.substring(...)`. +5. [x] `upload <@ref|selector> ` (JNR-07). 
First-class file + upload verb wrapping Playwright's `locator.setInputFiles`. Used + 5x in the field session via a hand-written shim. +6. [x] `snapshot --compact` suppresses cursor-interactive pass + (TOK-01). Today `--compact` only drops noise nodes from the + ARIA tree; the cursor-interactive section still runs whenever + `-i` is set and dominates the output on heavy SPAs. Gate the + cursor pass on `!opts.compact` so `-i --compact` gives the + interactive tree without the cursor bloat. + +## Acceptance criteria + +- Every new flag has a smoke check in `test/smoke.ts`. +- `npm run typecheck`, `npm run build`, `cargo build --release`, + and `npm run test:smoke` all green against the Rust binary + (`GHAX_BIN=$PWD/target/release/ghax npm run test:smoke`). +- `CHANGELOG.md` under `[Unreleased]` lists all six items. +- `README.md` command surface mentions each new flag. +- No new runtime deps (zero — all implementations are Playwright + features already in the dep tree). + +## Deferred + +(Populated during the run if items slip scope.) + +## Queued decisions + +(Empty at plan time.) 
diff --git a/docs/planning/tasks.md b/docs/planning/tasks.md new file mode 100644 index 0000000..1ccdd1a --- /dev/null +++ b/docs/planning/tasks.md @@ -0,0 +1,32 @@ +# Task Board + + + +## ⚙️ Configuration + +**Columns**: 📝 To Do (todo) | 🚀 In Progress (in-progress) | 👀 In Review (in-review) | ✅ Done (done) + +**Categories**: Frontend, Backend, Design, DevOps, Tests, Documentation + +**Users**: @user (User) + +**Priorities**: 🔴 Critical | 🟠 High | 🟡 Medium | 🟢 Low + +**Tags**: #bug #feature #ui #backend #urgent #refactor #docs #test + +--- + +## 📝 To Do + +## 🚀 In Progress + +## 👀 In Review + +## ✅ Done + +- **TASK-001**: `screenshot --full-page` kebab alias ✓ +- **TASK-002**: `tabs --filter --fields` ✓ +- **TASK-003**: `eval --max-bytes` truncation ✓ +- **TASK-004**: `text --selector --length --skip` ✓ +- **TASK-005**: `upload` verb ✓ +- **TASK-006**: `snapshot --compact` suppresses cursor pass ✓ diff --git a/src/daemon.ts b/src/daemon.ts index 156d375..2ab54e1 100644 --- a/src/daemon.ts +++ b/src/daemon.ts @@ -309,14 +309,39 @@ register('status', async (ctx) => { }; }); -register('tabs', async (ctx) => { +register('tabs', async (ctx, _args, opts) => { const pages = await allPages(ctx); - return Promise.all( + const filterStr = (opts.filter as string | undefined) ?? null; + let filterRe: RegExp | null = null; + if (filterStr) { + try { + filterRe = new RegExp(filterStr, 'i'); + } catch (err: any) { + throw new Error(`tabs --filter: invalid regex: ${err?.message || filterStr}`); + } + } + // --fields accepts a csv list of keys to keep. Valid keys: id, title, + // url, active. Invalid keys are ignored silently so a typo can't kill + // the whole command mid-session. Omitted → return every field. + const fieldsArg = (opts.fields as string | undefined) ?? null; + const fields: Set | null = fieldsArg + ? 
new Set(fieldsArg.split(',').map((s) => s.trim()).filter(Boolean)) + : null; + const all = await Promise.all( pages.map(async (p) => { const [id, title] = await Promise.all([pageTargetId(p), p.title().catch(() => '')]); return { id, title, url: p.url(), active: id === ctx.activePageId }; }), ); + const matched = filterRe + ? all.filter((t) => filterRe!.test(t.url) || filterRe!.test(t.title)) + : all; + if (!fields) return matched; + return matched.map((t) => { + const out: Record = {}; + for (const k of fields) if (k in t) out[k] = (t as any)[k]; + return out; + }); }); register('tab', async (ctx, args, opts) => { @@ -436,11 +461,29 @@ register('reload', async (ctx) => { return { url: page.url() }; }); -register('eval', async (ctx, args) => { +register('eval', async (ctx, args, opts) => { const js = String(args[0] ?? ''); if (!js) throw new Error('Usage: eval '); const page = await activePage(ctx); const result = await page.evaluate(js); + // --max-bytes caps the stringified result so an accidental + // `document.body.innerText` on a heavy page can't blow out the + // LLM operator's context window. Measured in UTF-8 bytes, not + // characters. When it trips we wrap the response so the caller + // can see what happened; when it doesn't trip we return the + // value unchanged (zero shape change for scripts that already + // expect the raw value). + const maxBytesRaw = opts['max-bytes'] ?? opts.maxBytes; + const maxBytes = maxBytesRaw !== undefined ? Number(maxBytesRaw) : null; + if (maxBytes !== null && Number.isFinite(maxBytes) && maxBytes > 0) { + const serialized = typeof result === 'string' ? result : JSON.stringify(result) ?? ''; + const bytes = Buffer.byteLength(serialized, 'utf8'); + if (bytes > maxBytes) { + // Slice in bytes — Buffer handles multi-byte UTF-8 correctly. 
+ const truncated = Buffer.from(serialized, 'utf8').subarray(0, maxBytes).toString('utf8'); + return { value: truncated, truncated: true, originalBytes: bytes }; + } + } return result; }); @@ -513,9 +556,28 @@ register('try', async (ctx, args, opts) => { return { value, ...(shot ? { shot } : {}) }; }); -register('text', async (ctx) => { +register('text', async (ctx, _args, opts) => { const page = await activePage(ctx); - const text = await page.evaluate(() => document.body.innerText); + const selector = (opts.selector as string | undefined) ?? null; + // --skip/--length paginate the returned string. The daemon still + // pulls full innerText — the win is on the wire, which is where + // the operator's context budget lives. Pagination uses code-unit + // offsets to match JavaScript's substring semantics; if/when a + // field report complains about emoji-splitting we'll switch to + // grapheme segmentation. + const skip = opts.skip !== undefined ? Math.max(0, Number(opts.skip)) : 0; + const lengthRaw = opts.length !== undefined ? Number(opts.length) : null; + const length = lengthRaw !== null && Number.isFinite(lengthRaw) && lengthRaw > 0 ? lengthRaw : null; + let text: string; + if (selector) { + text = await page.locator(selector).first().innerText(); + } else { + text = await page.evaluate(() => document.body.innerText); + } + if (skip > 0 || length !== null) { + const end = length !== null ? skip + length : undefined; + text = text.slice(skip, end); + } return text; }); @@ -530,10 +592,14 @@ register('screenshot', async (ctx, args, opts) => { const page = await activePage(ctx); const outPath = (opts.path as string) || `/tmp/ghax-shot-${Date.now()}.png`; const target = args[0] ? String(args[0]) : null; + // Accept both `--fullPage` (v0.1 camelCase) and `--full-page` (kebab, + // matches every other CLI flag). Kebab is the preferred form going + // forward; camelCase stays for back-compat with live scripts. 
+ const fullPage = Boolean(opts.fullPage || opts['full-page']); if (target) { await resolveRef(ctx, target, page).screenshot({ path: outPath }); } else { - await page.screenshot({ path: outPath, fullPage: Boolean(opts.fullPage) }); + await page.screenshot({ path: outPath, fullPage }); } return { path: outPath }; }); @@ -737,6 +803,23 @@ register('press', async (ctx, args) => { return { ok: true }; }); +// ─── upload — first-class file upload via setInputFiles ──────── +// +// Wraps Playwright's locator.setInputFiles so operators don't have to +// hand-roll the DOM.setFileInputFiles CDP call every time. Accepts a +// single path or a comma-separated list for multi-file inputs. +// Paths are resolved relative to the daemon's cwd (captured at attach). +register('upload', async (ctx, args) => { + const target = String(args[0] ?? ''); + const pathArg = String(args[1] ?? ''); + if (!target || !pathArg) throw new Error('Usage: upload <@ref|selector> [,…]'); + const page = await activePage(ctx); + const loc = resolveRef(ctx, target, page); + const paths = pathArg.split(',').map((p) => p.trim()).filter(Boolean); + await loc.setInputFiles(paths.length === 1 ? paths[0] : paths); + return { ok: true, count: paths.length }; +}); + register('type', async (ctx, args) => { const text = String(args[0] ?? ''); const page = await activePage(ctx); @@ -999,7 +1082,7 @@ register('responsive', async (ctx, args, opts) => { // Let layout settle — some CSS grid + responsive components need a paint. 
await page.waitForTimeout(200); const outPath = `${prefix}-${preset.name}.png`; - await page.screenshot({ path: outPath, fullPage: Boolean(opts.fullPage) }); + await page.screenshot({ path: outPath, fullPage: Boolean(opts.fullPage || opts['full-page']) }); results.push({ ...preset, path: outPath }); } } finally { diff --git a/src/snapshot.ts b/src/snapshot.ts index d4cf5bc..f7385cc 100644 --- a/src/snapshot.ts +++ b/src/snapshot.ts @@ -143,7 +143,13 @@ export async function snapshot( // The scan walks both light DOM and any open shadow roots it encounters, // emitting Playwright-compatible chain selectors (`host >> inner`) when // it crosses a shadow boundary. - const wantCursor = opts.cursorInteractive || opts.interactive; + // + // `--compact` explicitly skips the cursor pass: on heavy SPAs it dominates + // the output size (hundreds of entries, each with a selector chain), and + // operators who asked for compact are saying "I want the ARIA tree and + // nothing else". `-C` (explicit cursorInteractive) still wins because it's + // an explicit ask — --compact only suppresses the implicit `-i` trigger. + const wantCursor = opts.cursorInteractive || (opts.interactive && !opts.compact); if (wantCursor) { try { const cursorElements = await target.evaluate(() => { diff --git a/test/smoke.ts b/test/smoke.ts index 6a143a5..7e598e9 100644 --- a/test/smoke.ts +++ b/test/smoke.ts @@ -115,6 +115,34 @@ c('tabs returns a non-empty list', async () => { assert(Array.isArray(tabs) && tabs.length > 0, 'expected at least one tab'); }); +c('tabs --filter matches by URL regex', async () => { + // goto example.com first so we know a matching tab exists in the set. 
+ await run(['goto', 'https://example.com']); + const r = await run(['tabs', '--filter', 'example\\.com', '--json']); + const tabs = parseJson>(r.stdout); + assert(Array.isArray(tabs) && tabs.length > 0, 'expected at least one example.com tab'); + for (const t of tabs) { + assert(/example\.com/i.test(t.url), `tab leaked past filter: ${t.url}`); + } +}); + +c('tabs --fields projects only requested keys', async () => { + const r = await run(['tabs', '--fields', 'id,url', '--json']); + const tabs = parseJson>>(r.stdout); + assert(Array.isArray(tabs) && tabs.length > 0, 'expected at least one tab'); + for (const t of tabs) { + const keys = Object.keys(t).sort(); + assert(JSON.stringify(keys) === JSON.stringify(['id', 'url']), + `unexpected keys on projected tab: ${JSON.stringify(keys)}`); + } +}); + +c('tabs --filter with invalid regex fails cleanly', async () => { + const r = await run(['tabs', '--filter', '[unclosed'], { allowFailure: true }); + assert(r.exitCode !== 0, 'invalid regex should fail'); + assert(/invalid regex/i.test(r.stderr + r.stdout), `expected 'invalid regex' message, got: ${r.stderr || r.stdout}`); +}); + c('goto example.com lands on example.com', async () => { const r = await run(['goto', 'https://example.com']); assert(/example\.com/.test(r.stdout), `goto output: ${r.stdout}`); @@ -125,6 +153,24 @@ c('text returns page body', async () => { assert(/Example Domain/i.test(r.stdout), 'text should contain "Example Domain"'); }); +c('text --selector scopes to one element', async () => { + const r = await run(['text', '--selector', 'h1']); + assert(/^\s*Example Domain\s*$/i.test(r.stdout), `expected only h1 text, got: ${r.stdout}`); +}); + +c('text --length paginates the output', async () => { + const r = await run(['text', '--length', '10']); + // stdout has a trailing newline from println; trim for the length check. 
+ assert(r.stdout.trim().length <= 10, `expected ≤10 chars, got ${r.stdout.trim().length}: ${r.stdout.trim()}`); +}); + +c('text --skip skips leading chars', async () => { + const full = (await run(['text'])).stdout.trim(); + const skipped = (await run(['text', '--skip', '5'])).stdout.trim(); + assert(full.startsWith(full.slice(0, 5)), 'sanity check on full text'); + assert(skipped === full.slice(5), `--skip 5 mismatch: full=${full.slice(0, 20)}… skipped=${skipped.slice(0, 20)}…`); +}); + c('html returns innerHTML', async () => { const r = await run(['html', 'h1']); assert(/Example Domain/i.test(r.stdout), 'h1 innerHTML should include "Example Domain"'); @@ -135,6 +181,43 @@ c('snapshot -i produces @e refs', async () => { assert(/@e\d+/.test(r.stdout), `snapshot didn't produce @e refs: ${r.stdout.slice(0, 200)}`); }); +c('snapshot -i --compact suppresses the cursor-interactive section', async () => { + // Build a page with a cursor:pointer div so the cursor-scan has something + // to emit when unsuppressed. Then verify -i alone emits the scan header + // and -i --compact doesn't. + const html = ` + +

+    <div style="cursor:pointer">cursor div</div>
+ `; + await run(['goto', `data:text/html,${encodeURIComponent(html)}`]); + await run(['wait', '300']); + const withScan = await run(['snapshot', '-i']); + assert( + /cursor-interactive/.test(withScan.stdout), + `expected cursor-interactive header on -i: ${withScan.stdout.slice(0, 200)}`, + ); + const compact = await run(['snapshot', '-i', '--compact']); + assert( + !/cursor-interactive/.test(compact.stdout), + `expected no cursor-interactive section on -i --compact: ${compact.stdout.slice(0, 200)}`, + ); + assert( + compact.stdout.length < withScan.stdout.length, + `compact output should be smaller (compact=${compact.stdout.length}, full=${withScan.stdout.length})`, + ); +}); + +c('snapshot -C overrides --compact to force the cursor pass', async () => { + const html = `
+    <div style="cursor:pointer">explicit</div>
`; + await run(['goto', `data:text/html,${encodeURIComponent(html)}`]); + await run(['wait', '300']); + const r = await run(['snapshot', '-C', '--compact']); + assert( + /cursor-interactive/.test(r.stdout), + `expected cursor-interactive when -C is explicit, got: ${r.stdout.slice(0, 200)}`, + ); +}); + c('snapshot -a writes annotated PNG', async () => { const outPath = `/tmp/ghax-smoke-anno-${Date.now()}.png`; const r = await run(['snapshot', '-i', '-a', '-o', outPath]); @@ -153,11 +236,33 @@ c('screenshot writes a PNG', async () => { fs.unlinkSync(outPath); }); +c('screenshot --full-page kebab alias writes a PNG', async () => { + const outPath = `/tmp/ghax-smoke-fp-${Date.now()}.png`; + await run(['screenshot', '--path', outPath, '--full-page']); + assert(fs.existsSync(outPath), 'full-page screenshot missing'); + assert(fs.statSync(outPath).size > 500, 'full-page screenshot suspiciously small'); + fs.unlinkSync(outPath); +}); + c('eval runs JS in the active tab', async () => { const r = await run(['eval', '1 + 2']); assert(r.stdout.trim() === '3', `eval 1+2 → ${r.stdout.trim()}`); }); +c('eval --max-bytes truncates oversized strings', async () => { + const r = await run(['eval', '"x".repeat(1000)', '--max-bytes', '50', '--json']); + const v = parseJson<{ value: string; truncated: boolean; originalBytes: number }>(r.stdout); + assert(v.truncated === true, 'expected truncated=true'); + assert(v.originalBytes === 1000, `expected originalBytes=1000, got ${v.originalBytes}`); + assert(Buffer.byteLength(v.value, 'utf8') <= 50, `truncated value exceeds 50 bytes: ${Buffer.byteLength(v.value, 'utf8')}`); +}); + +c('eval --max-bytes pass-through when under cap', async () => { + const r = await run(['eval', '"short"', '--max-bytes', '100', '--json']); + const v = parseJson(r.stdout); + assert(v === 'short', `expected 'short', got ${JSON.stringify(v)}`); +}); + c('viewport sets the size', async () => { const r = await run(['viewport', '1024x768', '--json']); const v = 
parseJson<{ width: number; height: number }>(r.stdout); @@ -880,6 +985,27 @@ c('fill writes into a resolved input', async () => { assert(val === 'ghax-fill-value', `fill should produce ghax-fill-value, got ${JSON.stringify(val)}`); }); +c('upload sets a file onto a file input', async () => { + const uploadPath = `/tmp/ghax-smoke-upload-${Date.now()}.txt`; + fs.writeFileSync(uploadPath, 'smoke upload payload\n'); + try { + await run(['goto', 'data:text/html,']); + await run(['wait', '300']); + await run(['upload', '#up', uploadPath]); + const r = await run(['eval', 'document.getElementById("up").files[0].name']); + const name = r.stdout.trim().replace(/^"|"$/g, ''); + assert(name === path.basename(uploadPath), `upload name mismatch: ${name}`); + } finally { + fs.unlinkSync(uploadPath); + } +}); + +c('upload rejects missing args', async () => { + const r = await run(['upload', '#only-one-arg'], { allowFailure: true }); + assert(r.exitCode !== 0, 'upload should fail when path missing'); + assert(/Usage: ghax upload/.test(r.stderr + r.stdout), `expected usage hint, got: ${r.stderr || r.stdout}`); +}); + c('pair status prints tunnel instructions while attached', async () => { const r = await run(['pair']); assert(/pair/i.test(r.stdout), `pair output missing header: ${r.stdout.slice(0, 120)}`);