From 9d8ac8b8b4648cfeb7c15d0e7aac5e20e0942632 Mon Sep 17 00:00:00 2001 From: Doan Thieu Date: Wed, 8 Apr 2026 16:33:51 +0700 Subject: [PATCH 1/5] [648] Add `tophat-build-install` agent skill --- skills/tophat-build-install/SKILL.md | 95 +++++++++++ .../references/public-contract.md | 74 +++++++++ skills/tophat-build-install/scripts/gha | 153 ++++++++++++++++++ .../scripts/install_with_tophat.py | 56 +++++++ .../scripts/make_recipe.py | 130 +++++++++++++++ 5 files changed, 508 insertions(+) create mode 100644 skills/tophat-build-install/SKILL.md create mode 100644 skills/tophat-build-install/references/public-contract.md create mode 100755 skills/tophat-build-install/scripts/gha create mode 100644 skills/tophat-build-install/scripts/install_with_tophat.py create mode 100755 skills/tophat-build-install/scripts/make_recipe.py diff --git a/skills/tophat-build-install/SKILL.md b/skills/tophat-build-install/SKILL.md new file mode 100644 index 00000000..613e07c8 --- /dev/null +++ b/skills/tophat-build-install/SKILL.md @@ -0,0 +1,95 @@ +--- +name: tophat-build-install +description: Find, build, and install mobile artifacts for the current repository through GitHub Actions and Tophat. Use when an agent or model needs to help developers, PMs, or reviewers test in-progress work without stashing local changes, switching branches, building locally, or waiting for a manually shared QA build. Handle requests like "install the latest build from main", "install PR #123 on my simulator", or "build this branch and install it". Infer the current GitHub repository from `git remote -v` or `git remote get-url origin` by default. This skill is GitHub Actions only and requires `gh` CLI for build lookup and workflow dispatch. +--- + +# Tophat Build Install + +## Overview + +Use this skill to turn a user request into a concrete GitHub Actions artifact install through Tophat for the current repository. 
The main use cases are: + +- developers testing another branch, PR, or commit without stashing work or switching branches +- PMs or reviewers testing pre-merge work early +- reusing an existing artifact when the target commit was already built + +This skill supports GitHub Actions artifacts only. Require `gh` for GitHub lookup and workflow dispatch. Do not switch to GitHub MCP, direct REST workarounds, or other artifact providers. + +## Gather Inputs + +Collect only the missing inputs. + +- Platform: `ios` or `android`. +- Repository: derive owner and repo from the current checkout first. Prefer `git remote get-url origin`, then fall back to `git remote -v`. Ask only if there is no usable GitHub remote. +- Source selector: branch, PR number, workflow run ID, or explicit artifact ID. +- Workflow: workflow file or workflow name when a new build must be triggered. +- Artifact choice: artifact name when a run publishes multiple artifacts. +- Destination: `simulator` or `device` when the user specifies it or when the build output differs by target. +- Launch arguments: only if the app needs them. + +If the user says "latest build", prefer the latest successful existing run for the requested branch instead of forcing a rebuild. If the user says "build", "rebuild", or "trigger", dispatch a new workflow run first. + +If the user already supplies an explicit `artifact_id`, you may skip build lookup, but still keep the workflow within the GitHub Actions artifact path. + +For requests in the current repository, do not ask for owner or repo unless Git remotes are missing or point somewhere non-GitHub. + +Prefer defaulting to the current repository over asking broad clarifying questions. 
+ +## Validate Prerequisites + +Before installing, confirm: + +- `gh` is authenticated +- Tophat is installed and running +- Tophat exposes the `gha` provider +- the helper scripts in `scripts/` are available from the skill directory + +Use `/Applications/Tophat.app/Contents/MacOS/tophatctl` as the Tophat CLI path. Require Tophat to be installed and running. Require a GitHub personal access token to already be configured in Tophat's GitHub Actions extension settings. Require the GitHub Actions provider ID `gha` to be present in `tophatctl list providers`. Here, `gha` is Tophat's provider ID for the GitHub Actions extension. + +If `gh` is not authenticated, the GitHub PAT is not configured in Tophat, or `gha` is not installed, stop and tell the user what is missing. Do not fall back to GitHub MCP, raw REST calls, or another artifact provider. + +## Default Behavior + +Use this decision order: + +1. Infer `owner/repo` from the current checkout. +2. Determine the target ref or artifact from the user request. +3. If the user gave an explicit artifact ID, install it directly. +4. Otherwise, look for an existing matching artifact before triggering a new build. +5. Trigger a workflow only when no suitable artifact already exists or when the user explicitly asks for a rebuild. + +This keeps the skill useful for both developer and PM flows, while minimizing unnecessary builds. + +## Resolve The Build + +Resolve the user's target into a concrete artifact. Use `gh` when you need to inspect PRs, workflows, runs, or artifacts. Use `scripts/gha list-artifacts` when you want the local helper to list non-expired artifacts for the current repo or a chosen ref. + +If a workflow must be triggered, add workflow inputs when required. + +If multiple artifacts exist, match by platform or ask the user which artifact to install. 
+ +When the repository follows an artifact naming convention tied to commit SHA, prefer an existing artifact for the target commit before triggering a new workflow. + +## Create The Tophat Recipe + +Create a temporary recipe in `tmp/` with `scripts/make_recipe.py`. The provider ID must be `gha`, and the recipe must include: + +- `owner` +- `repo` +- `artifact_id` + +When `owner` and `repo` are omitted, infer them from the current Git checkout. + +Do not use another provider from this skill. + +## Install + +Install through `scripts/install_with_tophat.py` using the temporary recipe. After `tophatctl` returns a result, remove the temporary recipe unless the user explicitly wants to keep it for debugging. + +When the install succeeds, report the branch or PR, workflow run, artifact ID, platform, and destination that were used. + +`scripts/install_with_tophat.py` should treat the known `tophatctl` timeout as "install may still be in progress in Tophat" instead of a definitive failure. + +## Reference Notes + +Read `references/public-contract.md` when you need the public Tophat schema, the GitHub Actions provider details, or command examples. diff --git a/skills/tophat-build-install/references/public-contract.md b/skills/tophat-build-install/references/public-contract.md new file mode 100644 index 00000000..6e8333a1 --- /dev/null +++ b/skills/tophat-build-install/references/public-contract.md @@ -0,0 +1,74 @@ +# Public Contract + +This skill is anchored to the public `Shopify/tophat` repo, not a private internal fork. + +## Public `tophatctl` Recipe Schema + +When specifying the path to a JSON configuration file for `tophatctl install`, use a JSON array of recipes. 
+ +General form: + +```json +[ + { + "artifactProviderID": "", + "artifactProviderParameters": {}, + "launchArguments": [], + "platformHint": "ios", + "destinationHint": "simulator" + } +] +``` + +To target a specific device by name and runtime version: + +```json +[ + { + "artifactProviderID": "", + "artifactProviderParameters": {}, + "launchArguments": [], + "device": { + "name": "iPhone 16 Pro", + "platform": "ios", + "runtimeVersion": "18.2" + } + } +] +``` + +Relevant public fields: + +- `artifactProviderID` +- `artifactProviderParameters` +- `launchArguments` +- `platformHint` +- `destinationHint` +- `device.name` +- `device.platform` +- `device.runtimeVersion` + +## Public GitHub Actions Provider + +The checked-out public repo includes `TophatGitHubActionsExtension`. + +- Provider ID: `gha` +- Provider title: `GitHub Actions` +- Required parameters: + - `owner` + - `repo` + - `artifact_id` + +The public implementation downloads an artifact archive from: + +```text +GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/zip +``` + +The extension requires a GitHub personal access token in Tophat settings. + +For this skill, owner and repo should normally come from the current checkout via `git remote get-url origin` or `git remote -v`. Only ask the user for repository details when the local checkout does not expose a usable GitHub remote. + +This skill is intentionally limited to GitHub Actions artifacts through `gh` and Tophat's `gha` provider. Do not add fallback behavior to GitHub MCP, direct REST calls, or other providers. + +Use `/Applications/Tophat.app/Contents/MacOS/tophatctl` as the CLI path for this skill. 
diff --git a/skills/tophat-build-install/scripts/gha b/skills/tophat-build-install/scripts/gha new file mode 100755 index 00000000..ae9d8c6a --- /dev/null +++ b/skills/tophat-build-install/scripts/gha @@ -0,0 +1,153 @@ +#!/usr/bin/env python3 + +import argparse +import json +import re +import subprocess +import sys + + +def parse_github_owner_repo(remote_url: str) -> tuple[str, str] | None: + remote_url = remote_url.strip() + patterns = ( + r"^git@github\.com:(?P[^/]+)/(?P[^/]+?)(?:\.git)?$", + r"^https://github\.com/(?P[^/]+)/(?P[^/]+?)(?:\.git)?$", + r"^ssh://git@github\.com/(?P[^/]+)/(?P[^/]+?)(?:\.git)?$", + ) + for pattern in patterns: + match = re.match(pattern, remote_url) + if match: + return match.group("owner"), match.group("repo") + return None + + +def infer_owner_repo_from_git() -> tuple[str, str] | None: + preferred_remotes = ("origin", "upstream") + checked = set() + + for remote in preferred_remotes: + checked.add(remote) + result = subprocess.run( + ["git", "remote", "get-url", remote], + check=False, + capture_output=True, + text=True, + ) + if result.returncode == 0: + parsed = parse_github_owner_repo(result.stdout) + if parsed: + return parsed + + result = subprocess.run( + ["git", "remote", "-v"], + check=False, + capture_output=True, + text=True, + ) + if result.returncode != 0: + return None + + for line in result.stdout.splitlines(): + parts = line.split() + if len(parts) < 2: + continue + remote_name = parts[0] + remote_url = parts[1] + if remote_name in checked: + continue + parsed = parse_github_owner_repo(remote_url) + if parsed: + return parsed + + return None + + +def run_gh_json(args: list[str]) -> dict: + result = subprocess.run( + ["gh", *args], + check=False, + capture_output=True, + text=True, + ) + if result.returncode != 0: + message = result.stderr.strip() or result.stdout.strip() or "gh command failed" + raise SystemExit(message) + try: + return json.loads(result.stdout) + except json.JSONDecodeError as exc: + raise 
SystemExit(f"Failed to parse gh JSON output: {exc}") from exc + + +def list_artifacts(repo: str, ref: str, platform: str) -> int: + payload = run_gh_json(["api", f"repos/{repo}/actions/artifacts"]) + artifacts = payload.get("artifacts", []) + + for artifact in artifacts: + if artifact.get("expired"): + continue + + workflow_run = artifact.get("workflow_run") or {} + head_branch = workflow_run.get("head_branch") or "" + head_sha = workflow_run.get("head_sha") or "" + name = artifact.get("name") or "" + + if ref and head_branch != ref: + continue + + if platform and platform.lower() not in name.lower(): + continue + + short_sha = head_sha[:7] + created_at = artifact.get("created_at") or "" + print( + "\t".join( + [ + str(artifact.get("id", "")), + name, + head_branch, + short_sha, + created_at, + ] + ) + ) + + return 0 + + +def main() -> int: + parser = argparse.ArgumentParser( + description="GitHub Actions helpers for the tophat-build-install skill." + ) + subparsers = parser.add_subparsers(dest="command") + + list_parser = subparsers.add_parser( + "list-artifacts", + help="List non-expired GitHub Actions artifacts for the current repo.", + ) + list_parser.add_argument("--repo", help="Repository in owner/repo format.") + list_parser.add_argument("--ref", default="", help="Branch name to match.") + list_parser.add_argument( + "--platform", + default="", + choices=("ios", "android", ""), + help="Platform substring to match in artifact names.", + ) + + args = parser.parse_args() + + if args.command == "list-artifacts": + repo = args.repo + if not repo: + inferred = infer_owner_repo_from_git() + if not inferred: + raise SystemExit("Could not infer GitHub repository from git remotes.") + owner, repository = inferred + repo = f"{owner}/{repository}" + return list_artifacts(repo=repo, ref=args.ref, platform=args.platform) + + parser.print_help(sys.stderr) + return 1 + + +if __name__ == "__main__": + raise SystemExit(main()) diff --git 
a/skills/tophat-build-install/scripts/install_with_tophat.py b/skills/tophat-build-install/scripts/install_with_tophat.py new file mode 100644 index 00000000..371feae3 --- /dev/null +++ b/skills/tophat-build-install/scripts/install_with_tophat.py @@ -0,0 +1,56 @@ +#!/usr/bin/env python3 + +import argparse +import subprocess +import sys +from pathlib import Path + + +TOPHATCTL_PATH = "/Applications/Tophat.app/Contents/MacOS/tophatctl" +TIMEOUT_MESSAGE = "Error: The operation timed out." + + +def main() -> int: + parser = argparse.ArgumentParser( + description="Install a Tophat recipe file and treat the known CLI timeout as in-progress." + ) + parser.add_argument("recipe", help="Path to a Tophat recipe JSON file.") + args = parser.parse_args() + + recipe_path = Path(args.recipe).resolve() + if not recipe_path.exists(): + parser.error(f"Recipe file not found: {recipe_path}") + + result = subprocess.run( + [TOPHATCTL_PATH, "install", str(recipe_path)], + check=False, + capture_output=True, + text=True, + ) + + stdout = result.stdout.strip() + stderr = result.stderr.strip() + combined = "\n".join(part for part in (stdout, stderr) if part) + + if result.returncode == 0: + if combined: + print(combined) + return 0 + + if TIMEOUT_MESSAGE in combined: + if stdout: + print(stdout) + print( + "Tophat CLI timed out while waiting for completion. 
" + "The install request was sent and installation may still be in progress in the Tophat app.", + file=sys.stderr, + ) + return 0 + + if combined: + print(combined, file=sys.stderr) + return result.returncode + + +if __name__ == "__main__": + raise SystemExit(main()) diff --git a/skills/tophat-build-install/scripts/make_recipe.py b/skills/tophat-build-install/scripts/make_recipe.py new file mode 100755 index 00000000..0ce51624 --- /dev/null +++ b/skills/tophat-build-install/scripts/make_recipe.py @@ -0,0 +1,130 @@ +#!/usr/bin/env python3 + +import argparse +import importlib.util +import json +import sys +from importlib.machinery import SourceFileLoader +from pathlib import Path + + +def load_gha_module(): + gha_path = Path(__file__).with_name("gha") + loader = SourceFileLoader("gha_helpers", str(gha_path)) + spec = importlib.util.spec_from_loader("gha_helpers", loader) + if spec is None or spec.loader is None: + raise RuntimeError(f"Could not load helpers from {gha_path}") + module = importlib.util.module_from_spec(spec) + spec.loader.exec_module(module) + return module + + +GHA = load_gha_module() + + +def parse_key_value(item: str) -> tuple[str, str]: + if "=" not in item: + raise argparse.ArgumentTypeError(f"expected key=value, got: {item}") + key, value = item.split("=", 1) + key = key.strip() + value = value.strip() + if not key: + raise argparse.ArgumentTypeError(f"missing key in: {item}") + return key, value + + +def main() -> int: + parser = argparse.ArgumentParser( + description="Create a public tophatctl recipe JSON file." 
+ ) + parser.add_argument("--provider", default="gha", help="Tophat artifact provider ID.") + parser.add_argument("--owner", help="GitHub owner for the public gha provider.") + parser.add_argument("--repo", help="GitHub repository for the public gha provider.") + parser.add_argument( + "--artifact-id", + help="GitHub Actions artifact ID for the public gha provider.", + ) + parser.add_argument( + "--platform", + required=True, + choices=("ios", "android"), + help="Recipe platformHint.", + ) + parser.add_argument( + "--destination", + choices=("simulator", "device"), + help="Recipe destinationHint.", + ) + parser.add_argument( + "--launch-arg", + action="append", + default=[], + help="Launch argument to pass through to the app. Repeat as needed.", + ) + parser.add_argument( + "--param", + action="append", + default=[], + type=parse_key_value, + help="Additional artifactProviderParameters as key=value. Repeat as needed.", + ) + parser.add_argument( + "--output", + help="Write JSON to this path instead of stdout.", + ) + args = parser.parse_args() + + provider_parameters = dict(args.param) + + if args.provider == "gha": + if not args.owner or not args.repo: + inferred = GHA.infer_owner_repo_from_git() + if inferred: + inferred_owner, inferred_repo = inferred + args.owner = args.owner or inferred_owner + args.repo = args.repo or inferred_repo + + missing = [ + name + for name, value in ( + ("owner", args.owner), + ("repo", args.repo), + ("artifact-id", args.artifact_id), + ) + if not value + ] + if missing: + parser.error( + "provider 'gha' requires --owner, --repo, and --artifact-id; " + f"missing: {', '.join(missing)}" + ) + provider_parameters = { + "owner": args.owner, + "repo": args.repo, + "artifact_id": args.artifact_id, + **provider_parameters, + } + + recipe = [ + { + "artifactProviderID": args.provider, + "artifactProviderParameters": provider_parameters, + "launchArguments": args.launch_arg, + "platformHint": args.platform, + "destinationHint": 
args.destination, + } + ] + + output = json.dumps(recipe, indent=2) + "\n" + + if args.output: + path = Path(args.output) + path.write_text(output) + else: + sys.stdout.write(output) + + return 0 + + +if __name__ == "__main__": + raise SystemExit(main()) From 0ad93fd90f35b15234ae65c71327e4596c18f976 Mon Sep 17 00:00:00 2001 From: Doan Thieu Date: Thu, 9 Apr 2026 10:12:05 +0700 Subject: [PATCH 2/5] [648] Refine scripts --- skills/tophat-build-install/SKILL.md | 2 +- .../scripts/{gha => gha.py} | 0 .../scripts/make_recipe.py | 18 ++---------------- 3 files changed, 3 insertions(+), 17 deletions(-) rename skills/tophat-build-install/scripts/{gha => gha.py} (100%) mode change 100755 => 100644 diff --git a/skills/tophat-build-install/SKILL.md b/skills/tophat-build-install/SKILL.md index 613e07c8..68f0966a 100644 --- a/skills/tophat-build-install/SKILL.md +++ b/skills/tophat-build-install/SKILL.md @@ -62,7 +62,7 @@ This keeps the skill useful for both developer and PM flows, while minimizing un ## Resolve The Build -Resolve the user's target into a concrete artifact. Use `gh` when you need to inspect PRs, workflows, runs, or artifacts. Use `scripts/gha list-artifacts` when you want the local helper to list non-expired artifacts for the current repo or a chosen ref. +Resolve the user's target into a concrete artifact. Use `gh` when you need to inspect PRs, workflows, runs, or artifacts. Use `scripts/gha.py list-artifacts` when you want the local helper to list non-expired artifacts for the current repo or a chosen ref. If a workflow must be triggered, add workflow inputs when required. 
diff --git a/skills/tophat-build-install/scripts/gha b/skills/tophat-build-install/scripts/gha.py old mode 100755 new mode 100644 similarity index 100% rename from skills/tophat-build-install/scripts/gha rename to skills/tophat-build-install/scripts/gha.py diff --git a/skills/tophat-build-install/scripts/make_recipe.py b/skills/tophat-build-install/scripts/make_recipe.py index 0ce51624..6f6feff5 100755 --- a/skills/tophat-build-install/scripts/make_recipe.py +++ b/skills/tophat-build-install/scripts/make_recipe.py @@ -1,25 +1,11 @@ #!/usr/bin/env python3 import argparse -import importlib.util import json import sys -from importlib.machinery import SourceFileLoader from pathlib import Path - -def load_gha_module(): - gha_path = Path(__file__).with_name("gha") - loader = SourceFileLoader("gha_helpers", str(gha_path)) - spec = importlib.util.spec_from_loader("gha_helpers", loader) - if spec is None or spec.loader is None: - raise RuntimeError(f"Could not load helpers from {gha_path}") - module = importlib.util.module_from_spec(spec) - spec.loader.exec_module(module) - return module - - -GHA = load_gha_module() +from gha import infer_owner_repo_from_git def parse_key_value(item: str) -> tuple[str, str]: @@ -78,7 +64,7 @@ def main() -> int: if args.provider == "gha": if not args.owner or not args.repo: - inferred = GHA.infer_owner_repo_from_git() + inferred = infer_owner_repo_from_git() if inferred: inferred_owner, inferred_repo = inferred args.owner = args.owner or inferred_owner From 5966c4dee582025ce9f22e72d382092305953f77 Mon Sep 17 00:00:00 2001 From: Doan Thieu Date: Thu, 9 Apr 2026 17:07:15 +0700 Subject: [PATCH 3/5] [648] Improve tophat artifact resolution --- skills/tophat-build-install/SKILL.md | 56 ++++- skills/tophat-build-install/scripts/gha.py | 198 +++++++++++++++++- .../scripts/install_artifact.py | 181 ++++++++++++++++ .../scripts/install_with_tophat.py | 81 +++++-- .../scripts/make_recipe.py | 129 ++++++++---- 5 files changed, 573 insertions(+), 72 
deletions(-) create mode 100644 skills/tophat-build-install/scripts/install_artifact.py diff --git a/skills/tophat-build-install/SKILL.md b/skills/tophat-build-install/SKILL.md index 68f0966a..9ba20fc9 100644 --- a/skills/tophat-build-install/SKILL.md +++ b/skills/tophat-build-install/SKILL.md @@ -35,6 +35,8 @@ For requests in the current repository, do not ask for owner or repo unless Git Prefer defaulting to the current repository over asking broad clarifying questions. +For device requests, ask for `simulator` vs `device` early if the user did not specify it, because CI signing availability materially changes whether the install can succeed. + ## Validate Prerequisites Before installing, confirm: @@ -44,10 +46,14 @@ Before installing, confirm: - Tophat exposes the `gha` provider - the helper scripts in `scripts/` are available from the skill directory +For `destination=device`, also confirm the repository already has a CI workflow that produces a signed device-installable artifact. + Use `/Applications/Tophat.app/Contents/MacOS/tophatctl` as the Tophat CLI path. Require Tophat to be installed and running. Require a GitHub personal access token to already be configured in Tophat's GitHub Actions extension settings. Require the GitHub Actions provider ID `gha` to be present in `tophatctl list providers`. Here, `gha` is Tophat's provider ID for the GitHub Actions extension. If `gh` is not authenticated, the GitHub PAT is not configured in Tophat, or `gha` is not installed, stop and tell the user what is missing. Do not fall back to GitHub MCP, raw REST calls, or another artifact provider. +For device installs, do not assume local Xcode automatic signing has any effect on CI. CI must already have access to signing assets and the workflow must already be able to produce a signed device build. 
+ ## Default Behavior Use this decision order: @@ -62,7 +68,45 @@ This keeps the skill useful for both developer and PM flows, while minimizing un ## Resolve The Build -Resolve the user's target into a concrete artifact. Use `gh` when you need to inspect PRs, workflows, runs, or artifacts. Use `scripts/gha.py list-artifacts` when you want the local helper to list non-expired artifacts for the current repo or a chosen ref. +Resolve the user's target into a concrete artifact. Use `gh` when you need to inspect PRs, workflows, runs, or artifacts. Use `scripts/gha.py list-artifacts` when you want the local helper to list non-expired artifacts for the current repo or a chosen selector. + +When using `gh run list --json`, only request fields that `gh` actually supports in the current CLI. For workflow runs, prefer the stable set: + +- `databaseId` +- `headBranch` +- `headSha` +- `name` +- `status` +- `conclusion` +- `createdAt` +- `updatedAt` +- `url` +- `workflowDatabaseId` + +Do not assume REST field names work in `gh run list --json`. For example, `id`, `head_branch`, and `head_sha` are not valid there. + +Do not assume `headRefOid` exists in `gh pr view --json`. When you need a PR head SHA, prefer `gh api repos///pulls/` and read `head.ref` plus `head.sha`. + +Do not request `artifacts` from `gh run view --json`. If you need artifacts for a workflow run, use `scripts/gha.py list-artifacts --run-id ` or call the artifact API shape directly instead of relying on `gh run view`. + +Prefer this fallback order when resolving an artifact: + +1. If the user gave an explicit artifact ID, install it directly. +2. If the user gave a workflow run ID, list artifacts and match `artifact.workflow_run.id`. +3. If the user gave a PR number, resolve the PR head branch and head SHA, then list artifacts for that head. +4. If the user gave a branch or commit SHA, list artifacts and match branch and-or SHA. +5. 
Only inspect workflow runs with `gh run list` when you need workflow metadata to decide whether to trigger a new build. +6. Trigger a workflow only when no matching non-expired artifact exists or the user explicitly asked for a rebuild. + +`scripts/gha.py list-artifacts` supports these selectors: + +- `--ref ` +- `--sha ` +- `--pr ` +- `--run-id ` +- `--platform ios|android` + +The helper must be treated as the authoritative artifact fallback because it uses the repository artifact API shape directly. It should paginate through repository artifacts instead of assuming the first page contains the desired result. If a workflow must be triggered, add workflow inputs when required. @@ -70,9 +114,11 @@ If multiple artifacts exist, match by platform or ask the user which artifact to When the repository follows an artifact naming convention tied to commit SHA, prefer an existing artifact for the target commit before triggering a new workflow. +For `destination=device`, only use workflows that are already configured to sign for device installation. Do not require any specific signing toolchain from this skill. Do not use this skill to create certificates, provisioning profiles, or signing configuration from scratch during a normal install request. + ## Create The Tophat Recipe -Create a temporary recipe in `tmp/` with `scripts/make_recipe.py`. The provider ID must be `gha`, and the recipe must include: +Create a temporary recipe in `tmp/` with the filename convention `tophat-recipe-.json`. Use `scripts/make_recipe.py` directly only when you need the raw JSON helper. Prefer `scripts/install_artifact.py` for the full caller flow so recipe naming, cleanup, and user-facing status stay consistent. The provider ID must be `gha`, and the recipe must include: - `owner` - `repo` @@ -84,12 +130,14 @@ Do not use another provider from this skill. ## Install -Install through `scripts/install_with_tophat.py` using the temporary recipe. 
After `tophatctl` returns a result, remove the temporary recipe unless the user explicitly wants to keep it for debugging. +Prefer `scripts/install_artifact.py` for the install flow. It should create `tmp/tophat-recipe-.json`, call `scripts/install_with_tophat.py`, print the user-facing status, and remove the temporary recipe unless the user explicitly wants to keep it for debugging. -When the install succeeds, report the branch or PR, workflow run, artifact ID, platform, and destination that were used. +On a normal successful install, print one concise green success line that includes the source, artifact ID, platform, and destination when present. On the known timeout case, do not claim the build is fully installed; report that installation may still be in progress in Tophat instead. `scripts/install_with_tophat.py` should treat the known `tophatctl` timeout as "install may still be in progress in Tophat" instead of a definitive failure. +If the user asks for a device install but the repository has no known signed-device CI path, stop and explain that a signed device artifact must exist first. Recommend `simulator` as the fallback destination when appropriate. + ## Reference Notes Read `references/public-contract.md` when you need the public Tophat schema, the GitHub Actions provider details, or command examples. 
diff --git a/skills/tophat-build-install/scripts/gha.py b/skills/tophat-build-install/scripts/gha.py index ae9d8c6a..b14fd5a0 100644 --- a/skills/tophat-build-install/scripts/gha.py +++ b/skills/tophat-build-install/scripts/gha.py @@ -5,6 +5,7 @@ import re import subprocess import sys +from typing import Any def parse_github_owner_repo(remote_url: str) -> tuple[str, str] | None: @@ -62,7 +63,7 @@ def infer_owner_repo_from_git() -> tuple[str, str] | None: return None -def run_gh_json(args: list[str]) -> dict: +def run_gh_json(args: list[str]) -> Any: result = subprocess.run( ["gh", *args], check=False, @@ -78,25 +79,190 @@ def run_gh_json(args: list[str]) -> dict: raise SystemExit(f"Failed to parse gh JSON output: {exc}") from exc -def list_artifacts(repo: str, ref: str, platform: str) -> int: - payload = run_gh_json(["api", f"repos/{repo}/actions/artifacts"]) +def run_gh_json_with_repo_hint(args: list[str], repo: str) -> Any: + try: + return run_gh_json(args) + except SystemExit as exc: + message = str(exc) + if "Not Found (HTTP 404)" not in message: + raise + raise SystemExit( + "GitHub artifact lookup returned 404 for " + f"{repo}. Check that the repository exists, your gh auth can access it, " + "and GitHub Actions artifacts are available for that repository." 
+ ) from exc + + +def normalize_sha(value: str) -> str: + return value.strip().lower() + + +def sha_matches(head_sha: str, requested_sha: str) -> bool: + if not requested_sha: + return True + normalized_head = normalize_sha(head_sha) + normalized_requested = normalize_sha(requested_sha) + return normalized_head.startswith(normalized_requested) + + +def workflow_run_matches_ref(workflow_run: dict[str, Any], ref: str, sha: str) -> bool: + head_branch = workflow_run.get("head_branch") or workflow_run.get("headBranch") or "" + head_sha = workflow_run.get("head_sha") or workflow_run.get("headSha") or "" + + if ref and head_branch != ref: + return False + if not sha_matches(head_sha, sha): + return False + return True + + +def list_repo_artifacts(repo: str) -> list[dict[str, Any]]: + artifacts: list[dict[str, Any]] = [] + page = 1 + + while True: + payload = run_gh_json_with_repo_hint( + [ + "api", + f"repos/{repo}/actions/artifacts", + "-F", + "per_page=100", + "-F", + f"page={page}", + ], + repo=repo, + ) + page_artifacts = payload.get("artifacts", []) + if not page_artifacts: + break + artifacts.extend(page_artifacts) + if len(page_artifacts) < 100: + break + page += 1 + + return artifacts + + +def list_runs(repo: str, ref: str) -> list[dict[str, Any]]: + args = [ + "run", + "list", + "--repo", + repo, + "--limit", + "100", + "--json", + "databaseId,headBranch,headSha,name,status,conclusion,createdAt,updatedAt,url", + ] + if ref: + args.extend(["--branch", ref]) + payload = run_gh_json(args) + if not isinstance(payload, list): + raise SystemExit("Failed to parse gh run list output.") + return payload + + +def list_run_artifacts(repo: str, run_id: int) -> list[dict[str, Any]]: + payload = run_gh_json_with_repo_hint( + ["api", f"repos/{repo}/actions/runs/{run_id}/artifacts"], + repo=repo, + ) artifacts = payload.get("artifacts", []) + for artifact in artifacts: + workflow_run = artifact.get("workflow_run") or {} + if not workflow_run: + artifact["workflow_run"] = {"id": 
run_id} + return artifacts + + +def list_matching_run_artifacts(repo: str, ref: str, sha: str) -> list[dict[str, Any]]: + runs = list_runs(repo, ref=ref) + matching_runs = [ + run + for run in runs + if workflow_run_matches_ref(run, ref=ref, sha=sha) + ] + + artifacts: list[dict[str, Any]] = [] + for run in matching_runs: + run_id = run.get("databaseId") + if not run_id: + continue + run_artifacts = list_run_artifacts(repo, int(run_id)) + for artifact in run_artifacts: + workflow_run = artifact.get("workflow_run") or {} + if "head_branch" not in workflow_run and run.get("headBranch"): + workflow_run["head_branch"] = run.get("headBranch") + if "head_sha" not in workflow_run and run.get("headSha"): + workflow_run["head_sha"] = run.get("headSha") + artifact["workflow_run"] = workflow_run + artifacts.extend(run_artifacts) + + return artifacts + + +def resolve_pr_ref(repo: str, pr: int) -> tuple[str, str]: + payload = run_gh_json(["api", f"repos/{repo}/pulls/{pr}"]) + head = payload.get("head") or {} + head_ref = head.get("ref") or "" + head_sha = head.get("sha") or "" + if not head_ref or not head_sha: + raise SystemExit(f"Could not resolve PR #{pr} to a head ref and SHA.") + return head_ref, head_sha + + +def list_artifacts( + repo: str, + ref: str, + sha: str, + pr: int | None, + run_id: int | None, + platform: str, +) -> int: + if pr is not None: + pr_ref, pr_sha = resolve_pr_ref(repo, pr) + ref = ref or pr_ref + sha = sha or pr_sha + + if run_id is not None: + artifacts = list_run_artifacts(repo, run_id) + elif ref or sha: + artifacts = list_matching_run_artifacts(repo, ref=ref, sha=sha) + else: + artifacts = list_repo_artifacts(repo) + filtered_artifacts: list[dict[str, Any]] = [] for artifact in artifacts: if artifact.get("expired"): continue workflow_run = artifact.get("workflow_run") or {} + workflow_run_id = workflow_run.get("id") head_branch = workflow_run.get("head_branch") or "" head_sha = workflow_run.get("head_sha") or "" name = artifact.get("name") or "" 
- if ref and head_branch != ref: + if run_id is not None and workflow_run_id != run_id: + continue + + if not workflow_run_matches_ref(workflow_run, ref=ref, sha=sha): continue if platform and platform.lower() not in name.lower(): continue + filtered_artifacts.append(artifact) + + filtered_artifacts.sort( + key=lambda artifact: artifact.get("created_at") or "", + reverse=True, + ) + + for artifact in filtered_artifacts: + workflow_run = artifact.get("workflow_run") or {} + name = artifact.get("name") or "" + head_branch = workflow_run.get("head_branch") or "" + head_sha = workflow_run.get("head_sha") or "" short_sha = head_sha[:7] created_at = artifact.get("created_at") or "" print( @@ -126,6 +292,21 @@ def main() -> int: ) list_parser.add_argument("--repo", help="Repository in owner/repo format.") list_parser.add_argument("--ref", default="", help="Branch name to match.") + list_parser.add_argument( + "--sha", + default="", + help="Commit SHA or prefix to match against the artifact workflow run.", + ) + list_parser.add_argument( + "--pr", + type=int, + help="PR number to resolve to its current head branch and SHA.", + ) + list_parser.add_argument( + "--run-id", + type=int, + help="Workflow run ID to match against artifact.workflow_run.id.", + ) list_parser.add_argument( "--platform", default="", @@ -143,7 +324,14 @@ def main() -> int: raise SystemExit("Could not infer GitHub repository from git remotes.") owner, repository = inferred repo = f"{owner}/{repository}" - return list_artifacts(repo=repo, ref=args.ref, platform=args.platform) + return list_artifacts( + repo=repo, + ref=args.ref, + sha=args.sha, + pr=args.pr, + run_id=args.run_id, + platform=args.platform, + ) parser.print_help(sys.stderr) return 1 diff --git a/skills/tophat-build-install/scripts/install_artifact.py b/skills/tophat-build-install/scripts/install_artifact.py new file mode 100644 index 00000000..c93a93eb --- /dev/null +++ b/skills/tophat-build-install/scripts/install_artifact.py @@ -0,0 
+1,181 @@ +#!/usr/bin/env python3 + +import argparse +import sys +from pathlib import Path + +from install_with_tophat import ( + STATUS_INSTALLED, + STATUS_PENDING, + install_recipe, +) +from make_recipe import build_provider_parameters, build_recipe, parse_key_value, write_recipe + + +GREEN = "\033[32m" +YELLOW = "\033[33m" +RESET = "\033[0m" + + +def colorize(message: str, color: str) -> str: + return f"{color}{message}{RESET}" + + +def format_success_message( + source: str, + artifact_id: str, + platform: str, + destination: str | None, +) -> str: + parts = [ + f"Installed build successfully: {source}", + f"artifact {artifact_id}", + platform, + ] + if destination: + parts.append(destination) + return " · ".join(parts) + + +def format_pending_message( + source: str, + artifact_id: str, + platform: str, + destination: str | None, +) -> str: + parts = [ + f"Install is still in progress in Tophat: {source}", + f"artifact {artifact_id}", + platform, + ] + if destination: + parts.append(destination) + return " · ".join(parts) + + +def recipe_path_for_artifact(tmp_dir: Path, artifact_id: str) -> Path: + return tmp_dir / f"tophat-recipe-{artifact_id}.json" + + +def main() -> int: + parser = argparse.ArgumentParser( + description="Create a Tophat recipe, install it, and print user-facing status." 
+ ) + parser.add_argument("--provider", default="gha", help="Tophat artifact provider ID.") + parser.add_argument("--owner", help="GitHub owner for the public gha provider.") + parser.add_argument("--repo", help="GitHub repository for the public gha provider.") + parser.add_argument( + "--artifact-id", + required=True, + help="GitHub Actions artifact ID for the public gha provider.", + ) + parser.add_argument( + "--platform", + required=True, + choices=("ios", "android"), + help="Recipe platformHint.", + ) + parser.add_argument( + "--destination", + choices=("simulator", "device"), + help="Recipe destinationHint.", + ) + parser.add_argument( + "--source", + required=True, + help="Short source label used in the user-facing status message.", + ) + parser.add_argument( + "--launch-arg", + action="append", + default=[], + help="Launch argument to pass through to the app. Repeat as needed.", + ) + parser.add_argument( + "--param", + action="append", + default=[], + type=parse_key_value, + help="Additional artifactProviderParameters as key=value. 
Repeat as needed.", + ) + parser.add_argument( + "--tmp-dir", + default="tmp", + help="Directory for the temporary recipe file.", + ) + parser.add_argument( + "--keep-recipe", + action="store_true", + help="Keep the generated recipe file for debugging.", + ) + args = parser.parse_args() + + try: + provider_parameters = build_provider_parameters( + provider=args.provider, + owner=args.owner, + repo=args.repo, + artifact_id=args.artifact_id, + extra_parameters=dict(args.param), + ) + except ValueError as exc: + parser.error(str(exc)) + + recipe = build_recipe( + provider=args.provider, + platform=args.platform, + destination=args.destination, + provider_parameters=provider_parameters, + launch_arguments=args.launch_arg, + ) + + tmp_dir = Path(args.tmp_dir) + tmp_dir.mkdir(parents=True, exist_ok=True) + recipe_path = recipe_path_for_artifact(tmp_dir, args.artifact_id) + write_recipe(recipe, recipe_path) + + try: + result = install_recipe(recipe_path) + finally: + if not args.keep_recipe and recipe_path.exists(): + recipe_path.unlink() + + if result.stdout: + print(result.stdout) + if result.status == STATUS_INSTALLED: + print( + colorize( + format_success_message( + source=args.source, + artifact_id=args.artifact_id, + platform=args.platform, + destination=args.destination, + ), + GREEN, + ) + ) + return 0 + + if result.status == STATUS_PENDING: + if result.stderr: + print(result.stderr, file=sys.stderr) + print( + colorize( + format_pending_message( + source=args.source, + artifact_id=args.artifact_id, + platform=args.platform, + destination=args.destination, + ), + YELLOW, + ) + ) + return 0 + + if result.stderr: + print(result.stderr, file=sys.stderr) + return result.returncode + + +if __name__ == "__main__": + raise SystemExit(main()) diff --git a/skills/tophat-build-install/scripts/install_with_tophat.py b/skills/tophat-build-install/scripts/install_with_tophat.py index 371feae3..463846e9 100644 --- a/skills/tophat-build-install/scripts/install_with_tophat.py +++ 
b/skills/tophat-build-install/scripts/install_with_tophat.py @@ -3,24 +3,30 @@ import argparse import subprocess import sys +from dataclasses import dataclass from pathlib import Path TOPHATCTL_PATH = "/Applications/Tophat.app/Contents/MacOS/tophatctl" TIMEOUT_MESSAGE = "Error: The operation timed out." +STATUS_INSTALLED = "installed" +STATUS_PENDING = "pending" +STATUS_FAILED = "failed" -def main() -> int: - parser = argparse.ArgumentParser( - description="Install a Tophat recipe file and treat the known CLI timeout as in-progress." - ) - parser.add_argument("recipe", help="Path to a Tophat recipe JSON file.") - args = parser.parse_args() +@dataclass +class InstallResult: + status: str + returncode: int + stdout: str + stderr: str + + @property + def combined_output(self) -> str: + return "\n".join(part for part in (self.stdout, self.stderr) if part) - recipe_path = Path(args.recipe).resolve() - if not recipe_path.exists(): - parser.error(f"Recipe file not found: {recipe_path}") +def install_recipe(recipe_path: Path) -> InstallResult: result = subprocess.run( [TOPHATCTL_PATH, "install", str(recipe_path)], check=False, @@ -33,22 +39,59 @@ def main() -> int: combined = "\n".join(part for part in (stdout, stderr) if part) if result.returncode == 0: - if combined: - print(combined) - return 0 + return InstallResult( + status=STATUS_INSTALLED, + returncode=0, + stdout=stdout, + stderr=stderr, + ) if TIMEOUT_MESSAGE in combined: - if stdout: - print(stdout) - print( + pending_stderr = ( "Tophat CLI timed out while waiting for completion. " - "The install request was sent and installation may still be in progress in the Tophat app.", - file=sys.stderr, + "The install request was sent and installation may still be in progress in the Tophat app." 
) + return InstallResult( + status=STATUS_PENDING, + returncode=0, + stdout=stdout, + stderr=pending_stderr, + ) + + return InstallResult( + status=STATUS_FAILED, + returncode=result.returncode, + stdout=stdout, + stderr=stderr, + ) + + +def main() -> int: + parser = argparse.ArgumentParser( + description="Install a Tophat recipe file and treat the known CLI timeout as in-progress." + ) + parser.add_argument("recipe", help="Path to a Tophat recipe JSON file.") + args = parser.parse_args() + + recipe_path = Path(args.recipe).resolve() + if not recipe_path.exists(): + parser.error(f"Recipe file not found: {recipe_path}") + + result = install_recipe(recipe_path) + + if result.status == STATUS_INSTALLED: + if result.combined_output: + print(result.combined_output) + return 0 + + if result.status == STATUS_PENDING: + if result.stdout: + print(result.stdout) + print(result.stderr, file=sys.stderr) return 0 - if combined: - print(combined, file=sys.stderr) + if result.combined_output: + print(result.combined_output, file=sys.stderr) return result.returncode diff --git a/skills/tophat-build-install/scripts/make_recipe.py b/skills/tophat-build-install/scripts/make_recipe.py index 6f6feff5..cea8d26f 100755 --- a/skills/tophat-build-install/scripts/make_recipe.py +++ b/skills/tophat-build-install/scripts/make_recipe.py @@ -4,6 +4,7 @@ import json import sys from pathlib import Path +from typing import Any from gha import infer_owner_repo_from_git @@ -19,6 +20,71 @@ def parse_key_value(item: str) -> tuple[str, str]: return key, value +def build_provider_parameters( + provider: str, + owner: str | None, + repo: str | None, + artifact_id: str | None, + extra_parameters: dict[str, str] | None = None, +) -> dict[str, str]: + provider_parameters = dict(extra_parameters or {}) + + if provider != "gha": + return provider_parameters + + if not owner or not repo: + inferred = infer_owner_repo_from_git() + if inferred: + inferred_owner, inferred_repo = inferred + owner = owner or 
inferred_owner + repo = repo or inferred_repo + + missing = [ + name + for name, value in ( + ("owner", owner), + ("repo", repo), + ("artifact-id", artifact_id), + ) + if not value + ] + if missing: + missing_list = ", ".join(missing) + raise ValueError( + f"provider 'gha' requires --owner, --repo, and --artifact-id; missing: {missing_list}" + ) + + return { + "owner": owner, + "repo": repo, + "artifact_id": artifact_id, + **provider_parameters, + } + + +def build_recipe( + provider: str, + platform: str, + destination: str | None, + provider_parameters: dict[str, str], + launch_arguments: list[str] | None = None, +) -> list[dict[str, Any]]: + return [ + { + "artifactProviderID": provider, + "artifactProviderParameters": provider_parameters, + "launchArguments": list(launch_arguments or []), + "platformHint": platform, + "destinationHint": destination, + } + ] + + +def write_recipe(recipe: list[dict[str, Any]], output_path: Path) -> None: + output = json.dumps(recipe, indent=2) + "\n" + output_path.write_text(output) + + def main() -> int: parser = argparse.ArgumentParser( description="Create a public tophatctl recipe JSON file." 
@@ -60,54 +126,29 @@ def main() -> int: ) args = parser.parse_args() - provider_parameters = dict(args.param) - - if args.provider == "gha": - if not args.owner or not args.repo: - inferred = infer_owner_repo_from_git() - if inferred: - inferred_owner, inferred_repo = inferred - args.owner = args.owner or inferred_owner - args.repo = args.repo or inferred_repo - - missing = [ - name - for name, value in ( - ("owner", args.owner), - ("repo", args.repo), - ("artifact-id", args.artifact_id), - ) - if not value - ] - if missing: - parser.error( - "provider 'gha' requires --owner, --repo, and --artifact-id; " - f"missing: {', '.join(missing)}" - ) - provider_parameters = { - "owner": args.owner, - "repo": args.repo, - "artifact_id": args.artifact_id, - **provider_parameters, - } - - recipe = [ - { - "artifactProviderID": args.provider, - "artifactProviderParameters": provider_parameters, - "launchArguments": args.launch_arg, - "platformHint": args.platform, - "destinationHint": args.destination, - } - ] + try: + provider_parameters = build_provider_parameters( + provider=args.provider, + owner=args.owner, + repo=args.repo, + artifact_id=args.artifact_id, + extra_parameters=dict(args.param), + ) + except ValueError as exc: + parser.error(str(exc)) - output = json.dumps(recipe, indent=2) + "\n" + recipe = build_recipe( + provider=args.provider, + platform=args.platform, + destination=args.destination, + provider_parameters=provider_parameters, + launch_arguments=args.launch_arg, + ) if args.output: - path = Path(args.output) - path.write_text(output) + write_recipe(recipe, Path(args.output)) else: - sys.stdout.write(output) + sys.stdout.write(json.dumps(recipe, indent=2) + "\n") return 0 From 8f078f19c124c62a2a2bea0d29293e7f991ad048 Mon Sep 17 00:00:00 2001 From: Doan Thieu Date: Fri, 10 Apr 2026 10:50:33 +0700 Subject: [PATCH 4/5] [648] Improve tophat repo canonicalization --- skills/tophat-build-install/SKILL.md | 19 +++++++++++----- 
skills/tophat-build-install/scripts/gha.py | 22 +++++++++++++++++-- .../scripts/install_artifact.py | 8 +++++++ 3 files changed, 41 insertions(+), 8 deletions(-) diff --git a/skills/tophat-build-install/SKILL.md b/skills/tophat-build-install/SKILL.md index 9ba20fc9..f21b08f3 100644 --- a/skills/tophat-build-install/SKILL.md +++ b/skills/tophat-build-install/SKILL.md @@ -1,6 +1,6 @@ --- name: tophat-build-install -description: Find, build, and install mobile artifacts for the current repository through GitHub Actions and Tophat. Use when an agent or model needs to help developers, PMs, or reviewers test in-progress work without stashing local changes, switching branches, building locally, or waiting for a manually shared QA build. Handle requests like "install the latest build from main", "install PR #123 on my simulator", or "build this branch and install it". Infer the current GitHub repository from `git remote -v` or `git remote get-url origin` by default. This skill is GitHub Actions only and requires `gh` CLI for build lookup and workflow dispatch. +description: Find, build, and install mobile artifacts for the current repository through GitHub Actions and Tophat. Use when an agent or model needs to help developers, PMs, or reviewers test in-progress work without stashing local changes, switching branches, building locally, or waiting for a manually shared QA build. Handle requests like "install the latest build from main", "install PR #123 on my simulator", or "build this branch and install it". Infer the current GitHub repository from `git remote -v` or `git remote get-url origin` by default, then canonicalize it with `gh repo view` before artifact lookups. This skill is GitHub Actions only and requires `gh` CLI for build lookup and workflow dispatch. --- # Tophat Build Install @@ -20,7 +20,7 @@ This skill supports GitHub Actions artifacts only. Require `gh` for GitHub looku Collect only the missing inputs. - Platform: `ios` or `android`. 
-- Repository: derive owner and repo from the current checkout first. Prefer `git remote get-url origin`, then fall back to `git remote -v`. Ask only if there is no usable GitHub remote. +- Repository: derive owner and repo from the current checkout first. Prefer `git remote get-url origin`, then fall back to `git remote -v`. Canonicalize the inferred repo with `gh repo view <owner>/<repo> --json nameWithOwner` before using any Actions API path. Ask only if there is no usable GitHub remote. - Source selector: branch, PR number, workflow run ID, or explicit artifact ID. - Workflow: workflow file or workflow name when a new build must be triggered. - Artifact choice: artifact name when a run publishes multiple artifacts. - Destination: `simulator` or `device` when the user specifies it or when the build output differs by target. - Launch arguments: only if the app needs them. @@ -59,10 +59,11 @@ For device installs, do not assume local Xcode automatic signing has any effect Use this decision order: 1. Infer `owner/repo` from the current checkout. -2. Determine the target ref or artifact from the user request. -3. If the user gave an explicit artifact ID, install it directly. -4. Otherwise, look for an existing matching artifact before triggering a new build. -5. Trigger a workflow only when no suitable artifact already exists or when the user explicitly asks for a rebuild. +2. Canonicalize that repository with `gh repo view` and use `nameWithOwner` for all later GitHub API calls. +3. Determine the target ref or artifact from the user request. +4. If the user gave an explicit artifact ID, install it directly. +5. Otherwise, look for an existing matching artifact before triggering a new build. +6. Trigger a workflow only when no suitable artifact already exists or when the user explicitly asks for a rebuild. This keeps the skill useful for both developer and PM flows, while minimizing unnecessary builds. @@ -70,6 +71,8 @@ This keeps the skill useful for both developer and PM flows, while minimizing un Resolve the user's target into a concrete artifact. Use `gh` when you need to inspect PRs, workflows, runs, or artifacts.
Use `scripts/gha.py list-artifacts` when you want the local helper to list non-expired artifacts for the current repo or a chosen selector. +Before calling any Actions artifact endpoint, canonicalize the repository with `gh repo view --json nameWithOwner,url`. A stale Git remote can still resolve to a different canonical repository on GitHub, and the Actions API must use that canonical `nameWithOwner`. + When using `gh run list --json`, only request fields that `gh` actually supports in the current CLI. For workflow runs, prefer the stable set: - `databaseId` @@ -108,6 +111,10 @@ Prefer this fallback order when resolving an artifact: The helper must be treated as the authoritative artifact fallback because it uses the repository artifact API shape directly. It should paginate through repository artifacts instead of assuming the first page contains the desired result. +If an artifact endpoint returns `404`, do not stop immediately. First confirm whether `gh repo view` resolves the repository to a different `nameWithOwner`, then retry the artifact lookup with that canonical repository. If the canonical repository still returns `404`, explain whether the repository exists but exposes no accessible artifact API, or whether the original repo path itself was stale. + +When recent successful runs exist but each run's artifact endpoint returns zero artifacts, say that explicitly. That is a different outcome from "artifact lookup failed" and usually means the workflow distributed the build elsewhere or did not upload an Actions artifact. + If a workflow must be triggered, add workflow inputs when required. If multiple artifacts exist, match by platform or ask the user which artifact to install. 
diff --git a/skills/tophat-build-install/scripts/gha.py b/skills/tophat-build-install/scripts/gha.py index b14fd5a0..c6c464f2 100644 --- a/skills/tophat-build-install/scripts/gha.py +++ b/skills/tophat-build-install/scripts/gha.py @@ -88,11 +88,25 @@ def run_gh_json_with_repo_hint(args: list[str], repo: str) -> Any: raise raise SystemExit( "GitHub artifact lookup returned 404 for " - f"{repo}. Check that the repository exists, your gh auth can access it, " - "and GitHub Actions artifacts are available for that repository." + f"{repo}. The repository exists if `gh repo view {repo}` succeeds, " + "so this usually means the artifact API path is unavailable, not accessible " + "to your token, or the original repo path was stale before canonicalization." ) from exc +def canonicalize_repo(repo: str) -> tuple[str, bool]: + try: + payload = run_gh_json(["repo", "view", repo, "--json", "nameWithOwner"]) + except SystemExit as exc: + raise SystemExit( + f"GitHub repository lookup failed for {repo}. " + "Check that the repository exists and your gh auth can access it." 
+ ) from exc + + canonical_repo = payload.get("nameWithOwner") or repo + return canonical_repo, canonical_repo != repo + + def normalize_sha(value: str) -> str: return value.strip().lower() @@ -219,6 +233,10 @@ def list_artifacts( run_id: int | None, platform: str, ) -> int: + repo, repo_was_canonicalized = canonicalize_repo(repo) + if repo_was_canonicalized: + print(f"Using canonical GitHub repository: {repo}", file=sys.stderr) + if pr is not None: pr_ref, pr_sha = resolve_pr_ref(repo, pr) ref = ref or pr_ref diff --git a/skills/tophat-build-install/scripts/install_artifact.py b/skills/tophat-build-install/scripts/install_artifact.py index c93a93eb..2ad2aab5 100644 --- a/skills/tophat-build-install/scripts/install_artifact.py +++ b/skills/tophat-build-install/scripts/install_artifact.py @@ -26,6 +26,7 @@ def format_success_message( artifact_id: str, platform: str, destination: str | None, + repo: str | None, ) -> str: parts = [ f"Installed build successfully: {source}", @@ -34,6 +35,8 @@ def format_success_message( ] if destination: parts.append(destination) + if repo: + parts.append(repo) return " · ".join(parts) @@ -42,6 +45,7 @@ def format_pending_message( artifact_id: str, platform: str, destination: str | None, + repo: str | None, ) -> str: parts = [ f"Install is still in progress in Tophat: {source}", @@ -50,6 +54,8 @@ def format_pending_message( ] if destination: parts.append(destination) + if repo: + parts.append(repo) return " · ".join(parts) @@ -150,6 +156,7 @@ def main() -> int: artifact_id=args.artifact_id, platform=args.platform, destination=args.destination, + repo=args.repo, ), GREEN, ) @@ -166,6 +173,7 @@ def main() -> int: artifact_id=args.artifact_id, platform=args.platform, destination=args.destination, + repo=args.repo, ), YELLOW, ) From 2f877d835ed23485293ac5b6443370f5215cbd0c Mon Sep 17 00:00:00 2001 From: Doan Thieu Date: Fri, 10 Apr 2026 11:29:57 +0700 Subject: [PATCH 5/5] [648] Refine skill and scripts --- 
skills/tophat-build-install/SKILL.md | 125 ++------ skills/tophat-build-install/scripts/gha.py | 297 +++++++++--------- .../scripts/install_artifact.py | 35 +-- .../scripts/install_with_tophat.py | 13 +- 4 files changed, 194 insertions(+), 276 deletions(-) diff --git a/skills/tophat-build-install/SKILL.md b/skills/tophat-build-install/SKILL.md index f21b08f3..008086c1 100644 --- a/skills/tophat-build-install/SKILL.md +++ b/skills/tophat-build-install/SKILL.md @@ -1,105 +1,56 @@ --- name: tophat-build-install -description: Find, build, and install mobile artifacts for the current repository through GitHub Actions and Tophat. Use when an agent or model needs to help developers, PMs, or reviewers test in-progress work without stashing local changes, switching branches, building locally, or waiting for a manually shared QA build. Handle requests like "install the latest build from main", "install PR #123 on my simulator", or "build this branch and install it". Infer the current GitHub repository from `git remote -v` or `git remote get-url origin` by default, then canonicalize it with `gh repo view` before artifact lookups. This skill is GitHub Actions only and requires `gh` CLI for build lookup and workflow dispatch. +description: Find, build, and install mobile artifacts for the current repository through GitHub Actions and Tophat. Use it for requests like "install the latest build from main", "install PR #123 on my simulator", or "build this branch and install it". Default to the current GitHub repository, use `gh` for lookup and workflow dispatch, and use the local helper scripts for artifact listing and Tophat install. --- # Tophat Build Install -## Overview +Use this skill to turn a user request into a concrete GitHub Actions artifact install through Tophat. -Use this skill to turn a user request into a concrete GitHub Actions artifact install through Tophat for the current repository. The main use cases are: +This skill is GitHub Actions only. 
Require: -- developers testing another branch, PR, or commit without stashing work or switching branches -- PMs or reviewers testing pre-merge work early -- reusing an existing artifact when the target commit was already built +- `gh` +- Tophat at `/Applications/Tophat.app/Contents/MacOS/tophatctl` with the `gha` provider installed +- a repository that already publishes GitHub Actions artifacts -This skill supports GitHub Actions artifacts only. Require `gh` for GitHub lookup and workflow dispatch. Do not switch to GitHub MCP, direct REST workarounds, or other artifact providers. +Do not switch to another artifact provider from this skill. -## Gather Inputs +## Inputs Collect only the missing inputs. - Platform: `ios` or `android`. -- Repository: derive owner and repo from the current checkout first. Prefer `git remote get-url origin`, then fall back to `git remote -v`. Canonicalize the inferred repo with `gh repo view / --json nameWithOwner` before using any Actions API path. Ask only if there is no usable GitHub remote. +- Repository: derive it from the current checkout first. Ask only if there is no usable GitHub remote. - Source selector: branch, PR number, workflow run ID, or explicit artifact ID. -- Workflow: workflow file or workflow name when a new build must be triggered. -- Artifact choice: artifact name when a run publishes multiple artifacts. -- Destination: `simulator` or `device` when the user specifies it or when the build output differs by target. +- Workflow: only when a new build must be triggered. +- Destination: `simulator` or `device` when it matters. - Launch arguments: only if the app needs them. If the user says "latest build", prefer the latest successful existing run for the requested branch instead of forcing a rebuild. If the user says "build", "rebuild", or "trigger", dispatch a new workflow run first. 
-If the user already supplies an explicit `artifact_id`, you may skip build lookup, but still keep the workflow within the GitHub Actions artifact path. +## Default Flow -For requests in the current repository, do not ask for owner or repo unless Git remotes are missing or point somewhere non-GitHub. +Use this order: -Prefer defaulting to the current repository over asking broad clarifying questions. +1. Infer the current repository. +2. Resolve the target from the user request. +3. If the user gave an explicit artifact ID, install it directly. +4. Otherwise, look for an existing artifact first. +5. Trigger a workflow only when no suitable artifact exists or the user explicitly asked for a rebuild. -For device requests, ask for `simulator` vs `device` early if the user did not specify it, because CI signing availability materially changes whether the install can succeed. - -## Validate Prerequisites - -Before installing, confirm: - -- `gh` is authenticated -- Tophat is installed and running -- Tophat exposes the `gha` provider -- the helper scripts in `scripts/` are available from the skill directory - -For `destination=device`, also confirm the repository already has a CI workflow that produces a signed device-installable artifact. - -Use `/Applications/Tophat.app/Contents/MacOS/tophatctl` as the Tophat CLI path. Require Tophat to be installed and running. Require a GitHub personal access token to already be configured in Tophat's GitHub Actions extension settings. Require the GitHub Actions provider ID `gha` to be present in `tophatctl list providers`. Here, `gha` is Tophat's provider ID for the GitHub Actions extension. - -If `gh` is not authenticated, the GitHub PAT is not configured in Tophat, or `gha` is not installed, stop and tell the user what is missing. Do not fall back to GitHub MCP, raw REST calls, or another artifact provider. - -For device installs, do not assume local Xcode automatic signing has any effect on CI. 
CI must already have access to signing assets and the workflow must already be able to produce a signed device build. - -## Default Behavior - -Use this decision order: - -1. Infer `owner/repo` from the current checkout. -2. Canonicalize that repository with `gh repo view` and use `nameWithOwner` for all later GitHub API calls. -3. Determine the target ref or artifact from the user request. -4. If the user gave an explicit artifact ID, install it directly. -5. Otherwise, look for an existing matching artifact before triggering a new build. -6. Trigger a workflow only when no suitable artifact already exists or when the user explicitly asks for a rebuild. - -This keeps the skill useful for both developer and PM flows, while minimizing unnecessary builds. - -## Resolve The Build +## Build Resolution Resolve the user's target into a concrete artifact. Use `gh` when you need to inspect PRs, workflows, runs, or artifacts. Use `scripts/gha.py list-artifacts` when you want the local helper to list non-expired artifacts for the current repo or a chosen selector. -Before calling any Actions artifact endpoint, canonicalize the repository with `gh repo view --json nameWithOwner,url`. A stale Git remote can still resolve to a different canonical repository on GitHub, and the Actions API must use that canonical `nameWithOwner`. - -When using `gh run list --json`, only request fields that `gh` actually supports in the current CLI. For workflow runs, prefer the stable set: - -- `databaseId` -- `headBranch` -- `headSha` -- `name` -- `status` -- `conclusion` -- `createdAt` -- `updatedAt` -- `url` -- `workflowDatabaseId` +The helper prefers the repository-wide artifacts endpoint, but if GitHub returns `404` there it should fall back to recent workflow runs instead of stopping immediately. -Do not assume REST field names work in `gh run list --json`. For example, `id`, `head_branch`, and `head_sha` are not valid there. - -Do not assume `headRefOid` exists in `gh pr view --json`. 
When you need a PR head SHA, prefer `gh api repos/<owner>/<repo>/pulls/<number>` and read `head.ref` plus `head.sha`. - -Do not request `artifacts` from `gh run view --json`. If you need artifacts for a workflow run, use `scripts/gha.py list-artifacts --run-id <run-id>` or call the artifact API shape directly instead of relying on `gh run view`. - -Prefer this fallback order when resolving an artifact: +Prefer this order: 1. If the user gave an explicit artifact ID, install it directly. 2. If the user gave a workflow run ID, list artifacts and match `artifact.workflow_run.id`. 3. If the user gave a PR number, resolve the PR head branch and head SHA, then list artifacts for that head. 4. If the user gave a branch or commit SHA, list artifacts and match branch and/or SHA. -5. Only inspect workflow runs with `gh run list` when you need workflow metadata to decide whether to trigger a new build. -6. Trigger a workflow only when no matching non-expired artifact exists or the user explicitly asked for a rebuild. +5. Trigger a workflow only when no matching non-expired artifact exists or the user explicitly asked for a rebuild. `scripts/gha.py list-artifacts` supports these selectors: @@ -109,41 +60,27 @@ Prefer this fallback order when resolving an artifact: - `--run-id <run-id>` - `--platform ios|android` -The helper must be treated as the authoritative artifact fallback because it uses the repository artifact API shape directly. It should paginate through repository artifacts instead of assuming the first page contains the desired result. - -If an artifact endpoint returns `404`, do not stop immediately. First confirm whether `gh repo view` resolves the repository to a different `nameWithOwner`, then retry the artifact lookup with that canonical repository. If the canonical repository still returns `404`, explain whether the repository exists but exposes no accessible artifact API, or whether the original repo path itself was stale.
- -When recent successful runs exist but each run's artifact endpoint returns zero artifacts, say that explicitly. That is a different outcome from "artifact lookup failed" and usually means the workflow distributed the build elsewhere or did not upload an Actions artifact. - -If a workflow must be triggered, add workflow inputs when required. - If multiple artifacts exist, match by platform or ask the user which artifact to install. -When the repository follows an artifact naming convention tied to commit SHA, prefer an existing artifact for the target commit before triggering a new workflow. +## Install + +Prefer `scripts/install_artifact.py` for the normal install flow. It creates a temporary recipe, calls Tophat, prints a short status line, and removes the recipe unless `--keep-recipe` is set. -For `destination=device`, only use workflows that are already configured to sign for device installation. Do not require any specific signing toolchain from this skill. Do not use this skill to create certificates, provisioning profiles, or signing configuration from scratch during a normal install request. +Use `/Applications/Tophat.app/Contents/MacOS/tophatctl` as the Tophat CLI path. Keep that path explicit when reporting missing local tooling so agents do not guess. -## Create The Tophat Recipe +Use `scripts/make_recipe.py` directly only when you need the raw recipe JSON. -Create a temporary recipe in `tmp/` with the filename convention `tophat-recipe-.json`. Use `scripts/make_recipe.py` directly only when you need the raw JSON helper. Prefer `scripts/install_artifact.py` for the full caller flow so recipe naming, cleanup, and user-facing status stay consistent. The provider ID must be `gha`, and the recipe must include: +For `gha`, the recipe must include: - `owner` - `repo` - `artifact_id` -When `owner` and `repo` are omitted, infer them from the current Git checkout. - -Do not use another provider from this skill. 
- -## Install - -Prefer `scripts/install_artifact.py` for the install flow. It should create `tmp/tophat-recipe-.json`, call `scripts/install_with_tophat.py`, print the user-facing status, and remove the temporary recipe unless the user explicitly wants to keep it for debugging. - -On a normal successful install, print one concise green success line that includes the source, artifact ID, platform, and destination when present. On the known timeout case, do not claim the build is fully installed; report that installation may still be in progress in Tophat instead. +If `owner` and `repo` are omitted, infer them from Git. -`scripts/install_with_tophat.py` should treat the known `tophatctl` timeout as "install may still be in progress in Tophat" instead of a definitive failure. +Treat the known `tophatctl` timeout as "install may still be in progress in Tophat", not as a hard failure. -If the user asks for a device install but the repository has no known signed-device CI path, stop and explain that a signed device artifact must exist first. Recommend `simulator` as the fallback destination when appropriate. +For `destination=device`, use only workflows that already produce signed device builds. If that path does not exist, stop and explain it. 
## Reference Notes diff --git a/skills/tophat-build-install/scripts/gha.py b/skills/tophat-build-install/scripts/gha.py index c6c464f2..e3450216 100644 --- a/skills/tophat-build-install/scripts/gha.py +++ b/skills/tophat-build-install/scripts/gha.py @@ -8,14 +8,16 @@ from typing import Any +GITHUB_REMOTE_PATTERNS = ( + r"^git@github\.com:(?P[^/]+)/(?P[^/]+?)(?:\.git)?$", + r"^https://github\.com/(?P[^/]+)/(?P[^/]+?)(?:\.git)?$", + r"^ssh://git@github\.com/(?P[^/]+)/(?P[^/]+?)(?:\.git)?$", +) + + def parse_github_owner_repo(remote_url: str) -> tuple[str, str] | None: remote_url = remote_url.strip() - patterns = ( - r"^git@github\.com:(?P[^/]+)/(?P[^/]+?)(?:\.git)?$", - r"^https://github\.com/(?P[^/]+)/(?P[^/]+?)(?:\.git)?$", - r"^ssh://git@github\.com/(?P[^/]+)/(?P[^/]+?)(?:\.git)?$", - ) - for pattern in patterns: + for pattern in GITHUB_REMOTE_PATTERNS: match = re.match(pattern, remote_url) if match: return match.group("owner"), match.group("repo") @@ -23,11 +25,7 @@ def parse_github_owner_repo(remote_url: str) -> tuple[str, str] | None: def infer_owner_repo_from_git() -> tuple[str, str] | None: - preferred_remotes = ("origin", "upstream") - checked = set() - - for remote in preferred_remotes: - checked.add(remote) + for remote in ("origin", "upstream"): result = subprocess.run( ["git", "remote", "get-url", remote], check=False, @@ -52,11 +50,7 @@ def infer_owner_repo_from_git() -> tuple[str, str] | None: parts = line.split() if len(parts) < 2: continue - remote_name = parts[0] - remote_url = parts[1] - if remote_name in checked: - continue - parsed = parse_github_owner_repo(remote_url) + parsed = parse_github_owner_repo(parts[1]) if parsed: return parsed @@ -79,55 +73,34 @@ def run_gh_json(args: list[str]) -> Any: raise SystemExit(f"Failed to parse gh JSON output: {exc}") from exc -def run_gh_json_with_repo_hint(args: list[str], repo: str) -> Any: - try: - return run_gh_json(args) - except SystemExit as exc: - message = str(exc) - if "Not Found (HTTP 404)" not in 
def is_not_found_error(message: str) -> bool:
    """Return True when *message* looks like a gh CLI HTTP 404 failure."""
    return "Not Found (HTTP 404)" in message


def canonicalize_repo(repo: str) -> str:
    """Resolve *repo* to the canonical ``owner/name`` reported by ``gh repo view``.

    Falls back to the input when the payload has no ``nameWithOwner``. When the
    canonical name differs from the input, a note is printed to stderr so the
    caller can see which repository is actually being queried.
    """
    payload = run_gh_json(["repo", "view", repo, "--json", "nameWithOwner"])
    canonical_repo = payload.get("nameWithOwner") or repo
    if canonical_repo != repo:
        print(f"Using canonical GitHub repository: {canonical_repo}", file=sys.stderr)
    return canonical_repo


def normalize_sha(value: str) -> str:
    """Trim whitespace and lower-case a commit SHA for comparison."""
    return value.strip().lower()


def sha_matches(actual_sha: str, requested_sha: str) -> bool:
    """True when *actual_sha* starts with *requested_sha* after normalization.

    An empty *requested_sha* matches anything, so callers can pass "" to mean
    "no SHA filter".
    """
    if not requested_sha:
        return True
    return normalize_sha(actual_sha).startswith(normalize_sha(requested_sha))


def workflow_run_matches_ref(workflow_run: dict[str, Any], ref: str, sha: str) -> bool:
    """True when a workflow-run payload matches the branch *ref* and *sha* prefix.

    Accepts both snake_case (``head_branch``/``head_sha``) and camelCase
    (``headBranch``/``headSha``) key spellings; empty filters always match.
    """
    branch = workflow_run.get("head_branch") or workflow_run.get("headBranch") or ""
    commit = workflow_run.get("head_sha") or workflow_run.get("headSha") or ""
    if ref and branch != ref:
        return False
    return sha_matches(commit, sha)
def _collect_run_artifacts(repo: str, runs: list[dict[str, Any]]) -> list[dict[str, Any]]:
    """Fetch artifacts for each run and backfill run metadata onto each artifact.

    ``gh run list`` payloads carry camelCase ``headBranch``/``headSha`` while an
    artifact's ``workflow_run`` sub-object may omit the snake_case
    ``head_branch``/``head_sha`` keys; copying them over lets later ref/SHA
    filtering treat every artifact uniformly.
    """
    artifacts: list[dict[str, Any]] = []
    for run in runs:
        run_id = run.get("databaseId")
        if not run_id:
            # Without a database ID the run's artifact endpoint cannot be queried.
            continue
        for artifact in list_run_artifacts(repo, int(run_id)):
            workflow_run = artifact.get("workflow_run") or {}
            if "head_branch" not in workflow_run and run.get("headBranch"):
                workflow_run["head_branch"] = run.get("headBranch")
            if "head_sha" not in workflow_run and run.get("headSha"):
                workflow_run["head_sha"] = run.get("headSha")
            artifact["workflow_run"] = workflow_run
            artifacts.append(artifact)
    return artifacts


def list_matching_run_artifacts(repo: str, ref: str, sha: str) -> list[dict[str, Any]]:
    """List artifacts from recent workflow runs matching *ref* and/or *sha*."""
    matching_runs = [
        run
        for run in list_runs(repo, ref=ref)
        if workflow_run_matches_ref(run, ref=ref, sha=sha)
    ]
    return _collect_run_artifacts(repo, matching_runs)


def list_all_run_artifacts(repo: str) -> list[dict[str, Any]]:
    """List artifacts from recent workflow runs without any ref/SHA filter."""
    return _collect_run_artifacts(repo, list_runs(repo, ref=""))


def artifact_matches(
    artifact: dict[str, Any],
    ref: str,
    sha: str,
    run_id: int | None,
    platform: str,
) -> bool:
    """Return True when *artifact* passes every requested filter.

    Filters applied: the artifact is not expired; it belongs to *run_id* when
    one is given; its ``workflow_run`` matches *ref*/*sha*; and *platform*
    (case-insensitive) appears in the artifact name when given.
    """
    if artifact.get("expired"):
        return False
    workflow_run = artifact.get("workflow_run") or {}
    if run_id is not None and workflow_run.get("id") != run_id:
        return False
    if not workflow_run_matches_ref(workflow_run, ref=ref, sha=sha):
        return False
    if platform and platform.lower() not in (artifact.get("name") or "").lower():
        return False
    return True
def print_artifacts(artifacts: list[dict[str, Any]]) -> None:
    """Print one tab-separated row per artifact, newest ``created_at`` first.

    Columns: artifact id, name, head branch, short (7-char) head SHA, and
    ``created_at``. Missing fields render as empty strings so the column
    layout stays stable for downstream parsing.
    """
    ordered = sorted(
        artifacts,
        key=lambda entry: entry.get("created_at") or "",
        reverse=True,
    )
    for artifact in ordered:
        run_info = artifact.get("workflow_run") or {}
        columns = [
            str(artifact.get("id", "")),
            artifact.get("name") or "",
            run_info.get("head_branch") or "",
            (run_info.get("head_sha") or "")[:7],
            artifact.get("created_at") or "",
        ]
        print("\t".join(columns))
artifact.get("created_at") or "" - print( - "\t".join( - [ - str(artifact.get("id", "")), - name, - head_branch, - short_sha, - created_at, - ] + print_artifacts( + [ + artifact + for artifact in artifacts + if artifact_matches( + artifact, + ref=ref, + sha=sha, + run_id=run_id, + platform=platform, ) - ) - + ] + ) return 0 @@ -315,16 +314,8 @@ def main() -> int: default="", help="Commit SHA or prefix to match against the artifact workflow run.", ) - list_parser.add_argument( - "--pr", - type=int, - help="PR number to resolve to its current head branch and SHA.", - ) - list_parser.add_argument( - "--run-id", - type=int, - help="Workflow run ID to match against artifact.workflow_run.id.", - ) + list_parser.add_argument("--pr", type=int, help="PR number to resolve.") + list_parser.add_argument("--run-id", type=int, help="Workflow run database ID.") list_parser.add_argument( "--platform", default="", @@ -334,25 +325,25 @@ def main() -> int: args = parser.parse_args() - if args.command == "list-artifacts": - repo = args.repo - if not repo: - inferred = infer_owner_repo_from_git() - if not inferred: - raise SystemExit("Could not infer GitHub repository from git remotes.") - owner, repository = inferred - repo = f"{owner}/{repository}" - return list_artifacts( - repo=repo, - ref=args.ref, - sha=args.sha, - pr=args.pr, - run_id=args.run_id, - platform=args.platform, - ) + if args.command != "list-artifacts": + parser.print_help(sys.stderr) + return 1 - parser.print_help(sys.stderr) - return 1 + repo = args.repo + if not repo: + inferred = infer_owner_repo_from_git() + if not inferred: + raise SystemExit("Could not infer GitHub repository from git remotes.") + repo = "/".join(inferred) + + return list_artifacts( + repo=repo, + ref=args.ref, + sha=args.sha, + pr=args.pr, + run_id=args.run_id, + platform=args.platform, + ) if __name__ == "__main__": diff --git a/skills/tophat-build-install/scripts/install_artifact.py b/skills/tophat-build-install/scripts/install_artifact.py 
index 2ad2aab5..7111458f 100644 --- a/skills/tophat-build-install/scripts/install_artifact.py +++ b/skills/tophat-build-install/scripts/install_artifact.py @@ -21,41 +21,20 @@ def colorize(message: str, color: str) -> str: return f"{color}{message}{RESET}" -def format_success_message( +def format_status_message( + prefix: str, source: str, artifact_id: str, platform: str, destination: str | None, - repo: str | None, ) -> str: parts = [ - f"Installed build successfully: {source}", + f"{prefix}: {source}", f"artifact {artifact_id}", platform, ] if destination: parts.append(destination) - if repo: - parts.append(repo) - return " · ".join(parts) - - -def format_pending_message( - source: str, - artifact_id: str, - platform: str, - destination: str | None, - repo: str | None, -) -> str: - parts = [ - f"Install is still in progress in Tophat: {source}", - f"artifact {artifact_id}", - platform, - ] - if destination: - parts.append(destination) - if repo: - parts.append(repo) return " · ".join(parts) @@ -151,12 +130,12 @@ def main() -> int: if result.status == STATUS_INSTALLED: print( colorize( - format_success_message( + format_status_message( + prefix="Installed build successfully", source=args.source, artifact_id=args.artifact_id, platform=args.platform, destination=args.destination, - repo=args.repo, ), GREEN, ) @@ -168,12 +147,12 @@ def main() -> int: print(result.stderr, file=sys.stderr) print( colorize( - format_pending_message( + format_status_message( + prefix="Install is still in progress in Tophat", source=args.source, artifact_id=args.artifact_id, platform=args.platform, destination=args.destination, - repo=args.repo, ), YELLOW, ) diff --git a/skills/tophat-build-install/scripts/install_with_tophat.py b/skills/tophat-build-install/scripts/install_with_tophat.py index 463846e9..79c261d3 100644 --- a/skills/tophat-build-install/scripts/install_with_tophat.py +++ b/skills/tophat-build-install/scripts/install_with_tophat.py @@ -27,6 +27,14 @@ def 
combined_output(self) -> str: def install_recipe(recipe_path: Path) -> InstallResult: + if not Path(TOPHATCTL_PATH).exists(): + return InstallResult( + status=STATUS_FAILED, + returncode=1, + stdout="", + stderr=f"Tophat CLI not found at {TOPHATCTL_PATH}", + ) + result = subprocess.run( [TOPHATCTL_PATH, "install", str(recipe_path)], check=False, @@ -68,7 +76,10 @@ def install_recipe(recipe_path: Path) -> InstallResult: def main() -> int: parser = argparse.ArgumentParser( - description="Install a Tophat recipe file and treat the known CLI timeout as in-progress." + description=( + "Install a Tophat recipe file with " + f"{TOPHATCTL_PATH} and treat the known CLI timeout as in-progress." + ) ) parser.add_argument("recipe", help="Path to a Tophat recipe JSON file.") args = parser.parse_args()