diff --git a/.gitignore b/.gitignore index 9ebe964fddb..49b2768d4e2 100644 --- a/.gitignore +++ b/.gitignore @@ -22,3 +22,6 @@ stores/snapcraft/insider/*.snap stores/snapcraft/stable/*.snap node_modules yarn.lock + +.claude/ +fv diff --git a/AGENTS.md b/AGENTS.md new file mode 100644 index 00000000000..5a231634b91 --- /dev/null +++ b/AGENTS.md @@ -0,0 +1,240 @@ +# Codex Development Guide + +This repository builds **Codex**, a freely-licensed VS Code distribution for scripture translation. It is a fork of [VSCodium](https://github.com/VSCodium/vscodium) with custom branding, patches, and bundled extensions. The build clones Microsoft's VS Code, applies patches and source overlays, bundles extensions, and compiles platform-specific binaries. + +## Upstream Relationship + +``` +Microsoft/vscode (source code) + ↓ (cloned at specific commit) +VSCodium/vscodium (origin) ──patches──→ VSCodium binaries + ↓ (forked) +This repo (Codex) ──patches──→ Codex binaries +``` + +**Remotes:** +- `origin` = VSCodium/vscodium (upstream we sync from) +- `nexus` = BiblioNexus-Foundation/codex (our main repo) + +## Repository Structure + +``` +patches/ # Patch files applied to vscode source (alphabetical order) + *.patch # Core patches applied to all builds + insider/ # Insider-only patches + osx/ linux/ windows/# Platform-specific patches + user/ # Optional user patches (hide-activity-bar, microphone, etc.) +src/stable/ # Source overlay — copied into vscode/ before patches + cli/src/commands/ # Rust CLI additions (e.g. pin.rs) + src/vs/workbench/contrib/ # Workbench contributions (e.g. 
codexConductor/) + resources/ # Branding assets (icons, desktop files) +extensions/ # Built-in extensions compiled with the VS Code build +bundle-extensions.json# Extensions downloaded from GitHub Releases during build +dev/ # Development helper scripts +vscode/ # Cloned vscode repo (gitignored, generated during build) +``` + +## Building + +### Local Development Build + +```bash +./dev/build.sh +``` + +This runs the full pipeline: clone vscode → copy source overlays → apply patches → `npm ci` → compile → bundle extensions → produce platform binary. + +**Flags:** +- `-s` — Skip source clone (reuse existing `vscode/`). Patches and overlays are still re-applied. +- `-o` — Prep source only, skip compilation. +- `-l` — Use latest VS Code version from Microsoft's update API. +- `-i` — Build insider variant. +- `-p` — Include asset packaging (installers). + +Flags combine: `./dev/build.sh -sl` skips clone and uses latest. + +### Build Pipeline + +``` +dev/build.sh + ├─ get_repo.sh # Clone vscode at commit from upstream/stable.json + ├─ version.sh # Compute release version (e.g. 1.108.12007) + ├─ prepare_vscode.sh # Copy src/stable/* overlay, merge product.json, + │ # apply patches/*.patch, run npm ci + ├─ build.sh # gulp compile, webpack extensions, minify, + │ ├─ get-extensions.sh # Download VSIXs from bundle-extensions.json + │ └─ gulp vscode-{platform}-{arch}-min-ci + └─ prepare_assets.sh # Create installers (only with -p flag) +``` + +### What Gets Modified vs What's New + +There are two ways to add Codex-specific code to the VS Code source: + +- **Source overlays** (`src/stable/`): For **new files**. Copied verbatim into `vscode/` before patches run. Use for new workbench contributions, new Rust CLI modules, new resources. +- **Patches** (`patches/`): For **modifying existing VS Code files**. Small, surgical diffs. Use for adding imports, registering contributions, changing config values. 
+ +### Extension Bundling + +Extensions reach the final build three ways: + +| Method | Config | When | +|--------|--------|------| +| **Built-in** (compiled from source) | `vscode/extensions/` | Compiled by gulp during build | +| **Downloaded** (pre-built VSIX) | `bundle-extensions.json` | Downloaded from GitHub Releases by `get-extensions.sh` | +| **Sideloaded** (runtime install) | Extension sideloader config | Installed from OpenVSX on first launch | + +### Output + +| Platform | Output | +|----------|--------| +| macOS | `VSCode-darwin-{arch}/Codex.app` | +| Linux | `VSCode-linux-{arch}/` | +| Windows | `VSCode-win32-{arch}/` | + +On macOS: `open VSCode-darwin-arm64/Codex.app` + +## Working with Patches + +### Key Rules + +1. **Never edit patch files by hand.** Always generate them with `git diff --staged` inside `vscode/`. Hand-written patches fail with "corrupt patch" errors. +2. **Patches are applied alphabetically.** A patch can depend on patches that sort before it (e.g. `feat-cli-pinning.patch` depends on `binary-name.patch`). +3. **Patches use placeholder variables** (`!!APP_NAME!!`, `!!BINARY_NAME!!`, `!!GH_REPO_PATH!!`, etc.) that are substituted during application. +4. **New files go in the source overlay**, not in patches. Only use patches to modify existing VS Code files. + +### Creating or Updating a Patch + +Use `dev/patch.sh` to ensure the correct baseline: + +```bash +# Edit feat-cli-pinning.patch, which depends on binary-name.patch: +./dev/patch.sh binary-name feat-cli-pinning + +# The script: +# 1. Resets vscode/ to pristine upstream +# 2. Applies binary-name.patch as the baseline +# 3. Applies feat-cli-pinning.patch (with --reject if it partially fails) +# 4. Waits for you to make changes in vscode/ +# 5. Press any key → regenerates the patch from git diff --staged -U1 +``` + +The last argument is the patch being edited. All preceding arguments are prerequisites that form the baseline. 
**Always list all patches your target depends on.** + +### Manual Patch Workflow + +If `dev/patch.sh` isn't suitable (e.g. non-interactive environment): + +```bash +cd vscode +git reset --hard HEAD # Clean state + +# Apply prerequisites +git apply --ignore-whitespace ../patches/binary-name.patch +git add . && git commit --no-verify -q -m "baseline" + +# Make your changes to existing VS Code files +# ... + +# Generate the patch +git add . +git diff --staged -U1 > ../patches/my-feature.patch +``` + +### Validating Patches + +```bash +# Test all patches apply cleanly in sequence: +./dev/update_patches.sh + +# Or manually test one: +cd vscode +git apply --check ../patches/my-feature.patch +``` + +### Patch Dependencies + +Some Codex patches modify files that earlier patches also touch. When this happens, the later patch must be generated against a tree that includes the earlier patch. Current known dependencies: + +| Patch | Depends on | +|-------|-----------| +| `feat-cli-pinning.patch` | `binary-name.patch` (both modify `nativeHostMainService.ts`) | + +If a patch fails to apply with "patch does not apply", check whether a prerequisite patch changed the same file. Regenerate using `dev/patch.sh` with the prerequisite listed first. + +## Codex-Specific Components + +### CodexConductor (Workbench Contribution) + +**Location:** `src/stable/src/vs/workbench/contrib/codexConductor/` +**Patch:** `patches/feat-codex-conductor.patch` (adds the import to `workbench.common.main.ts`) +**Robustness Patch:** `patches/zzz-authoritative-reload.patch` (enables `forceProfile` in window reloads) + +Enforces project-scoped extension version pins. Reads `pinnedExtensions` from project `metadata.json` or Frontier's `workspaceState`, downloads VSIXs from GitHub Release URLs, installs into deterministic VS Code profiles, and switches the extension host. 
+ +**Key Robustness Features:** +- **Authoritative Reload:** Uses a patched `reload({ forceProfile: name })` IPC command to ensure the Main process opens the new window in the correct profile, bypassing persistence race conditions and dev-mode restrictions. +- **Initialization Yielding:** Works in tandem with `codex-editor` which returns early from `activate()` if a mismatch is detected, showing a "pins applying" message on the splash screen. +- **Duplicate Prevention:** Explicitly calls `resetWorkspaces()` before associating a profile to ensure lookup consistency. +- **Loop Guard:** Includes a 3-cycle circuit breaker to prevent infinite reload loops if enforcement fails. +- **Lifecycle Management:** Automatic cleanup of orphaned profiles every 14 days. + +### CLI Pin Commands (Rust) + +**Overlay:** `src/stable/cli/src/commands/pin.rs` +**Patch:** `patches/feat-cli-pinning.patch` (registers the `pin` subcommand in args/argv, adds `PinningError`, refactors macOS shell command install for `codex-cli` symlink) + +Adds `codex pin list/add/remove` to the Rust CLI. The `add` command downloads a remote VSIX, extracts the extension ID and version, and writes the pin to `metadata.json`. + +### Extension Bundling + +**Config:** `bundle-extensions.json` +**Script:** `get-extensions.sh` + +Declarative JSON config for extensions downloaded as pre-built VSIXs from GitHub Releases during the build. 
+ +## Key Scripts + +| Script | Purpose | +|--------|---------| +| `dev/build.sh` | Local development build (main entry point) | +| `dev/patch.sh` | Apply prerequisite patches + edit a target patch | +| `dev/update_patches.sh` | Validate/fix all patches sequentially | +| `dev/clean_codex.sh` | Remove all Codex app data from macOS (reset to clean state) | +| `get_repo.sh` | Clone vscode at the commit specified in `upstream/stable.json` | +| `prepare_vscode.sh` | Copy overlays, merge product.json, apply patches, npm ci | +| `build.sh` | Compile (gulp), bundle extensions, produce platform binary | +| `get-extensions.sh` | Download VSIXs listed in `bundle-extensions.json` | + +## Version Tracking + +The target VS Code version is in `upstream/stable.json`: + +```json +{ + "tag": "1.108.1", + "commit": "585eba7c0c34fd6b30faac7c62a42050bfbc0086" +} +``` + +The Codex release version appends a time-based patch number: `{tag}.{day*24+hour}` (e.g. `1.108.12007`). + +## Syncing with Upstream VSCodium + +### Codex-Specific Customizations to Preserve + +1. **Branding** — `src/stable/`, `src/insider/`, `icons/` +2. **GitHub Workflows** — Simplified vs VSCodium. Custom: `docker-build-push.yml`, `patch-rebuild.yml`, `manual-release.yml` +3. **Windows MSI** — `build/windows/msi/codex.*` (renamed from `vscodium.*`) +4. **Product config** — `prepare_vscode.sh` (URLs, app names) +5. **Custom patches** — `patches/feat-*` (Codex features), `patches/user/*` (microphone, UI tweaks) +6. **Windows code signing** — SSL.com eSigner in `stable-windows.yml` +7. **Extension bundling** — `bundle-extensions.json`, `get-extensions.sh` +8. **Workbench contributions** — `src/stable/src/vs/workbench/contrib/codexConductor/` +9. **Rust CLI additions** — `src/stable/cli/src/commands/pin.rs` + +### Merge Strategy + +For small gaps: `git merge origin/master`, resolve conflicts. +For large gaps: cherry-pick patch updates from upstream, re-apply Codex customizations. 
+After merging: `./dev/update_patches.sh` then `./dev/build.sh` to validate. diff --git a/CLAUDE.md b/CLAUDE.md deleted file mode 100644 index b02583892a3..00000000000 --- a/CLAUDE.md +++ /dev/null @@ -1,332 +0,0 @@ -# Codex Development Guide - -This repository builds Codex, a freely-licensed VS Code distribution. It is a fork of [VSCodium](https://github.com/VSCodium/vscodium) with custom branding and configuration. The build process clones Microsoft's vscode repository and modifies it via git patches. - -## Upstream Relationship - -``` -Microsoft/vscode (source code) - ↓ (cloned at specific commit) -VSCodium/vscodium (origin) ──patches──→ VSCodium binaries - ↓ (forked) -This repo (Codex) ──patches──→ Codex binaries -``` - -**Remotes:** -- `origin` = VSCodium/vscodium (upstream we sync from) -- `nexus` = BiblioNexus-Foundation/codex (our main repo) - -## Repository Structure - -``` -patches/ # All patch files that modify vscode source - *.patch # Core patches applied to all builds - insider/ # Patches specific to insider builds - osx/ # macOS-specific patches - linux/ # Linux-specific patches - windows/ # Windows-specific patches - user/ # Optional user patches - -vscode/ # Cloned vscode repository (gitignored, generated) -dev/ # Development helper scripts -src/ # Brand assets and configuration overlays -``` - -## Working with Patches - -### Understanding the Patch Workflow - -1. **Patches are the source of truth** - Never commit direct changes to the `vscode/` directory. All modifications to VS Code source must be captured as `.patch` files in the `patches/` directory. - -2. **Patches are applied sequentially** - Order matters. Core patches are applied first, then platform-specific patches. - -3. **Patches use placeholder variables** - Patches can use placeholders like `!!APP_NAME!!`, `!!BINARY_NAME!!`, etc. that get replaced during application. 
- -### Making Changes to VS Code Source - -#### Step 1: Set Up Working Environment - -```bash -# Fresh clone of vscode at the correct commit -./get_repo.sh - -# Or use dev/build.sh which does this automatically -./dev/build.sh -``` - -#### Step 2: Apply Existing Patches - -To work on an existing patch: -```bash -# Apply prerequisite patches + the target patch for editing -./dev/patch.sh prerequisite1 prerequisite2 target-patch - -# Example: To modify the brand.patch -./dev/patch.sh brand -``` - -The `dev/patch.sh` script: -- Resets vscode to clean state -- Applies the helper settings patch -- Applies all listed prerequisite patches -- Applies the target patch (last argument) -- Waits for you to make changes -- Regenerates the patch file when you press a key - -#### Step 3: Making Changes - -After running `dev/patch.sh`: -1. Edit files in `vscode/` as needed -2. Press any key in the terminal when done -3. The script regenerates the patch file automatically - -#### Manual Patch Creation/Update - -If working manually: -```bash -cd vscode - -# Make your changes to the source files -# ... - -# Stage and generate diff -git add . -git diff --staged -U1 > ../patches/your-patch-name.patch -``` - -### Testing Patches - -#### Validate All Patches Apply Cleanly - -```bash -./dev/update_patches.sh -``` - -This script: -- Iterates through all patches -- Attempts to apply each one -- If a patch fails, it applies with `--reject` and pauses for manual resolution -- Regenerates any patches that needed fixing - -#### Full Build Test - -```bash -# Run a complete local build -./dev/build.sh - -# Options: -# -i Build insider version -# -l Use latest vscode version -# -o Skip build (only prepare source) -# -s Skip source preparation (use existing vscode/) -``` - -### Common Development Tasks - -#### Creating a New Patch - -1. Apply all prerequisite patches that your change depends on -2. Make your changes in `vscode/` -3. Generate the patch: - ```bash - cd vscode - git add . 
- git diff --staged -U1 > ../patches/my-new-feature.patch - ``` -4. Add the patch to the appropriate location in `prepare_vscode.sh` if it should be applied during builds - -#### Updating a Patch After Upstream Changes - -When VS Code updates and a patch no longer applies: -```bash -# Run update script - it will pause on failing patches -./dev/update_patches.sh - -# Fix the conflicts in vscode/, then press any key -# The script regenerates the fixed patch -``` - -#### Debugging Patch Application - -```bash -cd vscode -git apply --check ../patches/problem.patch # Dry run -git apply --reject ../patches/problem.patch # Apply with .rej files for conflicts -``` - -## Key Scripts Reference - -| Script | Purpose | -|--------|---------| -| `get_repo.sh` | Clone vscode at correct version | -| `prepare_vscode.sh` | Apply patches and prepare for build | -| `build.sh` | Main build script | -| `dev/build.sh` | Local development build | -| `dev/patch.sh` | Apply patches for editing a single patch | -| `dev/update_patches.sh` | Validate/update all patches | -| `dev/clean_codex.sh` | Remove all Codex app data from macOS user dirs (reset to clean state; macOS only) | -| `utils.sh` | Common functions including `apply_patch` | - -## Build Environment - -The build process: -1. `get_repo.sh` - Fetches vscode source at a specific commit -2. `prepare_vscode.sh` - Applies patches, copies branding, runs npm install -3. `build.sh` - Compiles the application - -Environment variables: -- `VSCODE_QUALITY`: "stable" or "insider" -- `OS_NAME`: "osx", "linux", or "windows" -- `VSCODE_ARCH`: CPU architecture - -### Version Tracking - -The VS Code version to build is determined by: - -1. **`upstream/stable.json`** (or `insider.json`) - Contains the target VS Code tag and commit: - ```json - { - "tag": "1.100.0", - "commit": "19e0f9e681ecb8e5c09d8784acaa601316ca4571" - } - ``` - -2. 
**`VSCODE_LATEST=yes`** - If set, queries Microsoft's update API for the latest version instead - -When syncing upstream, update these JSON files to match VSCodium's versions to ensure patches are compatible. - -## Syncing with Upstream VSCodium - -This is the most challenging maintenance task. VSCodium regularly updates their patches and build scripts to support new VS Code versions. - -### Check Current Status - -```bash -git fetch origin -git log --oneline origin/master -5 # See upstream's recent changes -git rev-list --count $(git merge-base HEAD origin/master)..origin/master # Commits behind -``` - -### Codex-Specific Customizations to Preserve - -When merging upstream, these are our key customizations that must be preserved: - -1. **Branding** (`src/stable/`, `src/insider/`, `icons/`) - - Custom icons and splash screens - - Keep all Codex assets - -2. **GitHub Workflows** (`.github/workflows/`) - - Simplified compared to VSCodium - - Uses different release repos (genesis-ai-dev/codex, BiblioNexus-Foundation/codex) - - Has custom workflows: `docker-build-push.yml`, `patch-rebuild.yml`, `manual-release.yml` - -3. **Windows MSI Files** (`build/windows/msi/`) - - Files renamed from `vscodium.*` to `codex.*` - - References updated for Codex branding - -4. **Product Configuration** (`product.json`, `prepare_vscode.sh`) - - URLs point to genesis-ai-dev/codex repos - - App names, identifiers set to Codex - -5. **Custom Patches** (`patches/`) - - `patches/user/microphone.patch` - Codex-specific - - Minor modifications to other patches for branding - -6. 
**Windows Code Signing** (`.github/workflows/stable-windows.yml`) - - SSL.com eSigner integration for code signing - - Signs application binaries (.exe, .dll) before packaging - - Signs installer packages (.exe, .msi) after packaging - - Required secrets: `ES_USERNAME`, `ES_PASSWORD`, `ES_CREDENTIAL_ID`, `ES_TOTP_SECRET` - - **Must preserve**: The signing steps between "Build" and "Prepare assets", and after "Upload unsigned artifacts" - -### Merge Strategy - -#### Option A: Incremental Merge (Recommended for small gaps) - -```bash -# Create a working branch -git checkout -b upstream-sync - -# Merge upstream -git merge origin/master - -# Resolve conflicts - most will be in: -# - .github/workflows/ (keep ours, incorporate new build steps if needed) -# - patches/*.patch (need careful merge - see below) -# - build/windows/msi/ (keep our codex.* files) -# - prepare_vscode.sh (keep our branding, adopt new build logic) -``` - -#### Option B: Cherry-pick Patch Updates (Recommended for large gaps) - -When far behind (like 1.99 → 1.108), it's often easier to: - -1. **Identify patch update commits** in upstream: - ```bash - git log origin/master --oneline --grep="update patches" - ``` - -2. **Cherry-pick or manually apply** the patch changes: - ```bash - # See what patches changed in a specific upstream commit - git show -- patches/ - ``` - -3. **Copy updated patches** from upstream, then re-apply our branding changes - -#### Option C: Reset and Re-apply Customizations - -For very large gaps, it may be cleanest to: - -1. Create a fresh branch from upstream -2. Re-apply Codex customizations on top -3. This ensures we get all upstream fixes cleanly - -### Resolving Patch Conflicts - -When upstream updates patches that we've also modified: - -1. **Compare the patches:** - ```bash - git diff origin/master -- patches/brand.patch - ``` - -2. **Accept upstream's patch structure** (they've adapted to new VS Code) - -3. 
**Re-apply our branding on top:** - - Our changes are usually just `VSCodium` → `Codex` type substitutions - - The placeholder system (`!!APP_NAME!!`) handles most of this automatically - -### After Merging: Validate Everything - -```bash -# 1. Update upstream/stable.json to new version if needed -# 2. Test patches apply cleanly -./dev/update_patches.sh - -# 3. Run a full local build -./dev/build.sh -l # -l uses latest VS Code version - -# 4. If patches fail, fix them one by one -# The update_patches.sh script will pause on failures -``` - -### Common Conflict Patterns - -| File/Area | Typical Resolution | -|-----------|-------------------| -| `.github/workflows/*.yml` | Keep our simplified versions, cherry-pick important CI fixes | -| `.github/workflows/stable-windows.yml` | **Preserve code signing steps** - keep SSL.com eSigner integration intact | -| `patches/*.patch` | Take upstream's version, verify our branding placeholders work | -| `prepare_vscode.sh` | Keep our branding URLs/names, adopt new build logic | -| `build/windows/msi/` | Keep our `codex.*` files, apply equivalent changes from `vscodium.*` | -| `README.md` | Keep ours | -| `product.json` | Keep ours (merged at build time anyway) | - -## Tips - -- Always work from a clean vscode state when creating patches -- Keep patches focused and minimal - one logical change per patch -- Test patches apply to a fresh clone before committing -- The `vscode/` directory is gitignored - your patch files are the persistent record -- When syncing upstream, focus on patch files first - they're the core of the build diff --git a/CLAUDE.md b/CLAUDE.md new file mode 120000 index 00000000000..47dc3e3d863 --- /dev/null +++ b/CLAUDE.md @@ -0,0 +1 @@ +AGENTS.md \ No newline at end of file diff --git a/build_cli.sh b/build_cli.sh index 746f27b2d9f..04311f2d9c7 100755 --- a/build_cli.sh +++ b/build_cli.sh @@ -19,7 +19,7 @@ TUNNEL_APPLICATION_NAME="$(node -p "require(\"../product.json\").tunnelApplicati NAME_SHORT="$(node -p 
"require(\"../product.json\").nameShort")" npm pack @vscode/openssl-prebuilt@0.0.11 -mkdir openssl +mkdir -p openssl tar -xvzf vscode-openssl-prebuilt-0.0.11.tgz --strip-components=1 --directory=openssl if [[ "${OS_NAME}" == "osx" ]]; then diff --git a/bundle-extensions.json b/bundle-extensions.json new file mode 100644 index 00000000000..adc32f6fa39 --- /dev/null +++ b/bundle-extensions.json @@ -0,0 +1,9 @@ +{ + "bundle": [ + { + "name": "extension-sideloader", + "github_release": "genesis-ai-dev/extension-sideloader", + "tag": "0.1.0" + } + ] +} diff --git a/dev/build.sh b/dev/build.sh index d8e56e9869a..7e2fb3b50be 100755 --- a/dev/build.sh +++ b/dev/build.sh @@ -13,6 +13,8 @@ export GH_REPO_PATH="genesis-ai-dev/codex" export ORG_NAME="Codex" export SHOULD_BUILD="yes" export SKIP_ASSETS="yes" +export SHOULD_BUILD_REH="no" +export SHOULD_BUILD_REH_WEB="no" export SKIP_BUILD="no" export SKIP_SOURCE="no" export VSCODE_LATEST="no" diff --git a/get-extensions.sh b/get-extensions.sh index 5badf7fdc3a..f6cf5454d27 100755 --- a/get-extensions.sh +++ b/get-extensions.sh @@ -1,32 +1,45 @@ #!/usr/bin/env bash +# Downloads and unpacks bundled extensions into ./extensions/. +# Sourced from build.sh while CWD is vscode/. 
-# Exit early if SKIP_EXTENSIONS is set -if [[ -n "$SKIP_EXTENSIONS" ]]; then +set -euo pipefail + +if [[ -n "${SKIP_EXTENSIONS:-}" ]]; then return 0 fi -jsonfile=$(curl -s https://raw.githubusercontent.com/genesis-ai-dev/extension-sideloader/refs/heads/main/extensions.json) -extensions_dir=./.build/extensions -base_dir=$(pwd) - -count=$(jq -r '.builtin | length' <<< ${jsonfile}) -for i in $(seq $count); do - url=$( jq -r ".builtin[$i-1].url" <<< ${jsonfile}) - name=$( jq -r ".builtin[$i-1].name" <<< ${jsonfile}) - echo $name $url - if [[ -d ${extensions_dir}/"$name" ]]; then - rm -rf ${extensions_dir}/"$name" - fi - mkdir -p ${extensions_dir}/"$name" - curl -Lso "$name".zip "$url" - unzip -q "$name".zip -d ${extensions_dir}/"$name" - mv ${extensions_dir}/"$name"/extension/* ${extensions_dir}/"$name"/ - cp -r ${extensions_dir}/"$name" ./extensions/ - rm "$name".zip -done +BUNDLE_JSON="../bundle-extensions.json" +EXTENSIONS_DIR="./extensions" + +TMP_DIR=$(mktemp -d) +trap 'rm -rf "${TMP_DIR}"' EXIT + +install_vsix() { + local name="$1" + local zip_file="$2" + local dest="${EXTENSIONS_DIR}/${name}" + + echo "[get-extensions] Installing ${name}..." + mkdir -p "${TMP_DIR}/${name}" + unzip -q "${zip_file}" -d "${TMP_DIR}/${name}" + rm -rf "${dest}" + mv "${TMP_DIR}/${name}/extension" "${dest}" + echo "[get-extensions] Installed ${name}" +} -# name="test" -# cp -r /Users/andrew.denhertog/Documents/Projects/andrewhertog/test-extension/test-extension-0.0.1.vsix ./ext.zip -# unzip -q ext.zip -d ${extensions_dir}/"$name" -# mv ${extensions_dir}/"$name"/extension/* ${extensions_dir}/"$name"/ -# rm ext.zip +count=$(jq -r '.bundle | length' "${BUNDLE_JSON}") + +for i in $(seq 0 $((count - 1))); do + name=$(jq -r ".bundle[$i].name" "${BUNDLE_JSON}") + repo=$(jq -r ".bundle[$i].github_release" "${BUNDLE_JSON}") + tag=$(jq -r ".bundle[$i].tag" "${BUNDLE_JSON}") + zip_file="${TMP_DIR}/${name}.vsix" + + echo "[get-extensions] Downloading ${name} from ${repo}@${tag}..." 
+ gh release download "${tag}" \ + --repo "${repo}" \ + --pattern "*.vsix" \ + --output "${zip_file}" + + install_vsix "${name}" "${zip_file}" +done diff --git a/patches/binary-name.patch b/patches/binary-name.patch index b8214dfd3cd..8d254ba8451 100644 --- a/patches/binary-name.patch +++ b/patches/binary-name.patch @@ -1,5 +1,5 @@ diff --git a/build/gulpfile.vscode.ts b/build/gulpfile.vscode.ts -index d3ab651..63cd71f 100644 +index ac70ecb..9b7c25f 100644 --- a/build/gulpfile.vscode.ts +++ b/build/gulpfile.vscode.ts @@ -369,3 +369,3 @@ function packageTask(platform: string, arch: string, sourceFolderName: string, d @@ -7,6 +7,45 @@ index d3ab651..63cd71f 100644 - .pipe(rename('bin/code')); + .pipe(rename('bin/' + product.applicationName)); const policyDest = gulp.src('.build/policies/darwin/**', { base: '.build/policies/darwin' }) +diff --git a/cli/src/desktop/version_manager.rs b/cli/src/desktop/version_manager.rs +index e9cd1a1..535c403 100644 +--- a/cli/src/desktop/version_manager.rs ++++ b/cli/src/desktop/version_manager.rs +@@ -11,2 +11,3 @@ use std::{ + ++use const_format::concatcp; + use lazy_static::lazy_static; +@@ -16,3 +17,3 @@ use serde::{Deserialize, Serialize}; + use crate::{ +- constants::{PRODUCT_DOWNLOAD_URL, QUALITY, QUALITYLESS_PRODUCT_NAME}, ++ constants::{APPLICATION_NAME, PRODUCT_DOWNLOAD_URL, QUALITY, QUALITYLESS_PRODUCT_NAME}, + log, +@@ -245,3 +246,3 @@ pub fn prompt_to_install(version: &RequestedVersion) { + fn detect_installed_program(log: &log::Logger) -> io::Result> { +- use crate::constants::PRODUCT_NAME_LONG; ++ use crate::constants::{APPLICATION_NAME, PRODUCT_NAME_LONG}; + +@@ -251,3 +252,3 @@ fn detect_installed_program(log: &log::Logger) -> io::Result> { + if probable.exists() { +- probable.extend(["Contents/Resources", "app", "bin", "code"]); ++ probable.extend(["Contents/Resources", "app", "bin", APPLICATION_NAME]); + return Ok(vec![probable]); +@@ -296,3 +297,3 @@ fn detect_installed_program(log: &log::Logger) -> io::Result> { 
+ output.push( +- [suffix.trim(), "Contents/Resources", "app", "bin", "code"] ++ [suffix.trim(), "Contents/Resources", "app", "bin", APPLICATION_NAME] + .iter() +@@ -401,7 +402,7 @@ fn detect_installed_program(log: &log::Logger) -> io::Result> { + const DESKTOP_CLI_RELATIVE_PATH: &str = if cfg!(target_os = "macos") { +- "Contents/Resources/app/bin/code" ++ concatcp!("Contents/Resources/app/bin/", APPLICATION_NAME) + } else if cfg!(target_os = "windows") { +- "bin/code.cmd,bin/code-insiders.cmd,bin/code-exploration.cmd" ++ concatcp!("bin/", APPLICATION_NAME, ".cmd") + } else { +- "bin/code,bin/code-insiders,bin/code-exploration" ++ concatcp!("bin/", APPLICATION_NAME) + }; diff --git a/src/vs/platform/native/electron-main/nativeHostMainService.ts b/src/vs/platform/native/electron-main/nativeHostMainService.ts index 2c3b710..8041f08 100644 --- a/src/vs/platform/native/electron-main/nativeHostMainService.ts diff --git a/patches/feat-cli-pinning.patch b/patches/feat-cli-pinning.patch new file mode 100644 index 00000000000..88a7b27f2e4 --- /dev/null +++ b/patches/feat-cli-pinning.patch @@ -0,0 +1,230 @@ +diff --git a/cli/src/bin/code/main.rs b/cli/src/bin/code/main.rs +index b73d0aa..d60d6be 100644 +--- a/cli/src/bin/code/main.rs ++++ b/cli/src/bin/code/main.rs +@@ -10,3 +10,3 @@ use clap::Parser; + use cli::{ +- commands::{args, serve_web, tunnels, update, version, CommandContext}, ++ commands::{args, pin, serve_web, tunnels, update, version, CommandContext}, + constants::get_default_user_agent, +@@ -67,2 +67,3 @@ async fn main() -> Result<(), std::convert::Infallible> { + args::StandaloneCommands::Update(args) => update::update(context!(), args).await, ++ args::StandaloneCommands::Pin(args) => pin::pin(context!(), args).await, + }, +diff --git a/cli/src/commands.rs b/cli/src/commands.rs +index 0277169..d4dfe66 100644 +--- a/cli/src/commands.rs ++++ b/cli/src/commands.rs +@@ -8,2 +8,3 @@ mod context; + pub mod args; ++pub mod pin; + pub mod serve_web; +diff --git 
a/cli/src/commands/args.rs b/cli/src/commands/args.rs +index 6301bdd..692e06b 100644 +--- a/cli/src/commands/args.rs ++++ b/cli/src/commands/args.rs +@@ -154,2 +154,35 @@ pub enum StandaloneCommands { + Update(StandaloneUpdateArgs), ++ /// Manage extension version pins for Codex projects. ++ Pin(PinArgs), ++} ++ ++#[derive(Args, Debug, Clone)] ++pub struct PinArgs { ++ /// The project name or ID. If not provided, lists all projects. ++ pub project: Option, ++ ++ #[clap(subcommand)] ++ pub subcommand: Option, ++} ++ ++#[derive(Subcommand, Debug, Clone)] ++pub enum PinSubcommand { ++ /// List pins for the project (default). ++ List, ++ /// Pin an extension to a specific version via VSIX URL. ++ Add(PinAddArgs), ++ /// Remove a version pin. ++ Remove(PinRemoveArgs), ++} ++ ++#[derive(Args, Debug, Clone)] ++pub struct PinAddArgs { ++ /// URL to the VSIX artifact (typically a GitHub Release asset). ++ pub url: String, ++} ++ ++#[derive(Args, Debug, Clone)] ++pub struct PinRemoveArgs { ++ /// The extension identifier to unpin (e.g. 'publisher.name'). ++ pub id: String, + } +diff --git a/cli/src/util/errors.rs b/cli/src/util/errors.rs +index b7ed029..6ed4439 100644 +--- a/cli/src/util/errors.rs ++++ b/cli/src/util/errors.rs +@@ -437,2 +437,11 @@ impl Display for DbusConnectFailedError { + ++#[derive(Debug)] ++pub struct PinningError(pub String); ++ ++impl std::fmt::Display for PinningError { ++ fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { ++ write!(f, "extension version pinning error: {}", self.0) ++ } ++} ++ + /// Internal errors in the VS Code CLI. 
+@@ -550,2 +559,3 @@ makeAnyError!( + InvalidRpcDataError, ++ PinningError, + CodeError, +diff --git a/src/vs/platform/environment/common/argv.ts b/src/vs/platform/environment/common/argv.ts +index a10f4c9..c75e211 100644 +--- a/src/vs/platform/environment/common/argv.ts ++++ b/src/vs/platform/environment/common/argv.ts +@@ -26,2 +26,5 @@ export interface NativeParsedArgs { + 'serve-web'?: INativeCliOptions; ++ pin?: { ++ _: string[]; ++ }; + chat?: { +diff --git a/src/vs/platform/environment/node/argv.ts b/src/vs/platform/environment/node/argv.ts +index 35a833d..590ef12 100644 +--- a/src/vs/platform/environment/node/argv.ts ++++ b/src/vs/platform/environment/node/argv.ts +@@ -47,3 +47,3 @@ export type OptionDescriptions = { + +-export const NATIVE_CLI_COMMANDS = ['tunnel', 'serve-web'] as const; ++export const NATIVE_CLI_COMMANDS = ['tunnel', 'serve-web', 'pin'] as const; + +@@ -94,2 +94,9 @@ export const OPTIONS: OptionDescriptions> = { + }, ++ 'pin': { ++ type: 'subcommand', ++ description: localize('pinExtension', "Manage extension version pins for Codex projects."), ++ options: { ++ _: { type: 'string[]' } ++ } ++ }, + 'diff': { type: 'boolean', cat: 'o', alias: 'd', args: ['file', 'file'], description: localize('diff', "Compare two files with each other.") }, +diff --git a/src/vs/platform/native/electron-main/nativeHostMainService.ts b/src/vs/platform/native/electron-main/nativeHostMainService.ts +index 8041f08..3c3d891 100644 +--- a/src/vs/platform/native/electron-main/nativeHostMainService.ts ++++ b/src/vs/platform/native/electron-main/nativeHostMainService.ts +@@ -423,23 +423,34 @@ export class NativeHostMainService extends Disposable implements INativeHostMain + async installShellCommand(windowId: number | undefined): Promise { +- const { source, target } = await this.getShellCommandLink(); +- +- // Only install unless already existing +- try { +- const { symbolicLink } = await SymlinkSupport.stat(source); +- if (symbolicLink && !symbolicLink.dangling) { 
+- const linkTargetRealPath = await Promises.realpath(source); +- if (target === linkTargetRealPath) { +- return; ++ const links = await this.getShellCommandLinks(); ++ ++ // Only install unless all already existing ++ let allExist = true; ++ for (const link of links) { ++ try { ++ const { symbolicLink } = await SymlinkSupport.stat(link.source); ++ if (symbolicLink && !symbolicLink.dangling) { ++ const linkTargetRealPath = await Promises.realpath(link.source); ++ if (link.target === linkTargetRealPath) { ++ continue; ++ } + } ++ allExist = false; ++ break; ++ } catch (error) { ++ if (error.code !== 'ENOENT') { ++ throw error; ++ } ++ allExist = false; ++ break; + } +- } catch (error) { +- if (error.code !== 'ENOENT') { +- throw error; // throw on any error but file not found +- } + } + +- await this.installShellCommandWithPrivileges(windowId, source, target); ++ if (allExist) { ++ return; ++ } ++ ++ await this.installShellCommandWithPrivileges(windowId, links); + } + +- private async installShellCommandWithPrivileges(windowId: number | undefined, source: string, target: string): Promise { ++ private async installShellCommandWithPrivileges(windowId: number | undefined, links: { source: string; target: string }[]): Promise { + const { response } = await this.showMessageBox(windowId, { +@@ -458,6 +469,7 @@ export class NativeHostMainService extends Disposable implements INativeHostMain + try { +- const command = `osascript -e "do shell script \\"mkdir -p /usr/local/bin && ln -sf \'${target}\' \'${source}\'\\" with administrator privileges"`; ++ const commands = links.map(link => `ln -sf '${link.target}' '${link.source}'`).join(' && '); ++ const command = `osascript -e "do shell script \\"mkdir -p /usr/local/bin && ${commands}\\" with administrator privileges"`; + await promisify(exec)(command); + } catch (error) { +- throw new Error(localize('cantCreateBinFolder', "Unable to install the shell command '{0}'.", source)); ++ throw new 
Error(localize('cantCreateBinFolder', "Unable to install the shell command.")); + } +@@ -466,6 +478,8 @@ export class NativeHostMainService extends Disposable implements INativeHostMain + async uninstallShellCommand(windowId: number | undefined): Promise { +- const { source } = await this.getShellCommandLink(); ++ const links = await this.getShellCommandLinks(); + + try { +- await fs.promises.unlink(source); ++ for (const link of links) { ++ await fs.promises.unlink(link.source); ++ } + } catch (error) { +@@ -487,6 +501,7 @@ export class NativeHostMainService extends Disposable implements INativeHostMain + try { +- const command = `osascript -e "do shell script \\"rm \'${source}\'\\" with administrator privileges"`; ++ const commands = links.map(link => `rm -f '${link.source}'`).join(' && '); ++ const command = `osascript -e "do shell script \\"${commands}\\" with administrator privileges"`; + await promisify(exec)(command); + } catch (error) { +- throw new Error(localize('cantUninstall', "Unable to uninstall the shell command '{0}'.", source)); ++ throw new Error(localize('uninstallFailed', "Unable to uninstall the shell command.")); + } +@@ -502,13 +517,26 @@ export class NativeHostMainService extends Disposable implements INativeHostMain + +- private async getShellCommandLink(): Promise<{ readonly source: string; readonly target: string }> { +- const target = resolve(this.environmentMainService.appRoot, 'bin', this.productService.applicationName); +- const source = `/usr/local/bin/${this.productService.applicationName}`; ++ private async getShellCommandLinks(): Promise<{ readonly source: string; readonly target: string }[]> { ++ const links: { source: string; target: string }[] = []; ++ ++ // Main 'codex' command ++ const mainTarget = resolve(this.environmentMainService.appRoot, 'bin', this.productService.applicationName); ++ const mainSource = `/usr/local/bin/${this.productService.applicationName}`; ++ if (await Promises.exists(mainTarget)) { ++ links.push({ 
source: mainSource, target: mainTarget }); ++ } ++ ++ // 'codex-cli' command pointing to 'codex-tunnel' ++ if (this.productService.tunnelApplicationName) { ++ const tunnelTarget = resolve(this.environmentMainService.appRoot, 'bin', this.productService.tunnelApplicationName); ++ const tunnelSource = '/usr/local/bin/codex-cli'; ++ if (await Promises.exists(tunnelTarget)) { ++ links.push({ source: tunnelSource, target: tunnelTarget }); ++ } ++ } + +- // Ensure source exists +- const sourceExists = await Promises.exists(target); +- if (!sourceExists) { +- throw new Error(localize('sourceMissing', "Unable to find shell script in '{0}'", target)); ++ if (links.length === 0) { ++ throw new Error(localize('sourceMissing', "Unable to find shell scripts in '{0}'", resolve(this.environmentMainService.appRoot, 'bin'))); + } + +- return { source, target }; ++ return links; + } diff --git a/patches/feat-codex-conductor.patch b/patches/feat-codex-conductor.patch new file mode 100644 index 00000000000..6aebb937d47 --- /dev/null +++ b/patches/feat-codex-conductor.patch @@ -0,0 +1,10 @@ +diff --git a/src/vs/workbench/workbench.common.main.ts b/src/vs/workbench/workbench.common.main.ts +index e7c16a7..5ede7d5 100644 +--- a/src/vs/workbench/workbench.common.main.ts ++++ b/src/vs/workbench/workbench.common.main.ts +@@ -325,2 +325,5 @@ import './contrib/keybindings/browser/keybindings.contribution.js'; + ++// Codex ++import './contrib/codexConductor/browser/codexConductor.contribution.js'; ++ + // Snippets diff --git a/patches/zzz-authoritative-reload.patch b/patches/zzz-authoritative-reload.patch new file mode 100644 index 00000000000..1b6faae2cf2 --- /dev/null +++ b/patches/zzz-authoritative-reload.patch @@ -0,0 +1,113 @@ +diff --git a/src/vs/platform/native/common/native.ts b/src/vs/platform/native/common/native.ts +index 75a302b..5c91eac 100644 +--- a/src/vs/platform/native/common/native.ts ++++ b/src/vs/platform/native/common/native.ts +@@ -204,7 +204,7 @@ export interface 
ICommonNativeHostService {
+ 	// Lifecycle
+ 	notifyReady(): Promise<void>;
+ 	relaunch(options?: { addArgs?: string[]; removeArgs?: string[] }): Promise<void>;
+-	reload(options?: { disableExtensions?: boolean }): Promise<void>;
++	reload(options?: { disableExtensions?: boolean; forceProfile?: string }): Promise<void>;
+ 	closeWindow(options?: INativeHostOptions): Promise<void>;
+ 	quit(): Promise<void>;
+ 	exit(code: number): Promise<void>;
+diff --git a/src/vs/platform/native/electron-main/nativeHostMainService.ts b/src/vs/platform/native/electron-main/nativeHostMainService.ts
+index 2c3b710..121e545 100644
+--- a/src/vs/platform/native/electron-main/nativeHostMainService.ts
++++ b/src/vs/platform/native/electron-main/nativeHostMainService.ts
+@@ -934,7 +934,7 @@ export class NativeHostMainService extends Disposable implements INativeHostMain
+ 		return this.lifecycleMainService.relaunch(options);
+ 	}
+ 
+-	async reload(windowId: number | undefined, options?: { disableExtensions?: boolean }): Promise<void> {
++	async reload(windowId: number | undefined, options?: { disableExtensions?: boolean; forceProfile?: string }): Promise<void> {
+ 		const window = this.codeWindowById(windowId);
+ 		if (window) {
+ 
+@@ -954,7 +954,11 @@
+ 			}
+ 
+ 			// Proceed normally to reload the window
+-			return this.lifecycleMainService.reload(window, options?.disableExtensions !== undefined ? 
{ _: [], 'disable-extensions': options.disableExtensions } : undefined); ++ return this.lifecycleMainService.reload(window, { ++ _: [], ++ 'disable-extensions': options?.disableExtensions, ++ 'profile': options?.forceProfile ++ } as any); + } + } + +diff --git a/src/vs/platform/windows/electron-main/windowImpl.ts b/src/vs/platform/windows/electron-main/windowImpl.ts +index 63652a5..3511ecd 100644 +--- a/src/vs/platform/windows/electron-main/windowImpl.ts ++++ b/src/vs/platform/windows/electron-main/windowImpl.ts +@@ -1271,9 +1271,22 @@ export class CodeWindow extends BaseWindow implements ICodeWindow { + configuration.isInitialStartup = false; // since this is a reload + configuration.policiesData = this.policyService.serialize(); // set policies data again + configuration.continueOn = this.environmentMainService.continueOn; ++ ++ const ws = configuration.workspace; ++ let profile: IUserDataProfile | undefined; ++ if (cli?.profile) { ++ profile = this.userDataProfilesService.profiles.find(p => p.name === cli.profile); ++ } ++ if (!profile && ws) { ++ const revivedWS = isSingleFolderWorkspaceIdentifier(ws) ? 
{ id: ws.id, uri: URI.revive(ws.uri) } : ws; ++ profile = this.userDataProfilesService.getProfileForWorkspace(revivedWS); ++ } ++ ++ profile = profile || this.profile || this.userDataProfilesService.defaultProfile; ++ + configuration.profiles = { + all: this.userDataProfilesService.profiles, +- profile: this.profile || this.userDataProfilesService.defaultProfile, ++ profile, + home: this.userDataProfilesService.profilesHome + }; + configuration.logLevel = this.loggerMainService.getLogLevel(); +diff --git a/src/vs/platform/windows/electron-main/windowsMainService.ts b/src/vs/platform/windows/electron-main/windowsMainService.ts +index 117dfd2..68a9c06 100644 +--- a/src/vs/platform/windows/electron-main/windowsMainService.ts ++++ b/src/vs/platform/windows/electron-main/windowsMainService.ts +@@ -1669,12 +1669,9 @@ export class WindowsMainService extends Disposable implements IWindowsMainServic + const profile = profilePromise instanceof Promise ? await profilePromise : profilePromise; + configuration.profiles.profile = profile; + +- if (!configuration.extensionDevelopmentPath) { +- // Associate the configured profile to the workspace +- // unless the window is for extension development, +- // where we do not persist the associations +- await this.userDataProfilesMainService.setProfileForWorkspace(workspace, profile); +- } ++ // Associate the configured profile to the workspace. ++ // For Codex, we want this to persist even during extension development. ++ await this.userDataProfilesMainService.setProfileForWorkspace(workspace, profile); + + // Load it + window.load(configuration); +diff --git a/src/vs/workbench/services/host/browser/host.ts b/src/vs/workbench/services/host/browser/host.ts +index 4ac35c9..23e7bab 100644 +--- a/src/vs/workbench/services/host/browser/host.ts ++++ b/src/vs/workbench/services/host/browser/host.ts +@@ -111,7 +111,7 @@ export interface IHostService { + /** + * Reload the currently active main window. 
+ 	 */
+-	reload(options?: { disableExtensions?: boolean }): Promise<void>;
++	reload(options?: { disableExtensions?: boolean; forceProfile?: string }): Promise<void>;
+ 
+ 	/**
+ 	 * Attempt to close the active main window.
+diff --git a/src/vs/workbench/services/host/electron-browser/nativeHostService.ts b/src/vs/workbench/services/host/electron-browser/nativeHostService.ts
+index 9ca38b2..dd7cf9b 100644
+--- a/src/vs/workbench/services/host/electron-browser/nativeHostService.ts
++++ b/src/vs/workbench/services/host/electron-browser/nativeHostService.ts
+@@ -187,7 +187,7 @@ class WorkbenchHostService extends Disposable implements IHostService {
+ 		return this.nativeHostService.relaunch();
+ 	}
+ 
+-	reload(options?: { disableExtensions?: boolean }): Promise<void> {
++	reload(options?: { disableExtensions?: boolean; forceProfile?: string }): Promise<void> {
+ 		return this.nativeHostService.reload(options);
+ 	}
+ 
diff --git a/src/stable/cli/src/commands/pin.rs b/src/stable/cli/src/commands/pin.rs
new file mode 100644
index 00000000000..bcebcde8a8d
--- /dev/null
+++ b/src/stable/cli/src/commands/pin.rs
@@ -0,0 +1,368 @@
+/*---------------------------------------------------------------------------------------------
+ *  Copyright (c) Microsoft Corporation. All rights reserved.
+ *  Licensed under the MIT License. See License.txt in the project root for license information.
+ *--------------------------------------------------------------------------------------------*/
+
+use crate::{
+    commands::args::{PinAddArgs, PinArgs, PinRemoveArgs, PinSubcommand},
+    log,
+    util::errors::{wrap, AnyError, PinningError},
+};
+use serde::{Deserialize, Serialize};
+use std::{
+    fs,
+    io::Read,
+    path::{Path, PathBuf},
+};
+
+use super::context::CommandContext;
+
+const CODEX_PROJECTS_DIR: &str = ".codex-projects";
+
+#[derive(Serialize, Deserialize, Debug, Clone)]
+struct ProjectMetadata {
+    #[serde(rename = "projectName", default)]
+    project_name: String,
+    #[serde(rename = "projectId", default)]
+    project_id: String,
+    #[serde(default)]
+    meta: Meta,
+    #[serde(flatten)]
+    extra: serde_json::Value,
+}
+
+#[derive(Serialize, Deserialize, Debug, Clone, Default)]
+struct Meta {
+    #[serde(rename = "requiredExtensions", default)]
+    required_extensions: std::collections::HashMap<String, String>,
+    #[serde(rename = "pinnedExtensions", default)]
+    pinned_extensions: std::collections::HashMap<String, PinnedExtension>,
+    #[serde(flatten)]
+    extra: serde_json::Value,
+}
+
+#[derive(Serialize, Deserialize, Debug, Clone)]
+struct PinnedExtension {
+    version: String,
+    url: String,
+}
+
+struct ProjectInfo {
+    path: PathBuf,
+    metadata: ProjectMetadata,
+}
+
+pub async fn pin(ctx: CommandContext, args: PinArgs) -> Result<i32, AnyError> {
+    match (&args.project, &args.subcommand) {
+        (None, _) | (Some(_), Some(PinSubcommand::List)) | (Some(_), None) => {
+            let project_filter = if let Some(p) = &args.project {
+                Some(resolve_project(&ctx, p)?) 
+            } else {
+                None
+            };
+            list_pins(&ctx, project_filter)?;
+        }
+        (Some(p), Some(PinSubcommand::Add(add_args))) => add_pin(ctx, p.clone(), add_args.clone()).await?,
+        (Some(p), Some(PinSubcommand::Remove(remove_args))) => remove_pin(ctx, p.clone(), remove_args.clone())?,
+    }
+
+    Ok(0)
+}
+
+fn discover_projects(ctx: &CommandContext) -> Result<Vec<ProjectInfo>, AnyError> {
+    // Use LauncherPaths root to find home directory reliably
+    let home_dir = ctx.paths.root().parent()
+        .and_then(|p| p.parent())
+        .map(|p| p.to_path_buf())
+        .or_else(dirs::home_dir)
+        .ok_or_else(|| AnyError::PinningError(PinningError("Could not find home directory".to_string())))?;
+
+    let projects_dir = home_dir.join(CODEX_PROJECTS_DIR);
+
+    let mut projects = Vec::new();
+
+    if projects_dir.exists() && projects_dir.is_dir() {
+        for entry in fs::read_dir(projects_dir).map_err(|e| wrap(e, "Failed to read projects directory"))? {
+            let entry = entry.map_err(|e| wrap(e, "Failed to read directory entry"))?;
+            let path = entry.path();
+
+            if path.is_dir() {
+                let metadata_path = path.join("metadata.json");
+                if metadata_path.exists() {
+                    match read_metadata(&metadata_path) {
+                        Ok(metadata) => projects.push(ProjectInfo { path, metadata }),
+                        Err(e) => {
+                            log::emit(log::Level::Warn, "pin", &format!("Failed to read metadata at {}: {}", metadata_path.display(), e));
+                        }
+                    }
+                }
+            }
+        }
+    }
+
+    Ok(projects)
+}
+
+fn read_metadata(path: &Path) -> Result<ProjectMetadata, AnyError> {
+    let file = fs::File::open(path).map_err(|e| wrap(e, "Failed to open metadata.json"))?;
+    let metadata: ProjectMetadata = serde_json::from_reader(file).map_err(|e| wrap(e, "Failed to parse metadata.json"))?;
+    Ok(metadata)
+}
+
+fn write_metadata(path: &Path, metadata: &ProjectMetadata) -> Result<(), AnyError> {
+    let file = fs::File::create(path).map_err(|e| wrap(e, "Failed to create metadata.json"))?;
+    let formatter = serde_json::ser::PrettyFormatter::with_indent(b"    ");
+    let mut ser = serde_json::Serializer::with_formatter(file, formatter);
+    
metadata.serialize(&mut ser).map_err(|e| wrap(e, "Failed to write metadata.json"))?;
+    Ok(())
+}
+
+fn truncate_url(url: &str) -> String {
+    if let Ok(parsed_url) = url::Url::parse(url) {
+        let mut segments = parsed_url.path_segments().map(|c| c.collect::<Vec<_>>()).unwrap_or_default();
+        if segments.len() > 3 {
+            let filename = segments.pop().unwrap_or("");
+            let first_two = segments.iter().take(2).copied().collect::<Vec<_>>().join("/");
+            format!("{}://{}/{}/.../{}", parsed_url.scheme(), parsed_url.host_str().unwrap_or(""), first_two, filename)
+        } else {
+            url.to_string()
+        }
+    } else {
+        url.to_string()
+    }
+}
+
+fn list_pins(ctx: &CommandContext, project_filter: Option<ProjectInfo>) -> Result<(), AnyError> {
+    let projects = if let Some(p) = project_filter {
+        vec![p]
+    } else {
+        discover_projects(ctx)?
+    };
+
+    for project in projects {
+        println!(
+            "{} {} {}",
+            project.metadata.project_name,
+            project.metadata.project_id,
+            project.path.display()
+        );
+
+        if !project.metadata.meta.required_extensions.is_empty() {
+            let mut reqs = String::new();
+            let mut ids: Vec<_> = project.metadata.meta.required_extensions.keys().collect();
+            ids.sort();
+            for id in ids {
+                let version = &project.metadata.meta.required_extensions[id];
+                reqs.push_str(&format!("⚓ {} {} ", id, version));
+            }
+            println!("  {}", reqs.trim_end());
+        }
+
+        let mut pinned_ids: Vec<_> = project.metadata.meta.pinned_extensions.keys().collect();
+        pinned_ids.sort();
+        for id in pinned_ids {
+            let pin = &project.metadata.meta.pinned_extensions[id];
+            println!("  📌 {} {} {}", id, pin.version, truncate_url(&pin.url));
+        }
+        println!();
+    }
+
+    println!("Usage:");
+    println!("  codex pin                          List all projects and pins");
+    println!("  codex pin <project>                List pins for a project");
+    println!("  codex pin <project> add <url>      Add a version pin");
+    println!("  codex pin <project> remove <id>    Remove a version pin");
+
+    Ok(())
+}
+
+fn resolve_project(ctx: &CommandContext, project_identifier: &str) -> Result<ProjectInfo, AnyError> {
+    let projects = discover_projects(ctx)?;
+    let mut matches: Vec<ProjectInfo> = 
projects + .into_iter() + .filter(|p| p.metadata.project_id == project_identifier || p.metadata.project_name == project_identifier) + .collect(); + + if matches.is_empty() { + return Err(AnyError::PinningError(PinningError(format!("No project found matching '{}'", project_identifier)))); + } else if matches.len() > 1 { + let mut msg = format!("Multiple projects found matching '{}'. Please use the ID:\n", project_identifier); + for m in matches { + msg.push_str(&format!("- {} ({})\n", m.metadata.project_name, m.metadata.project_id)); + } + return Err(AnyError::PinningError(PinningError(msg))); + } + + Ok(matches.remove(0)) +} + +/// Resolves a GitHub release page URL to a direct VSIX download URL. +/// If the URL is already a direct URL (not a release page), returns it unchanged. +/// +/// Matches: https://github.com/{owner}/{repo}/releases/tag/{tag} +async fn resolve_vsix_url(client: &reqwest::Client, url: &str) -> Result { + let url = url.trim(); + const PREFIX: &str = "https://github.com/"; + const RELEASES_TAG: &str = "/releases/tag/"; + + if !url.starts_with(PREFIX) { + return Ok(url.to_string()); + } + + let after_host = &url[PREFIX.len()..]; + let tag_pos = match after_host.find(RELEASES_TAG) { + Some(pos) => pos, + None => return Ok(url.to_string()), + }; + + let owner_repo = &after_host[..tag_pos]; + let tag = &after_host[tag_pos + RELEASES_TAG.len()..]; + + if owner_repo.is_empty() || tag.is_empty() || owner_repo.matches('/').count() != 1 { + return Ok(url.to_string()); + } + + // Percent-encode characters that are unsafe in URL path segments. + // Tags are typically semver (0.24.1-pr123) so only + is a realistic risk. 
+ let encoded_tag = tag.replace('%', "%25").replace(' ', "%20").replace('+', "%2B"); + let api_url = format!("https://api.github.com/repos/{}/releases/tags/{}", owner_repo, encoded_tag); + log::emit(log::Level::Info, "pin", &format!("Resolving release page: {}", api_url)); + + let resp = client + .get(&api_url) + .header("Accept", "application/vnd.github+json") + .header("User-Agent", "codex-cli") + .send() + .await + .map_err(|e| wrap(e, "Failed to query GitHub API"))? + .error_for_status() + .map_err(|e| wrap(e, "GitHub API returned an error"))?; + + let release: serde_json::Value = resp.json().await.map_err(|e| wrap(e, "Failed to parse GitHub API response"))?; + + let assets = release["assets"] + .as_array() + .ok_or_else(|| AnyError::PinningError(PinningError("No assets found in GitHub release".to_string())))?; + + let vsix_asset = assets + .iter() + .find(|a| a["name"].as_str().map_or(false, |n| n.ends_with(".vsix"))) + .ok_or_else(|| AnyError::PinningError(PinningError("No .vsix asset found in GitHub release".to_string())))?; + + let download_url = vsix_asset["browser_download_url"] + .as_str() + .ok_or_else(|| AnyError::PinningError(PinningError("Missing download URL for .vsix asset".to_string())))?; + + log::emit(log::Level::Info, "pin", &format!("Resolved to: {}", download_url)); + Ok(download_url.to_string()) +} + +async fn add_pin(ctx: CommandContext, project_id: String, args: PinAddArgs) -> Result<(), AnyError> { + let mut project_info = resolve_project(&ctx, &project_id)?; + + // Resolve release page URLs to direct VSIX download URLs + let resolved_url = resolve_vsix_url(&ctx.http, &args.url).await?; + + log::emit(log::Level::Info, "pin", &format!("Inspecting VSIX at {}...", truncate_url(&resolved_url))); + + // Optimized VSIX metadata extraction using Range requests + let (extension_id, version) = match get_vsix_metadata_smart(&ctx.http, &resolved_url).await { + Ok(meta) => meta, + Err(e) => { + log::emit(log::Level::Warn, "pin", &format!("Range 
request optimization not available, using full download: {}", e)); + get_vsix_metadata_full(&ctx.http, &resolved_url).await? + } + }; + + log::emit(log::Level::Info, "pin", &format!("✔ Identified: {} (v{})", extension_id, version)); + + // Update metadata + project_info.metadata.meta.pinned_extensions.insert( + extension_id.clone(), + PinnedExtension { + version: version.to_string(), + url: resolved_url, + }, + ); + + let metadata_path = project_info.path.join("metadata.json"); + write_metadata(&metadata_path, &project_info.metadata)?; + + log::emit(log::Level::Info, "pin", &format!("✔ Updated metadata.json for \"{}\"", project_info.metadata.project_name)); + println!("Pinned {} to {}", extension_id, version); + + Ok(()) +} + +async fn get_vsix_metadata_smart(client: &reqwest::Client, url: &str) -> Result<(String, String), AnyError> { + // 1. Get content length + let head = client.head(url).send().await?.error_for_status()?; + let content_length = head.headers() + .get(reqwest::header::CONTENT_LENGTH) + .and_then(|v| v.to_str().ok()) + .and_then(|s| s.parse::().ok()) + .ok_or_else(|| AnyError::PinningError(PinningError("Missing Content-Length header".to_string())))?; + + // 2. Fetch the last 16KB (contains the central directory index) + let range_size = 16 * 1024; + let start = if content_length > range_size { content_length - range_size } else { 0 }; + let _res = client.get(url) + .header(reqwest::header::RANGE, format!("bytes={}-{}", start, content_length - 1)) + .send().await?.error_for_status()?; + + // Implementation of Range-based parsing would go here. + // For now, we return an error to trigger the full download fallback. 
+ Err(AnyError::PinningError(PinningError("Range request optimization not fully implemented yet".to_string()))) +} + +async fn get_vsix_metadata_full(client: &reqwest::Client, url: &str) -> Result<(String, String), AnyError> { + let response = client.get(url).send().await?.error_for_status()?; + let bytes = response.bytes().await?; + + let reader = std::io::Cursor::new(bytes); + let mut zip = zip::ZipArchive::new(reader).map_err(|e| wrap(e, "Failed to read VSIX as ZIP"))?; + + let mut package_json_bytes = Vec::new(); + let mut found = false; + + for i in 0..zip.len() { + let mut file = zip.by_index(i).map_err(|e| wrap(e, "Failed to read file from ZIP"))?; + if file.name() == "extension/package.json" { + file.read_to_end(&mut package_json_bytes).map_err(|e| wrap(e, "Failed to read package.json from ZIP"))?; + found = true; + break; + } + } + + if !found { + return Err(AnyError::PinningError(PinningError("Could not find extension/package.json in VSIX".to_string()))); + } + + let package_json: serde_json::Value = serde_json::from_slice(&package_json_bytes).map_err(|e| wrap(e, "Failed to parse package.json"))?; + + let publisher = package_json["publisher"] + .as_str() + .ok_or_else(|| AnyError::PinningError(PinningError("Missing publisher in package.json".to_string())))?; + let name = package_json["name"] + .as_str() + .ok_or_else(|| AnyError::PinningError(PinningError("Missing name in package.json".to_string())))?; + let version = package_json["version"] + .as_str() + .ok_or_else(|| AnyError::PinningError(PinningError("Missing version in package.json".to_string())))?; + + Ok((format!("{}.{}", publisher, name), version.to_string())) +} + +fn remove_pin(ctx: CommandContext, project_id: String, args: PinRemoveArgs) -> Result<(), AnyError> { + let mut project_info = resolve_project(&ctx, &project_id)?; + + if project_info.metadata.meta.pinned_extensions.remove(&args.id).is_some() { + let metadata_path = project_info.path.join("metadata.json"); + 
write_metadata(&metadata_path, &project_info.metadata)?; + log::emit(log::Level::Info, "pin", &format!("✔ Removed pin for {}", args.id)); + } else { + log::emit(log::Level::Warn, "pin", &format!("No pin found for {} in project {}", args.id, project_info.metadata.project_name)); + } + + Ok(()) +} diff --git a/src/stable/src/vs/workbench/contrib/codexConductor/browser/codexConductor.contribution.ts b/src/stable/src/vs/workbench/contrib/codexConductor/browser/codexConductor.contribution.ts new file mode 100644 index 00000000000..f2852b2f743 --- /dev/null +++ b/src/stable/src/vs/workbench/contrib/codexConductor/browser/codexConductor.contribution.ts @@ -0,0 +1,10 @@ +/*--------------------------------------------------------------------------------------------- + * Copyright (c) Frontier R&D Ltd. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. + *--------------------------------------------------------------------------------------------*/ + +import { WorkbenchPhase, registerWorkbenchContribution2 } from '../../../common/contributions.js'; +import { CodexConductorContribution } from './codexConductor.js'; +import './codexPinManager.js'; + +registerWorkbenchContribution2(CodexConductorContribution.ID, CodexConductorContribution, WorkbenchPhase.AfterRestored); diff --git a/src/stable/src/vs/workbench/contrib/codexConductor/browser/codexConductor.ts b/src/stable/src/vs/workbench/contrib/codexConductor/browser/codexConductor.ts new file mode 100644 index 00000000000..d1168fab1f2 --- /dev/null +++ b/src/stable/src/vs/workbench/contrib/codexConductor/browser/codexConductor.ts @@ -0,0 +1,812 @@ +/*--------------------------------------------------------------------------------------------- + * Copyright (c) Frontier R&D Ltd. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. 
+ *--------------------------------------------------------------------------------------------*/ + +import { Disposable, DisposableStore } from '../../../../base/common/lifecycle.js'; +import { IWorkbenchContribution } from '../../../common/contributions.js'; +import { IFileService } from '../../../../platform/files/common/files.js'; +import { IWorkspaceContextService, WorkbenchState, toWorkspaceIdentifier, isSingleFolderWorkspaceIdentifier } from '../../../../platform/workspace/common/workspace.js'; +import { IUserDataProfileService } from '../../../services/userDataProfile/common/userDataProfile.js'; +import { IUserDataProfile, IUserDataProfilesService } from '../../../../platform/userDataProfile/common/userDataProfile.js'; +import { IWorkbenchExtensionManagementService } from '../../../services/extensionManagement/common/extensionManagement.js'; +import { IStorageService, StorageScope, StorageTarget } from '../../../../platform/storage/common/storage.js'; +import { INotificationService, Severity } from '../../../../platform/notification/common/notification.js'; +import { URI } from '../../../../base/common/uri.js'; +import { joinPath } from '../../../../base/common/resources.js'; +import { IHostService } from '../../../services/host/browser/host.js'; +import { CommandsRegistry } from '../../../../platform/commands/common/commands.js'; +import { ISharedProcessService } from '../../../../platform/ipc/electron-browser/services.js'; +import { ILogService } from '../../../../platform/log/common/log.js'; +import { IDialogService } from '../../../../platform/dialogs/common/dialogs.js'; +import { IClipboardService } from '../../../../platform/clipboard/common/clipboardService.js'; +import { IProductService } from '../../../../platform/product/common/productService.js'; +import { OS, OperatingSystem } from '../../../../base/common/platform.js'; +import { timeout } from '../../../../base/common/async.js'; +import { PinnedExtensions, RequiredExtensions, ProjectMetadata, 
parsePinnedExtensions } from './codexTypes.js';
+
+/** Maps profile name → array of project folder URIs that reference it. */
+type ProfileAssociations = Record<string, string[]>;
+
+const CODEX_EDITOR_EXTENSION_ID = 'project-accelerate.codex-editor-extension';
+const CIRCUIT_BREAKER_KEY = 'codex.conductor.enforcementAttempts';
+const CIRCUIT_BREAKER_MAX = 3;
+const CIRCUIT_BREAKER_WINDOW_MS = 30_000;
+const CONDUCTOR_PROFILE_ICON = 'repo-pinned';
+const FRONTIER_EXTENSION_ID = 'frontier-rnd.frontier-authentication';
+const PROFILE_ASSOCIATIONS_KEY = 'codex.conductor.profileAssociations';
+const LAST_CLEANUP_KEY = 'codex.conductor.lastCleanup';
+const CLEANUP_INTERVAL_MS = 14 * 24 * 60 * 60 * 1000; // 14 days
+
+/** Strip publisher prefix and common suffixes to get a short profile-friendly name. */
+function shortName(extensionId: string): string {
+	const afterDot = extensionId.includes('.') ? extensionId.slice(extensionId.indexOf('.') + 1) : extensionId;
+	return afterDot.replace(/-extension$/, '');
+}
+
+export class CodexConductorContribution extends Disposable implements IWorkbenchContribution {
+
+	static readonly ID = 'workbench.contrib.codexConductor';
+
+	private metadataUri: URI | undefined;
+	private lastSeenPinsSnapshot: string | undefined;
+	private readonly syncCompletionListener = this._register(new DisposableStore());
+
+	constructor(
+		@IFileService private readonly fileService: IFileService,
+		@IWorkspaceContextService private readonly workspaceContextService: IWorkspaceContextService,
+		@IUserDataProfileService private readonly userDataProfileService: IUserDataProfileService,
+		@IUserDataProfilesService private readonly userDataProfilesService: IUserDataProfilesService,
+		@IWorkbenchExtensionManagementService private readonly extensionManagementService: IWorkbenchExtensionManagementService,
+		@IStorageService private readonly storageService: IStorageService,
+		@INotificationService private readonly notificationService: INotificationService,
+		@IHostService private 
readonly hostService: IHostService, + @ILogService private readonly logService: ILogService, + @ISharedProcessService private readonly sharedProcessService: ISharedProcessService, + @IDialogService private readonly dialogService: IDialogService, + @IClipboardService private readonly clipboardService: IClipboardService, + @IProductService private readonly productService: IProductService, + ) { + super(); + + this._register(CommandsRegistry.registerCommand('codex.conductor.cleanupProfiles', () => this.runProfileCleanup())); + this._register(this.workspaceContextService.onDidChangeWorkbenchState(() => this.initialize())); + + this.initialize(); + } + + private async initialize(): Promise { + if (this.workspaceContextService.getWorkbenchState() !== WorkbenchState.FOLDER) { + this.metadataUri = undefined; + await this.revertIfPatchBuild(); + return; + } + + const workspaceFolder = this.workspaceContextService.getWorkspace().folders[0]; + this.metadataUri = joinPath(workspaceFolder.uri, 'metadata.json'); + + // Snapshot current pins before enforcement + this.lastSeenPinsSnapshot = await this.readPinsSnapshot(); + + // Run initial enforcement + await this.enforce(); + + // Periodic profile cleanup (every 14 days) + await this.maybeCleanupOrphanedProfiles(); + + // Listen for sync completions from Frontier + this.listenForSyncCompletion(); + + await this.logStartupExtensionState(); + } + + // ── Mid-session signals ──────────────────────────────────────────── + + /** + * Listens for Frontier's workspace state changes via IStorageService. + * When Frontier writes to its workspaceState (e.g. after a sync), this fires. + * We then check if pinnedExtensions in metadata.json have changed and prompt + * the user to reload if so. 
+ */ + private listenForSyncCompletion(): void { + this.syncCompletionListener.clear(); + + this.syncCompletionListener.add( + this.storageService.onDidChangeValue( + StorageScope.WORKSPACE, + FRONTIER_EXTENSION_ID, + this.syncCompletionListener + )(() => { + this.checkForPinChanges(); + }) + ); + } + + private async checkForPinChanges(): Promise { + const currentSnapshot = await this.readPinsSnapshot(); + if (currentSnapshot === this.lastSeenPinsSnapshot) { + return; + } + + this.lastSeenPinsSnapshot = currentSnapshot; + + if (!currentSnapshot) { + // Pins were removed — prompt a simple reload to revert profile + this.notificationService.prompt( + Severity.Info, + 'Extension version pins have been removed. Reload to revert to the default profile.', + [{ + label: 'Reload Codex', + run: () => this.hostService.reload() + }] + ); + return; + } + + // New or changed pins — need to prepare the profile before reloading. + let pins: PinnedExtensions; + try { + const parsed = parsePinnedExtensions(JSON.parse(currentSnapshot)); + if (!parsed) { return; } + pins = parsed; + } catch { + return; + } + + const targetProfileName = this.resolveProfileName(pins); + const existingProfile = this.userDataProfilesService.profiles.find(p => p.name === targetProfileName); + + if (existingProfile) { + // Profile already exists — just prompt reload + this.notificationService.prompt( + Severity.Info, + 'Pinned extension installed. Reload to apply.', + [{ + label: 'Reload Codex', + run: () => this.hostService.reload() + }] + ); + return; + } + + // Profile doesn't exist — download and install, then prompt. + // Show progress notification with "Reload Codex When Ready" option. 
+ let reloadWhenReady = false; + + const handle = this.notificationService.prompt( + Severity.Info, + 'Installing pinned extension\u2026', + [{ + label: 'Reload Codex When Ready', + run: () => { reloadWhenReady = true; } + }] + ); + handle.progress.infinite(); + + try { + const profile = await this.userDataProfilesService.createNamedProfile(targetProfileName, { icon: CONDUCTOR_PROFILE_ICON }); + + try { + await this.installPinnedExtensions(pins, profile); + } catch (e: unknown) { + // Installation failed after all retries — cleanup the incomplete profile + try { + await this.userDataProfilesService.removeProfile(profile); + this.logService.info(`[CodexConductor] Cleaned up incomplete profile "${targetProfileName}" after installation failure`); + } catch (cleanupError) { + this.logService.warn(`[CodexConductor] Failed to clean up incomplete profile "${targetProfileName}": ${cleanupError}`); + } + throw e; + } + + handle.close(); + + if (reloadWhenReady) { + // User already opted in — reload immediately + this.hostService.reload(); + } else { + // Show completion notification with reload button + this.notificationService.prompt( + Severity.Info, + 'Pinned extension installed. Reload to apply.', + [{ + label: 'Reload Codex', + run: () => this.hostService.reload() + }] + ); + } + } catch (e: unknown) { + handle.close(); + this.notificationService.prompt( + Severity.Error, + 'Failed to install pinned extension.', + [{ + label: 'Copy Error Report', + run: () => this.showErrorReport(pins, e) + }] + ); + } + } + + private async installPinnedExtensions(pins: PinnedExtensions, profile: IUserDataProfile): Promise { + // Use the shared process 'extensions' IPC channel directly to bypass + // NativeExtensionManagementService.downloadVsix(), which downloads in the + // renderer using browser fetch() — that fails for GitHub release URLs due + // to CORS on the 302 redirect. The shared process downloads via Node.js + // networking which handles redirects without CORS restrictions. 
+ const channel = this.sharedProcessService.getChannel('extensions'); + + for (const [id, pin] of Object.entries(pins)) { + let lastError: Error | undefined; + for (let attempt = 1; attempt <= 3; attempt++) { + try { + this.logService.info(`[CodexConductor] Installing pinned VSIX for "${id}" v${pin.version} from ${pin.url} (attempt ${attempt}/3)`); + + await channel.call('install', [URI.parse(pin.url), { + installGivenVersion: true, + pinned: true, + profileLocation: profile.extensionsResource + }]); + lastError = undefined; + break; // Success + } catch (e: unknown) { + lastError = e instanceof Error ? e : new Error(String(e)); + (lastError as any).extensionId = id; + (lastError as any).url = pin.url; + const code = (lastError as any).code ? ` [Code: ${(lastError as any).code}]` : ''; + const stack = lastError.stack ? `\nStack: ${lastError.stack}` : ''; + this.logService.error(`[CodexConductor] Failed to install pinned extension ${id} from ${pin.url} (attempt ${attempt}/3) [Online: ${navigator.onLine}]: ${lastError.message}${code}${stack}`); + console.error(`[CodexConductor] Installation error for ${id} (attempt ${attempt}/3):`, lastError); + + if (attempt < 3) { + const delay = Math.pow(2, attempt) * 1000; + await timeout(delay); + } + } + } + + if (lastError) { + throw lastError; + } + } + } + + /** + * Reads pinnedExtensions from storage (remotePinnedExtensions written by + * Frontier) first, then falls back to metadata.json on disk. Returns a + * stable JSON string for snapshot comparison, or undefined if no pins found. + */ + private async readPinsSnapshot(): Promise { + // Storage first — this has the latest pins from origin even if sync + // aborted before merging metadata.json to disk. 
+ const storagePins = this.readPinsFromStorage(); + if (storagePins) { + return storagePins; + } + + // Fall back to metadata.json on disk + if (!this.metadataUri) { + return undefined; + } + try { + const content = await this.fileService.readFile(this.metadataUri); + const metadata = JSON.parse(content.value.toString()); + const pins = parsePinnedExtensions(metadata?.meta?.pinnedExtensions); + return pins ? JSON.stringify(pins) : undefined; + } catch { + return undefined; + } + } + + /** + * Reads remotePinnedExtensions from Frontier's workspaceState via + * IStorageService. VS Code stores an extension's entire workspaceState + * as a single JSON blob under the extension ID key, so we read that + * blob and extract the `remotePinnedExtensions` field from within it. + * Returns a stable JSON string or undefined. + */ + private readPinsFromStorage(): string | undefined { + const raw = this.storageService.get( + FRONTIER_EXTENSION_ID, + StorageScope.WORKSPACE + ); + if (!raw) { + return undefined; + } + try { + const state = JSON.parse(raw); + const pins = parsePinnedExtensions(state?.remotePinnedExtensions); + return pins ? JSON.stringify(pins) : undefined; + } catch { + return undefined; + } + } + + private async logStartupExtensionState(): Promise { + const installed = await this.extensionManagementService.getInstalled(); + const codexEditorVersion = installed.find(e => e.identifier.id.toLowerCase() === CODEX_EDITOR_EXTENSION_ID)?.manifest.version ?? 'not installed'; + const frontierAuthVersion = installed.find(e => e.identifier.id.toLowerCase() === FRONTIER_EXTENSION_ID)?.manifest.version ?? 
'not installed'; + const currentProfileName = this.userDataProfileService.currentProfile.name; + const requiredExtensions = await this.readRequiredExtensionsFromMetadata(); + const pinnedExtensions = await this.readEffectivePinnedExtensions(); + + this.logService.info( + `[CodexConductor] Startup extension state — profile=${currentProfileName}, ${CODEX_EDITOR_EXTENSION_ID}=${codexEditorVersion}, ${FRONTIER_EXTENSION_ID}=${frontierAuthVersion}, pinnedExtensions=${this.formatObjectForLog(pinnedExtensions)}, requiredExtensions=${this.formatObjectForLog(requiredExtensions)}` + ); + } + + private async readRequiredExtensionsFromMetadata(): Promise { + const metadata = await this.readProjectMetadata(); + return metadata?.meta?.requiredExtensions || {}; + } + + private async readEffectivePinnedExtensions(): Promise { + const storagePins = this.readPinsFromStorage(); + if (storagePins) { + try { + return parsePinnedExtensions(JSON.parse(storagePins)) || {}; + } catch { + // Ignore malformed storage data and fall back to metadata.json. 
+ } + } + + const metadata = await this.readProjectMetadata(); + return parsePinnedExtensions(metadata?.meta?.pinnedExtensions) || {}; + } + + private async readProjectMetadata(): Promise { + if (!this.metadataUri) { + return undefined; + } + + try { + const content = await this.fileService.readFile(this.metadataUri); + return JSON.parse(content.value.toString()) as ProjectMetadata; + } catch { + return undefined; + } + } + + private formatObjectForLog(value: T): string { + const sortedEntries = Object.entries(value).sort(([left], [right]) => left.localeCompare(right)); + return JSON.stringify(Object.fromEntries(sortedEntries)); + } + + // ── Enforcement ──────────────────────────────────────────────────── + + private async enforce(): Promise { + if (!this.metadataUri) { + return; + } + + const workspaceFolder = this.workspaceContextService.getWorkspace().folders[0]; + + // Read pins from storage first (remotePinnedExtensions written by Frontier), + // then fall back to metadata.json on disk. Storage has the latest pins from + // origin even if sync aborted before merging metadata.json to disk. 
+ let pins: PinnedExtensions | undefined; + + const storagePins = this.readPinsFromStorage(); + if (storagePins) { + try { + pins = parsePinnedExtensions(JSON.parse(storagePins)); + } catch { + this.logService.warn('[CodexConductor] Malformed remotePinnedExtensions in storage'); + } + } + + if (!pins) { + // No pins in storage — try metadata.json on disk + try { + const content = await this.fileService.readFile(this.metadataUri); + let metadata: unknown; + try { + metadata = JSON.parse(content.value.toString()); + } catch (parseError) { + this.logService.warn('[CodexConductor] metadata.json contains invalid JSON — extension pinning disabled'); + } + pins = parsePinnedExtensions((metadata as { meta?: { pinnedExtensions?: unknown } })?.meta?.pinnedExtensions); + } catch (e) { + // No metadata.json — not a Codex project, nothing to enforce + this.logService.trace('[CodexConductor] No metadata.json found — skipping enforcement'); + } + } + + if (!pins) { + // No active pins — remove this project from any profile associations + this.removeCurrentProjectFromAssociations(); + await this.revertIfPatchBuild(); + return; + } + + await this.enforcePins(pins, workspaceFolder.uri); + } + + private async enforcePins(pins: PinnedExtensions, workspaceUri: URI): Promise { + const installed = await this.extensionManagementService.getInstalled(); + const mismatches: string[] = []; + + for (const [id, pin] of Object.entries(pins)) { + const ext = installed.find(e => e.identifier.id.toLowerCase() === id.toLowerCase()); + if (!ext || ext.manifest.version !== pin.version) { + mismatches.push(`${id}: expected ${pin.version}, found ${ext?.manifest.version || 'none'}`); + } + } + + if (mismatches.length === 0) { + return; + } + + if (this.checkCircuitBreaker()) { + this.notificationService.prompt( + Severity.Error, + 'Something went wrong while switching profiles.', + [{ + label: 'Open in Default Profile', + run: () => this.switchToDefaultProfile() + }, { + label: 'Copy Error Report', + 
run: () => this.showErrorReport(pins, undefined, mismatches) + }] + ); + return; + } + + const targetProfileName = this.resolveProfileName(pins); + this.recordAttempt(); + + // Track this project's association with the profile + this.addProfileAssociation(targetProfileName, workspaceUri.toString()); + + this.logService.info(`[CodexConductor] Switching to profile "${targetProfileName}" — version pin active`); + + const existingProfile = this.userDataProfilesService.profiles.find(p => p.name === targetProfileName); + if (existingProfile) { + // Profile already exists with the correct name — the name is deterministic + // ({shortName}-v{version}) so a name match guarantees the correct extensions + // are installed. Skip download/install and just switch. + this.logService.info(`[CodexConductor] Profile "${targetProfileName}" already exists — switching without download`); + await this.switchProfileAndReload(existingProfile); + return; + } + + const profile = await this.userDataProfilesService.createNamedProfile(targetProfileName, { icon: CONDUCTOR_PROFILE_ICON }); + + try { + await this.installPinnedExtensions(pins, profile); + } catch (e: unknown) { + // Installation failed after all retries — cleanup the incomplete profile + try { + await this.userDataProfilesService.removeProfile(profile); + this.logService.info(`[CodexConductor] Cleaned up incomplete profile "${targetProfileName}" after installation failure`); + } catch (cleanupError) { + this.logService.warn(`[CodexConductor] Failed to clean up incomplete profile "${targetProfileName}": ${cleanupError}`); + } + + this.notificationService.prompt( + Severity.Error, + 'Failed to install pinned extension.', + [{ + label: 'Open in Default Profile', + run: () => this.switchToDefaultProfile() + }, { + label: 'Copy Error Report', + run: () => this.showErrorReport(pins, e) + }] + ); + return; + } + + await this.switchProfileAndReload(profile); + } + + private async revertIfPatchBuild(): Promise { + if 
(this.userDataProfileService.currentProfile.isDefault) { + return; + } + + // Only revert if the current profile was created by the conductor + const currentProfile = this.userDataProfileService.currentProfile; + if (currentProfile.icon !== CONDUCTOR_PROFILE_ICON) { + return; + } + + const defaultProfile = this.userDataProfilesService.profiles.find(p => p.isDefault); + if (defaultProfile) { + this.logService.info(`[CodexConductor] No active pins — reverting from "${currentProfile.name}" to default profile`); + await this.switchProfileAndReload(defaultProfile); + } + } + + // ── Profile lifecycle cleanup ────────────────────────────────────── + + /** + * Runs cleanup if at least CLEANUP_INTERVAL_MS has passed since the last run. + */ + private async maybeCleanupOrphanedProfiles(): Promise { + const lastCleanup = this.storageService.getNumber(LAST_CLEANUP_KEY, StorageScope.APPLICATION, 0); + if (Date.now() - lastCleanup < CLEANUP_INTERVAL_MS) { + return; + } + await this.runProfileCleanup(); + } + + /** + * Cleans up conductor-managed profiles that are no longer referenced by any + * project on disk. Can be called directly via the + * `codex.conductor.cleanupProfiles` command for testing. + * + * For each conductor profile, checks every associated project path: + * - If the project's metadata.json is unreadable (deleted, moved), remove + * the association. + * - If the project's pins no longer resolve to this profile name, remove + * the association. + * - If no associations remain, delete the profile. 
+ */ + async runProfileCleanup(): Promise { + const associations = this.getProfileAssociations(); + const conductorProfiles = this.userDataProfilesService.profiles.filter( + p => !p.isDefault && p.icon === CONDUCTOR_PROFILE_ICON + ); + + if (conductorProfiles.length === 0) { + this.storageService.store(LAST_CLEANUP_KEY, Date.now(), StorageScope.APPLICATION, StorageTarget.MACHINE); + return; + } + + let removedCount = 0; + + for (const profile of conductorProfiles) { + // Don't remove the profile we're currently using + if (profile.id === this.userDataProfileService.currentProfile.id) { + continue; + } + + const projectPaths = associations[profile.name] || []; + const stillReferenced = await this.isProfileReferencedByAnyProject(profile.name, projectPaths); + + if (!stillReferenced) { + try { + await this.userDataProfilesService.removeProfile(profile); + delete associations[profile.name]; + removedCount++; + } catch { + // Profile may be in use by another window — skip silently + } + } + } + + this.storeProfileAssociations(associations); + this.storageService.store(LAST_CLEANUP_KEY, Date.now(), StorageScope.APPLICATION, StorageTarget.MACHINE); + + this.logService.info(`[CodexConductor] Profile cleanup complete — removed ${removedCount} orphaned profile${removedCount !== 1 ? 's' : ''}, ${conductorProfiles.length - removedCount} retained`); + } + + /** + * Checks if any of the given project paths still have pins that resolve + * to the given profile name. 
+ */ + private async isProfileReferencedByAnyProject(profileName: string, projectPaths: string[]): Promise { + for (const projectPath of projectPaths) { + try { + const metadataUri = joinPath(URI.parse(projectPath), 'metadata.json'); + const content = await this.fileService.readFile(metadataUri); + const metadata = JSON.parse(content.value.toString()); + const pins = parsePinnedExtensions(metadata?.meta?.pinnedExtensions); + + if (pins && this.resolveProfileName(pins) === profileName) { + return true; + } + } catch { + // Project unreadable (deleted, moved) — not referencing + } + } + return false; + } + + // ── Profile association tracking ─────────────────────────────────── + + private getProfileAssociations(): ProfileAssociations { + const raw = this.storageService.get(PROFILE_ASSOCIATIONS_KEY, StorageScope.APPLICATION); + if (!raw) { return {}; } + try { + return JSON.parse(raw); + } catch { + return {}; + } + } + + private storeProfileAssociations(associations: ProfileAssociations): void { + this.storageService.store(PROFILE_ASSOCIATIONS_KEY, JSON.stringify(associations), StorageScope.APPLICATION, StorageTarget.MACHINE); + } + + private addProfileAssociation(profileName: string, projectUri: string): void { + const associations = this.getProfileAssociations(); + const paths = associations[profileName] || []; + if (!paths.includes(projectUri)) { + paths.push(projectUri); + } + associations[profileName] = paths; + this.storeProfileAssociations(associations); + } + + private removeCurrentProjectFromAssociations(): void { + const workspaceFolder = this.workspaceContextService.getWorkspace().folders[0]; + if (!workspaceFolder) { return; } + + const projectUri = workspaceFolder.uri.toString(); + const associations = this.getProfileAssociations(); + let changed = false; + + for (const profileName of Object.keys(associations)) { + const paths = associations[profileName]; + const idx = paths.indexOf(projectUri); + if (idx !== -1) { + paths.splice(idx, 1); + changed = 
true; + if (paths.length === 0) { + delete associations[profileName]; + } + } + } + + if (changed) { + this.storeProfileAssociations(associations); + } + } + + // ── Error reporting ──────────────────────────────────────────────── + + private async showErrorReport(pins: PinnedExtensions, error?: unknown, mismatches?: string[]): Promise { + const osName = OS === OperatingSystem.Macintosh ? 'macOS' : OS === OperatingSystem.Windows ? 'Windows' : 'Linux'; + const workspaceFolder = this.workspaceContextService.getWorkspace().folders[0]; + + const report = [ + '--- Codex Conductor Error Report ---', + '', + `Codex Version: ${this.productService.version || 'unknown'} (${this.productService.commit?.slice(0, 8) || 'unknown'})`, + `OS: ${osName}`, + `Profile: ${this.userDataProfileService.currentProfile.name}`, + `Project: ${workspaceFolder?.name || 'unknown'}`, + `Online: ${navigator.onLine}`, + '', + ]; + + if (error) { + const message = error instanceof Error ? error.message : String(error); + const code = (error as any).code ? ` [Code: ${(error as any).code}]` : ''; + const extensionId = (error as any).extensionId ? ` [Extension: ${(error as any).extensionId}]` : ''; + const url = (error as any).url ? 
` [URL: ${(error as any).url}]` : ''; + + report.push('Error:'); + report.push(` - ${message}${code}${extensionId}${url}`); + report.push(''); + } + + if (mismatches && mismatches.length > 0) { + report.push('Mismatches:'); + report.push(...mismatches.map(m => ` - ${m}`)); + report.push(''); + } + + report.push('Pinned Extensions:'); + report.push(...Object.entries(pins).map(([id, pin]) => + ` - ${id}: v${pin.version} (${pin.url})` + )); + report.push(''); + report.push('---'); + + const fullReport = report.join('\n'); + + const { result } = await this.dialogService.prompt({ + type: Severity.Error, + message: 'Something went wrong while switching profiles', + detail: fullReport, + buttons: [ + { label: 'Copy to Clipboard', run: () => true }, + ], + cancelButton: 'Close', + }); + + if (await result) { + await this.clipboardService.writeText(fullReport); + } + } + + // ── Utilities ────────────────────────────────────────────────────── + + private resolveProfileName(pins: PinnedExtensions): string { + const ids = Object.keys(pins).sort(); + const firstId = ids[0]; + const base = `${shortName(firstId)}-v${pins[firstId].version}`; + if (ids.length === 1) { return base; } + + // Simple hash of all id@version pairs for deterministic multi-pin names + let h = 5381; + const str = ids.map(id => `${id}@${pins[id].version}`).join(','); + for (let i = 0; i < str.length; i++) { h = (((h << 5) + h) ^ str.charCodeAt(i)) >>> 0; } + return `${base}+${h.toString(16).slice(0, 4)}`; + } + + private checkCircuitBreaker(): boolean { + const raw = this.storageService.get(CIRCUIT_BREAKER_KEY, StorageScope.WORKSPACE); + if (!raw) { return false; } + try { + const attempts: number[] = JSON.parse(raw); + const now = Date.now(); + const recent = attempts.filter(t => now - t < CIRCUIT_BREAKER_WINDOW_MS); + return recent.length >= CIRCUIT_BREAKER_MAX; + } catch { + return false; + } + } + + private recordAttempt(): void { + const raw = this.storageService.get(CIRCUIT_BREAKER_KEY, 
StorageScope.WORKSPACE); + let attempts: number[]; + try { + attempts = raw ? JSON.parse(raw) : []; + } catch { + attempts = []; + } + attempts.push(Date.now()); + // Prune old entries to prevent unbounded growth + const now = Date.now(); + attempts = attempts.filter(t => now - t < CIRCUIT_BREAKER_WINDOW_MS); + this.storageService.store(CIRCUIT_BREAKER_KEY, JSON.stringify(attempts), StorageScope.WORKSPACE, StorageTarget.MACHINE); + } + + /** + * switchProfile() for folder workspaces only persists the profile association + * (via setProfileForWorkspace) — it does NOT restart the extension host or + * change the active profile in the current session. A window reload is needed + * to make the switch effective. If the extension host restart is vetoed (e.g. + * a custom editor like Startup Flow is open), switchProfile() throws + * CancellationError and reverts the association — reload handles that too. + */ + private async switchProfileAndReload(profile: IUserDataProfile): Promise { + const workspace = this.workspaceContextService.getWorkspace(); + const workspaceIdentifier = toWorkspaceIdentifier(workspace); + const originalProfileId = this.userDataProfileService.currentProfile.id; + const currentProfileName = this.userDataProfileService.currentProfile.name; + + this.logService.info(`[CodexConductor] switchProfileAndReload: current=${currentProfileName}, target=${profile.name}`); + this.logService.info(`[CodexConductor] Workspace ID: ${workspaceIdentifier.id}`); + if (isSingleFolderWorkspaceIdentifier(workspaceIdentifier)) { + this.logService.info(`[CodexConductor] Workspace URI: ${workspaceIdentifier.uri.toString()}`); + } + + // Explicitly set the association for the workspace. + // For folder workspaces, this is the primary way VS Code associates a profile. 
+ this.logService.info(`[CodexConductor] Calling setProfileForWorkspace...`); + + // First, clear any existing associations for this workspace to prevent duplicates + // that could cause lookup confusion in the Main process. + try { + await this.userDataProfilesService.resetWorkspaces(); + } catch { + // Best effort + } + + await this.userDataProfilesService.setProfileForWorkspace(workspaceIdentifier, profile); + this.logService.info(`[CodexConductor] setProfileForWorkspace completed`); + + // Compare against the profile ID captured BEFORE setProfileForWorkspace. + // setProfileForWorkspace may internally trigger changeCurrentProfile which + // updates currentProfile even if the extension host vetos the switch. Using + // the post-call currentProfile.id would incorrectly skip the reload. + if (originalProfileId !== profile.id) { + this.logService.info(`[CodexConductor] Profile mismatch (${currentProfileName} != ${profile.name}) — triggering authoritative reload`); + this.hostService.reload({ forceProfile: profile.name }); + } else { + this.logService.info(`[CodexConductor] Already on target profile ${profile.name} — no reload needed`); + } + } + + private async switchToDefaultProfile(): Promise { + const profile = this.userDataProfilesService.profiles.find(p => p.isDefault); + if (profile) { + await this.switchProfileAndReload(profile); + } + } +} diff --git a/src/stable/src/vs/workbench/contrib/codexConductor/browser/codexPinManager.ts b/src/stable/src/vs/workbench/contrib/codexConductor/browser/codexPinManager.ts new file mode 100644 index 00000000000..8f898da83cf --- /dev/null +++ b/src/stable/src/vs/workbench/contrib/codexConductor/browser/codexPinManager.ts @@ -0,0 +1,398 @@ +/*--------------------------------------------------------------------------------------------- + * Copyright (c) Frontier R&D Ltd. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. 
+ *--------------------------------------------------------------------------------------------*/ + +import { Action2, registerAction2 } from '../../../../platform/actions/common/actions.js'; +import { ServicesAccessor } from '../../../../platform/instantiation/common/instantiation.js'; +import { localize, localize2 } from '../../../../nls.js'; +import { IQuickInputService, IQuickPickItem, IQuickPickSeparator } from '../../../../platform/quickinput/common/quickInput.js'; +import { IFileService } from '../../../../platform/files/common/files.js'; +import { IWorkspaceContextService, WorkbenchState } from '../../../../platform/workspace/common/workspace.js'; +import { INotificationService } from '../../../../platform/notification/common/notification.js'; +import { ILogService } from '../../../../platform/log/common/log.js'; +import { ISharedProcessService } from '../../../../platform/ipc/electron-browser/services.js'; +import { ICommandService } from '../../../../platform/commands/common/commands.js'; +import { IDialogService } from '../../../../platform/dialogs/common/dialogs.js'; +import { IProgressService, ProgressLocation } from '../../../../platform/progress/common/progress.js'; +import { asJson, IRequestService } from '../../../../platform/request/common/request.js'; +import { URI } from '../../../../base/common/uri.js'; +import { joinPath } from '../../../../base/common/resources.js'; +import { VSBuffer } from '../../../../base/common/buffer.js'; +import { CancellationToken } from '../../../../base/common/cancellation.js'; +import { DisposableStore } from '../../../../base/common/lifecycle.js'; +import { ProjectMetadata } from './codexTypes.js'; + +interface GitHubRelease { + assets?: Array<{ + name: string; + browser_download_url: string; + }>; +} + +interface PinActionItem extends IQuickPickItem { + action: 'add' | 'remove' | 'sync' | 'info'; + extensionId?: string; +} + +/** Services needed by pin management sub-flows. 
*/ +interface PinManagerContext { + readonly quickInputService: IQuickInputService; + readonly fileService: IFileService; + readonly notificationService: INotificationService; + readonly logService: ILogService; + readonly sharedProcessService: ISharedProcessService; + readonly requestService: IRequestService; + readonly dialogService: IDialogService; + readonly progressService: IProgressService; + readonly metadataUri: URI; +} + +const RELEASE_PAGE_PATTERN = /^https:\/\/github\.com\/([^/]+)\/([^/]+)\/releases\/tag\/(.+)$/; + +/** JSON indentation used by codex-editor for metadata.json. */ +const METADATA_INDENT = 4; + +/** + * Resolves a GitHub release page URL to a direct VSIX download URL. + * If the URL is not a release page, returns it unchanged. + */ +async function resolveVsixUrl(requestService: IRequestService, url: string, logService: ILogService): Promise { + const match = RELEASE_PAGE_PATTERN.exec(url.trim()); + if (!match) { + return url.trim(); + } + + const [, owner, repo, tag] = match; + const apiUrl = `https://api.github.com/repos/${owner}/${repo}/releases/tags/${encodeURIComponent(tag)}`; + + logService.info(`[CodexPinManager] Resolving release page: ${apiUrl}`); + + const context = await requestService.request( + { type: 'GET', url: apiUrl, headers: { 'Accept': 'application/vnd.github+json', 'User-Agent': 'codex-pin-manager' } }, + CancellationToken.None + ); + const release = await asJson(context); + if (!release?.assets) { + throw new Error(localize('managePins.noAssets', 'No assets found in GitHub release "{0}"', tag)); + } + + const vsixAsset = release.assets.find(a => a.name.endsWith('.vsix')); + if (!vsixAsset) { + throw new Error(localize('managePins.noVsix', 'No .vsix asset found in GitHub release "{0}"', tag)); + } + + logService.info(`[CodexPinManager] Resolved to: ${vsixAsset.browser_download_url}`); + return vsixAsset.browser_download_url; +} + +function truncateUrl(url: string): string { + try { + const parsed = new URL(url); + const 
segments = parsed.pathname.split('/').filter(Boolean); + if (segments.length > 3) { + const first2 = segments.slice(0, 2).join('/'); + const last = segments[segments.length - 1]; + return `${parsed.origin}/${first2}/.../${last}`; + } + return url; + } catch { + return url; + } +} + +registerAction2(class ManageExtensionPinsAction extends Action2 { + constructor() { + super({ + id: 'codex.conductor.managePins', + title: localize2('managePins', 'Manage Extension Pins'), + category: localize2('codex', 'Codex'), + f1: true, + }); + } + + override async run(accessor: ServicesAccessor): Promise { + const ctx: PinManagerContext = { + quickInputService: accessor.get(IQuickInputService), + fileService: accessor.get(IFileService), + notificationService: accessor.get(INotificationService), + logService: accessor.get(ILogService), + sharedProcessService: accessor.get(ISharedProcessService), + requestService: accessor.get(IRequestService), + dialogService: accessor.get(IDialogService), + progressService: accessor.get(IProgressService), + metadataUri: undefined!, + }; + + const workspaceService = accessor.get(IWorkspaceContextService); + const commandService = accessor.get(ICommandService); + + if (workspaceService.getWorkbenchState() !== WorkbenchState.FOLDER) { + ctx.notificationService.info(localize('managePins.noFolder', 'Open a project folder to manage extension pins.')); + return; + } + + const workspaceFolder = workspaceService.getWorkspace().folders[0]; + (ctx as { metadataUri: URI }).metadataUri = joinPath(workspaceFolder.uri, 'metadata.json'); + + // Hub loop — re-opens after each action until dismissed + while (true) { + const metadata = await readMetadata(ctx); + if (!metadata) { + ctx.notificationService.info(localize('managePins.noMetadata', 'Could not read metadata.json from the workspace.')); + return; + } + + const action = await showHub(ctx.quickInputService, metadata); + if (!action) { + return; // User dismissed + } + + switch (action.action) { + case 'add': 
+					await addPin(ctx);
+					break;
+				case 'remove':
+					await removePin(ctx, metadata);
+					break;
+				case 'sync':
+					await syncChanges(commandService, ctx.notificationService, ctx.logService);
+					break; // Continue loop — re-read and show hub with post-sync state
+				case 'info':
+					break; // Re-show hub
+			}
+		}
+	}
+});
+
+async function readMetadata(ctx: PinManagerContext): Promise<ProjectMetadata | undefined> {
+	try {
+		const content = await ctx.fileService.readFile(ctx.metadataUri);
+		return JSON.parse(content.value.toString()) as ProjectMetadata;
+	} catch {
+		return undefined;
+	}
+}
+
+async function writeMetadata(ctx: PinManagerContext, updater: (metadata: ProjectMetadata) => void): Promise<void> {
+	const content = await ctx.fileService.readFile(ctx.metadataUri);
+	const metadata = JSON.parse(content.value.toString()) as ProjectMetadata;
+
+	if (!metadata.meta) {
+		metadata.meta = {};
+	}
+	if (!metadata.meta.pinnedExtensions) {
+		metadata.meta.pinnedExtensions = {};
+	}
+
+	updater(metadata);
+
+	const updated = JSON.stringify(metadata, null, METADATA_INDENT) + '\n';
+	await ctx.fileService.writeFile(ctx.metadataUri, VSBuffer.fromString(updated));
+}
+
+function showHub(quickInputService: IQuickInputService, metadata: ProjectMetadata): Promise<PinActionItem | undefined> {
+	return new Promise<PinActionItem | undefined>((resolve) => {
+		const disposables = new DisposableStore();
+		const picker = quickInputService.createQuickPick<PinActionItem>({ useSeparators: true });
+		disposables.add(picker);
+
+		picker.title = localize('managePins.title', 'Manage Extension Pins');
+		picker.placeholder = localize('managePins.placeholder', 'Select an action');
+		picker.matchOnDescription = true;
+		picker.matchOnDetail = true;
+
+		const items: (PinActionItem | IQuickPickSeparator)[] = [];
+
+		// Required Extensions section
+		const required = metadata.meta?.requiredExtensions;
+		if (required && Object.keys(required).length > 0) {
+			items.push({ type: 'separator', label: localize('managePins.required', 'Required Extensions') });
+			const sortedIds = Object.keys(required).sort();
+			for (const id of sortedIds) {
+				items.push({
+					label: `$(lock) ${id}`,
+					description: required[id],
+					action: 'info',
+				});
+			}
+		}
+
+		// Pinned Extensions section
+		const pinned = metadata.meta?.pinnedExtensions;
+		if (pinned && Object.keys(pinned).length > 0) {
+			items.push({ type: 'separator', label: localize('managePins.pinned', 'Pinned Extensions') });
+			const sortedIds = Object.keys(pinned).sort();
+			for (const id of sortedIds) {
+				const pin = pinned[id];
+				items.push({
+					label: `$(pinned) ${id}`,
+					description: `v${pin.version}`,
+					detail: truncateUrl(pin.url),
+					action: 'info',
+					extensionId: id,
+				});
+			}
+		}
+
+		// Actions section
+		items.push({ type: 'separator', label: localize('managePins.actions', 'Actions') });
+		items.push({ label: localize('managePins.addAction', '$(add) Pin an Extension...'), action: 'add' });
+		if (pinned && Object.keys(pinned).length > 0) {
+			items.push({ label: localize('managePins.removeAction', '$(trash) Remove a Pin...'), action: 'remove' });
+		}
+		items.push({ label: localize('managePins.syncAction', '$(sync) Sync Changes'), action: 'sync' });
+
+		picker.items = items;
+
+		let result: PinActionItem | undefined;
+
+		disposables.add(picker.onDidAccept(() => {
+			const selected = picker.selectedItems[0];
+			if (!selected || selected.action === 'info') {
+				return; // Keep picker open for non-actionable items
+			}
+			result = selected;
+			picker.hide();
+		}));
+
+		disposables.add(picker.onDidHide(() => {
+			disposables.dispose();
+			resolve(result);
+		}));
+
+		picker.show();
+	});
+}
+
+async function addPin(ctx: PinManagerContext): Promise<void> {
+	// Step 1: Get URL from user
+	const url = await ctx.quickInputService.input({
+		title: localize('managePins.addTitle', 'Pin an Extension'),
+		placeHolder: localize('managePins.addPlaceholder', 'https://github.com/.../releases/tag/0.24.1 or direct .vsix URL'),
+		prompt: localize('managePins.addPrompt', 'Enter a GitHub release page URL or direct VSIX download URL'),
+	});
+
+	if (!url) {
+		return;
+	}
+
+	// Step 2: Resolve URL (release page → VSIX download URL) and extract manifest
+	let extensionId: string;
+	let version: string;
+	let resolvedUrl: string;
+
+	try {
+		const result = await ctx.progressService.withProgress(
+			{ location: ProgressLocation.Notification, title: localize('managePins.inspecting', 'Inspecting VSIX...') },
+			async () => {
+				const resolved = await resolveVsixUrl(ctx.requestService, url, ctx.logService);
+				const channel = ctx.sharedProcessService.getChannel('extensions');
+				const manifest: { publisher?: string; name?: string; version?: string } =
+					await channel.call('getManifest', [URI.parse(resolved)]);
+				return { resolved, manifest };
+			}
+		);
+
+		resolvedUrl = result.resolved;
+		const manifest = result.manifest;
+
+		if (!manifest.publisher || !manifest.name || !manifest.version) {
+			ctx.notificationService.error(localize('managePins.badVsix', 'VSIX is missing publisher, name, or version in package.json.'));
+			return;
+		}
+
+		extensionId = `${manifest.publisher}.${manifest.name}`;
+		version = manifest.version;
+	} catch (e: unknown) {
+		const msg = e instanceof Error ? e.message : String(e);
+		ctx.notificationService.error(localize('managePins.inspectFailed', 'Failed to inspect VSIX: {0}', msg));
+		return;
+	}
+
+	// Step 3: Confirm
+	const { confirmed } = await ctx.dialogService.confirm({
+		message: localize('managePins.confirmPin', 'Pin {0} at v{1}?', extensionId, version),
+		detail: localize('managePins.confirmPinDetail', 'This will pin {0} to version {1} for this project.', extensionId, version),
+	});
+
+	if (!confirmed) {
+		return;
+	}
+
+	// Step 4: Write to metadata.json
+	try {
+		await writeMetadata(ctx, (m) => {
+			m.meta!.pinnedExtensions![extensionId] = { version, url: resolvedUrl };
+		});
+		ctx.logService.info(`[CodexPinManager] Pinned ${extensionId} to v${version}`);
+		ctx.notificationService.info(localize('managePins.pinned', 'Pinned {0} to v{1}.', extensionId, version));
+	} catch (e: unknown) {
+		const msg = e instanceof Error ? e.message : String(e);
+		ctx.notificationService.error(localize('managePins.writeFailed', 'Failed to update metadata.json: {0}', msg));
+	}
+}
+
+async function removePin(ctx: PinManagerContext, metadata: ProjectMetadata): Promise<void> {
+	const pinned = metadata.meta?.pinnedExtensions;
+	if (!pinned || Object.keys(pinned).length === 0) {
+		ctx.notificationService.info(localize('managePins.noPins', 'No pinned extensions to remove.'));
+		return;
+	}
+
+	// Step 1: Pick which pin to remove
+	const items: (IQuickPickItem & { extensionId: string })[] = Object.keys(pinned).sort().map(id => ({
+		label: id,
+		description: `v${pinned[id].version}`,
+		extensionId: id,
+	}));
+
+	const selected = await ctx.quickInputService.pick(items, {
+		title: localize('managePins.removeTitle', 'Remove a Pin'),
+		placeHolder: localize('managePins.removePlaceholder', 'Select a pinned extension to remove'),
+	});
+
+	if (!selected) {
+		return;
+	}
+
+	const extensionId = (selected as typeof items[0]).extensionId;
+
+	// Step 2: Confirm
+	const { confirmed } = await ctx.dialogService.confirm({
+		message: localize('managePins.confirmRemove', 'Remove pin for {0}?', extensionId),
+		detail: localize('managePins.confirmRemoveDetail', 'This will unpin {0} from v{1}.', extensionId, pinned[extensionId].version),
+	});
+
+	if (!confirmed) {
+		return;
+	}
+
+	// Step 3: Update metadata.json
+	try {
+		await writeMetadata(ctx, (m) => {
+			delete m.meta!.pinnedExtensions![extensionId];
+		});
+		ctx.logService.info(`[CodexPinManager] Removed pin for ${extensionId}`);
+		ctx.notificationService.info(localize('managePins.removed', 'Removed pin for {0}.', extensionId));
+	} catch (e: unknown) {
+		const msg = e instanceof Error ? e.message : String(e);
+		ctx.notificationService.error(localize('managePins.writeFailed', 'Failed to update metadata.json: {0}', msg));
+	}
+}
+
+async function syncChanges(
+	commandService: ICommandService,
+	notificationService: INotificationService,
+	logService: ILogService,
+): Promise<void> {
+	try {
+		logService.info('[CodexPinManager] Triggering Frontier sync...');
+		await commandService.executeCommand('frontier.syncChanges');
+		logService.info('[CodexPinManager] Frontier sync completed');
+	} catch (e: unknown) {
+		const msg = e instanceof Error ? e.message : String(e);
+		logService.warn(`[CodexPinManager] Failed to trigger Frontier sync: ${msg}`);
+		notificationService.info(localize('managePins.syncFallback', 'Sync manually to share pin changes with your team.'));
+	}
+}
diff --git a/src/stable/src/vs/workbench/contrib/codexConductor/browser/codexTypes.ts b/src/stable/src/vs/workbench/contrib/codexConductor/browser/codexTypes.ts
new file mode 100644
index 00000000000..da84c1fd9fb
--- /dev/null
+++ b/src/stable/src/vs/workbench/contrib/codexConductor/browser/codexTypes.ts
@@ -0,0 +1,42 @@
+/*---------------------------------------------------------------------------------------------
+ * Copyright (c) Frontier R&D Ltd. All rights reserved.
+ * Licensed under the MIT License. See License.txt in the project root for license information.
+ *--------------------------------------------------------------------------------------------*/
+
+export interface PinnedExtensionEntry {
+	version: string;
+	url: string;
+}
+
+export type PinnedExtensions = Record<string, PinnedExtensionEntry>;
+export type RequiredExtensions = Record<string, string>;
+
+export interface ProjectMetadata {
+	meta?: {
+		pinnedExtensions?: PinnedExtensions;
+		requiredExtensions?: RequiredExtensions;
+	};
+	[key: string]: unknown;
+}
+
+/**
+ * Validates and extracts well-formed pinned extension entries from an unknown
+ * parsed JSON value. Returns only entries where the value has string `version`
+ * and `url` fields. Malformed entries are silently dropped.
+ */
+export function parsePinnedExtensions(value: unknown): PinnedExtensions | undefined {
+	if (!value || typeof value !== 'object') {
+		return undefined;
+	}
+	const result: PinnedExtensions = {};
+	for (const [key, entry] of Object.entries(value as Record<string, unknown>)) {
+		if (
+			entry && typeof entry === 'object' &&
+			typeof (entry as Record<string, unknown>).version === 'string' &&
+			typeof (entry as Record<string, unknown>).url === 'string'
+		) {
+			result[key] = entry as PinnedExtensionEntry;
+		}
+	}
+	return Object.keys(result).length > 0 ? result : undefined;
+}