From dd6d6abbaab709bc57c944ab913caf1d47345dd4 Mon Sep 17 00:00:00 2001 From: "Jiaxiao (mossaka) Zhou" Date: Wed, 26 Nov 2025 00:25:34 +0000 Subject: [PATCH 1/4] feat: enable firewall on claude engine and remove hooks Signed-off-by: Jiaxiao (mossaka) Zhou --- .github/workflows/audit-workflows.lock.yml | 117 +------- .github/workflows/blog-auditor.lock.yml | 129 +------- .github/workflows/cloclo.lock.yml | 117 +------- .../commit-changes-analyzer.lock.yml | 117 +------- .../workflows/copilot-agent-analysis.lock.yml | 129 +------- .../copilot-session-insights.lock.yml | 129 +------- .github/workflows/daily-code-metrics.lock.yml | 117 +------- .github/workflows/daily-doc-updater.lock.yml | 129 +------- .../daily-multi-device-docs-tester.lock.yml | 129 +------- .github/workflows/dev.lock.yml | 117 +------- .../developer-docs-consolidator.lock.yml | 129 +------- .../example-workflow-analyzer.lock.yml | 117 +------- .../github-mcp-structural-analysis.lock.yml | 129 +------- .../github-mcp-tools-report.lock.yml | 117 +------- .github/workflows/go-logger.lock.yml | 117 +------- .../workflows/go-pattern-detector.lock.yml | 117 +------- .../workflows/instructions-janitor.lock.yml | 129 +------- .github/workflows/lockfile-stats.lock.yml | 117 +------- .../prompt-clustering-analysis.lock.yml | 129 +------- .github/workflows/safe-output-health.lock.yml | 117 +------- .../schema-consistency-checker.lock.yml | 117 +------- .github/workflows/scout.lock.yml | 117 +------- .github/workflows/security-fix-pr.lock.yml | 117 +------- .../semantic-function-refactor.lock.yml | 117 +------- .github/workflows/smoke-claude.lock.yml | 129 +------- .github/workflows/smoke-detector.lock.yml | 117 +------- .../workflows/static-analysis-report.lock.yml | 117 +------- .../workflows/test-firewall-escape.lock.yml | 2 +- .github/workflows/typist.lock.yml | 117 +------- .github/workflows/unbloat-docs.lock.yml | 129 +------- pkg/workflow/agentic_output_test.go | 138 --------- pkg/workflow/claude_engine.go | 109 +++++-- pkg/workflow/claude_engine_network_test.go | 90 +++--- pkg/workflow/claude_settings.go | 76 ----- pkg/workflow/claude_settings_test.go | 199 ------------ pkg/workflow/claude_settings_tmp_test.go | 86 ------ pkg/workflow/compiler.go | 4 +- pkg/workflow/domains.go | 26 ++ pkg/workflow/engine_firewall_support_test.go | 28 +- pkg/workflow/engine_network_hooks.go | 172 ----------- pkg/workflow/engine_network_test.go | 282 ------------------ pkg/workflow/firewall.go | 12 +- .../firewall_default_enablement_test.go | 92 ++++-- pkg/workflow/firewall_workflow_test.go | 43 ++- pkg/workflow/network_test.go | 14 - 45 files changed, 411 insertions(+), 4487 deletions(-) delete mode 100644 pkg/workflow/claude_settings.go delete mode 100644 pkg/workflow/claude_settings_test.go delete mode 100644 pkg/workflow/claude_settings_tmp_test.go delete mode 100644 pkg/workflow/engine_network_hooks.go delete mode 100644 pkg/workflow/engine_network_test.go diff --git a/.github/workflows/audit-workflows.lock.yml b/.github/workflows/audit-workflows.lock.yml index 52d0b8e379..e57b6ad0f9 100644 --- a/.github/workflows/audit-workflows.lock.yml +++ b/.github/workflows/audit-workflows.lock.yml @@ -1087,115 +1087,6 @@ jobs: node-version: '24' - name: Install Claude Code CLI run: npm install -g @anthropic-ai/claude-code@2.0.53 - - name: Generate Claude Settings - run: | - mkdir -p /tmp/gh-aw/.claude - cat > /tmp/gh-aw/.claude/settings.json << 'EOF' - { - "hooks": { - "PreToolUse": [ - { - "matcher": "WebFetch|WebSearch", - "hooks": [ - { - "type": 
"command", - "command": ".claude/hooks/network_permissions.py" - } - ] - } - ] - } - } - EOF - - name: Generate Network Permissions Hook - run: | - mkdir -p .claude/hooks - cat > .claude/hooks/network_permissions.py << 'EOF' - #!/usr/bin/env python3 - """ - Network permissions validator for Claude Code engine. - Generated by gh-aw from workflow-level network configuration. - """ - - import json - import sys - import urllib.parse - import re - - # Domain allow-list (populated during generation) - # JSON string is safely parsed using json.loads() to eliminate quoting vulnerabilities - ALLOWED_DOMAINS = json.loads('''["crl3.digicert.com","crl4.digicert.com","ocsp.digicert.com","ts-crl.ws.symantec.com","ts-ocsp.ws.symantec.com","crl.geotrust.com","ocsp.geotrust.com","crl.thawte.com","ocsp.thawte.com","crl.verisign.com","ocsp.verisign.com","crl.globalsign.com","ocsp.globalsign.com","crls.ssl.com","ocsp.ssl.com","crl.identrust.com","ocsp.identrust.com","crl.sectigo.com","ocsp.sectigo.com","crl.usertrust.com","ocsp.usertrust.com","s.symcb.com","s.symcd.com","json-schema.org","json.schemastore.org","archive.ubuntu.com","security.ubuntu.com","ppa.launchpad.net","keyserver.ubuntu.com","azure.archive.ubuntu.com","api.snapcraft.io","packagecloud.io","packages.cloud.google.com","packages.microsoft.com"]''') - - def extract_domain(url_or_query): - """Extract domain from URL or search query.""" - if not url_or_query: - return None - - if url_or_query.startswith(('http://', 'https://')): - return urllib.parse.urlparse(url_or_query).netloc.lower() - - # Check for domain patterns in search queries - match = re.search(r'site:([a-zA-Z0-9.-]+\.[a-zA-Z]{2,})', url_or_query) - if match: - return match.group(1).lower() - - return None - - def is_domain_allowed(domain): - """Check if domain is allowed.""" - if not domain: - # If no domain detected, allow only if not under deny-all policy - return bool(ALLOWED_DOMAINS) # False if empty list (deny-all), True if has domains - - # Empty allowed domains means deny all - if not ALLOWED_DOMAINS: - return False - - for pattern in ALLOWED_DOMAINS: - regex = pattern.replace('.', r'\.').replace('*', '.*') - if re.match(f'^{regex}$', domain): - return True - return False - - # Main logic - try: - data = json.load(sys.stdin) - tool_name = data.get('tool_name', '') - tool_input = data.get('tool_input', {}) - - if tool_name not in ['WebFetch', 'WebSearch']: - sys.exit(0) # Allow other tools - - target = tool_input.get('url') or tool_input.get('query', '') - domain = extract_domain(target) - - # For WebSearch, apply domain restrictions consistently - # If no domain detected in search query, check if restrictions are in place - if tool_name == 'WebSearch' and not domain: - # Since this hook is only generated when network permissions are configured, - # empty ALLOWED_DOMAINS means deny-all policy - if not ALLOWED_DOMAINS: # Empty list means deny all - print(f"Network access blocked: deny-all policy in effect", file=sys.stderr) - print(f"No domains are allowed for WebSearch", file=sys.stderr) - sys.exit(2) # Block under deny-all policy - else: - print(f"Network access blocked for web-search: no specific domain detected", file=sys.stderr) - print(f"Allowed domains: {', '.join(ALLOWED_DOMAINS)}", file=sys.stderr) - sys.exit(2) # Block general searches when domain allowlist is configured - - if not is_domain_allowed(domain): - print(f"Network access blocked for domain: {domain}", file=sys.stderr) - print(f"Allowed domains: {', '.join(ALLOWED_DOMAINS)}", file=sys.stderr) - sys.exit(2) # 
Block with feedback to Claude - - sys.exit(0) # Allow - - except Exception as e: - print(f"Network validation error: {e}", file=sys.stderr) - sys.exit(2) # Block on errors - - EOF - chmod +x .claude/hooks/network_permissions.py - name: Downloading container images run: | set -e @@ -3008,7 +2899,7 @@ jobs: run: | set -o pipefail # Execute Claude Code CLI with prompt from file - claude --print --mcp-config /tmp/gh-aw/mcp-config/mcp-servers.json --allowed-tools 'Bash,BashOutput,Edit(/tmp/gh-aw/cache-memory/*),ExitPlanMode,Glob,Grep,KillBash,LS,MultiEdit(/tmp/gh-aw/cache-memory/*),NotebookRead,Read,Read(/tmp/gh-aw/cache-memory/*),Task,TodoWrite,Write,Write(/tmp/gh-aw/cache-memory/*),mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_job_logs,mcp__github__get_label,mcp__github__get_latest_release,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_review_comments,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_release_by_tag,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__issue_read,mcp__github__list_branches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_dependabot_alerts,mcp__github__list_discussion_categories,mcp__github__list_discussions,mcp__github__list_issue_types,mcp__github__list_issues,mcp__github__list_label,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_releases,mcp__github__list_secret_scanning_alerts,mcp__github__list_starred_repositories,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_runs,mcp__github__list_workflows,mcp__github__pull_request_read,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_repositories,mcp__github__search_users' --debug --verbose --permission-mode bypassPermissions --output-format stream-json --settings /tmp/gh-aw/.claude/settings.json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)" 2>&1 | tee /tmp/gh-aw/agent-stdio.log + claude --print --mcp-config /tmp/gh-aw/mcp-config/mcp-servers.json --allowed-tools 
'Bash,BashOutput,Edit(/tmp/gh-aw/cache-memory/*),ExitPlanMode,Glob,Grep,KillBash,LS,MultiEdit(/tmp/gh-aw/cache-memory/*),NotebookRead,Read,Read(/tmp/gh-aw/cache-memory/*),Task,TodoWrite,Write,Write(/tmp/gh-aw/cache-memory/*),mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_job_logs,mcp__github__get_label,mcp__github__get_latest_release,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_review_comments,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_release_by_tag,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__issue_read,mcp__github__list_branches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_dependabot_alerts,mcp__github__list_discussion_categories,mcp__github__list_discussions,mcp__github__list_issue_types,mcp__github__list_issues,mcp__github__list_label,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_releases,mcp__github__list_secret_scanning_alerts,mcp__github__list_starred_repositories,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_runs,mcp__github__list_workflows,mcp__github__pull_request_read,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_repositories,mcp__github__search_users' --debug --verbose --permission-mode bypassPermissions --output-format stream-json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)" 2>&1 | tee /tmp/gh-aw/agent-stdio.log env: ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }} CLAUDE_CODE_OAUTH_TOKEN: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }} @@ -3026,12 +2917,6 @@ jobs: GH_AW_ASSETS_MAX_SIZE_KB: 10240 GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg" GH_AW_TOOL_TIMEOUT: "300" - - name: Clean up network proxy hook files - if: always() - run: | - rm -rf .claude/hooks/network_permissions.py || true - rm -rf .claude/hooks || true - rm -rf .claude || true - name: Redact secrets in logs if: always() uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 diff --git a/.github/workflows/blog-auditor.lock.yml b/.github/workflows/blog-auditor.lock.yml index 9d6e25f9b2..d08cb049ae 100644 --- a/.github/workflows/blog-auditor.lock.yml +++ b/.github/workflows/blog-auditor.lock.yml @@ -606,117 +606,16 @@ jobs: uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6 with: node-version: '24' + - name: Install awf binary + run: | + echo "Installing awf from release: v0.3.0" + curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.3.0/awf-linux-x64 -o awf + chmod +x awf + sudo mv awf /usr/local/bin/ + which awf + awf --version - name: Install Claude Code CLI run: npm install -g @anthropic-ai/claude-code@2.0.53 - - name: Generate Claude Settings - run: | - mkdir -p /tmp/gh-aw/.claude - cat > /tmp/gh-aw/.claude/settings.json << 'EOF' - { - "hooks": { - "PreToolUse": [ - { - "matcher": "WebFetch|WebSearch", - "hooks": [ - { - "type": "command", - "command": 
".claude/hooks/network_permissions.py" - } - ] - } - ] - } - } - EOF - - name: Generate Network Permissions Hook - run: | - mkdir -p .claude/hooks - cat > .claude/hooks/network_permissions.py << 'EOF' - #!/usr/bin/env python3 - """ - Network permissions validator for Claude Code engine. - Generated by gh-aw from workflow-level network configuration. - """ - - import json - import sys - import urllib.parse - import re - - # Domain allow-list (populated during generation) - # JSON string is safely parsed using json.loads() to eliminate quoting vulnerabilities - ALLOWED_DOMAINS = json.loads('''["crl3.digicert.com","crl4.digicert.com","ocsp.digicert.com","ts-crl.ws.symantec.com","ts-ocsp.ws.symantec.com","crl.geotrust.com","ocsp.geotrust.com","crl.thawte.com","ocsp.thawte.com","crl.verisign.com","ocsp.verisign.com","crl.globalsign.com","ocsp.globalsign.com","crls.ssl.com","ocsp.ssl.com","crl.identrust.com","ocsp.identrust.com","crl.sectigo.com","ocsp.sectigo.com","crl.usertrust.com","ocsp.usertrust.com","s.symcb.com","s.symcd.com","json-schema.org","json.schemastore.org","archive.ubuntu.com","security.ubuntu.com","ppa.launchpad.net","keyserver.ubuntu.com","azure.archive.ubuntu.com","api.snapcraft.io","packagecloud.io","packages.cloud.google.com","packages.microsoft.com","githubnext.com","www.githubnext.com"]''') - - def extract_domain(url_or_query): - """Extract domain from URL or search query.""" - if not url_or_query: - return None - - if url_or_query.startswith(('http://', 'https://')): - return urllib.parse.urlparse(url_or_query).netloc.lower() - - # Check for domain patterns in search queries - match = re.search(r'site:([a-zA-Z0-9.-]+\.[a-zA-Z]{2,})', url_or_query) - if match: - return match.group(1).lower() - - return None - - def is_domain_allowed(domain): - """Check if domain is allowed.""" - if not domain: - # If no domain detected, allow only if not under deny-all policy - return bool(ALLOWED_DOMAINS) # False if empty list (deny-all), True if has domains - - # Empty allowed domains means deny all - if not ALLOWED_DOMAINS: - return False - - for pattern in ALLOWED_DOMAINS: - regex = pattern.replace('.', r'\.').replace('*', '.*') - if re.match(f'^{regex}$', domain): - return True - return False - - # Main logic - try: - data = json.load(sys.stdin) - tool_name = data.get('tool_name', '') - tool_input = data.get('tool_input', {}) - - if tool_name not in ['WebFetch', 'WebSearch']: - sys.exit(0) # Allow other tools - - target = tool_input.get('url') or tool_input.get('query', '') - domain = extract_domain(target) - - # For WebSearch, apply domain restrictions consistently - # If no domain detected in search query, check if restrictions are in place - if tool_name == 'WebSearch' and not domain: - # Since this hook is only generated when network permissions are configured, - # empty ALLOWED_DOMAINS means deny-all policy - if not ALLOWED_DOMAINS: # Empty list means deny all - print(f"Network access blocked: deny-all policy in effect", file=sys.stderr) - print(f"No domains are allowed for WebSearch", file=sys.stderr) - sys.exit(2) # Block under deny-all policy - else: - print(f"Network access blocked for web-search: no specific domain detected", file=sys.stderr) - print(f"Allowed domains: {', '.join(ALLOWED_DOMAINS)}", file=sys.stderr) - sys.exit(2) # Block general searches when domain allowlist is configured - - if not is_domain_allowed(domain): - print(f"Network access blocked for domain: {domain}", file=sys.stderr) - print(f"Allowed domains: {', '.join(ALLOWED_DOMAINS)}", file=sys.stderr) - 
sys.exit(2) # Block with feedback to Claude - - sys.exit(0) # Allow - - except Exception as e: - print(f"Network validation error: {e}", file=sys.stderr) - sys.exit(2) # Block on errors - - EOF - chmod +x .claude/hooks/network_permissions.py - name: Downloading container images run: | set -e @@ -2020,7 +1919,7 @@ jobs: event_name: context.eventName, staged: false, steps: { - firewall: "" + firewall: "squid" }, created_at: new Date().toISOString() }; @@ -2148,7 +2047,9 @@ jobs: run: | set -o pipefail # Execute Claude Code CLI with prompt from file - claude --print --mcp-config /tmp/gh-aw/mcp-config/mcp-servers.json --allowed-tools 'Bash(cat *),Bash(cat),Bash(date *),Bash(date),Bash(echo *),Bash(echo),Bash(gh aw compile *),Bash(grep),Bash(head),Bash(ls *),Bash(ls),Bash(mktemp *),Bash(pwd),Bash(rm *),Bash(sort),Bash(tail),Bash(test *),Bash(uniq),Bash(wc),Bash(yq),BashOutput,ExitPlanMode,Glob,Grep,KillBash,LS,NotebookRead,Read,Task,TodoWrite,Write,mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_job_logs,mcp__github__get_label,mcp__github__get_latest_release,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_review_comments,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_release_by_tag,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__issue_read,mcp__github__list_branches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_dependabot_alerts,mcp__github__list_discussion_categories,mcp__github__list_discussions,mcp__github__list_issue_types,mcp__github__list_issues,mcp__github__list_label,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_releases,mcp__github__list_secret_scanning_alerts,mcp__github__list_starred_repositories,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_runs,mcp__github__list_workflows,mcp__github__pull_request_read,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_repositories,mcp__github__search_users,mcp__playwright__browser_click,mcp__playwright__browser_close,mcp__playwright__browser_console_messages,mcp__playwright__browser_drag,mcp__playwright__browser_evaluate,mcp__playwright__browser_file_upload,mcp__playwright__browser_fill_form,mcp__playwright__browser_handle_dialog,mcp__playwright__browser_hover,mcp__playwright__browser_install,mcp__playwright__browser_navigate,mcp__playwright__browser_navigate_back,mcp__playwright__browser_network_requests,mcp__playwright__browser_press_key,mcp__playwright__browser_resize,mcp__playwright__browser_select_option,mcp__playwright__browser_snapshot,mcp__playwright__browser_tabs,mcp__playwright__browser_take_screenshot,mcp__playwright__browser_type,mcp__playwright__browser_wait_for' --debug --verbose --permission-mode bypassPermissions --output-format stream-json --settings /tmp/gh-aw/.claude/settings.json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)" 2>&1 | tee /tmp/gh-aw/agent-stdio.log 
+ sudo -E awf --env-all --container-workdir "${GITHUB_WORKSPACE}" --mount /tmp:/tmp:rw --mount "${GITHUB_WORKSPACE}:${GITHUB_WORKSPACE}:rw" --allow-domains api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,githubnext.com,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,ppa.launchpad.net,s.symcb.com,s.symcd.com,security.ubuntu.com,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com,www.githubnext.com --log-level info \ + -- claude --print --mcp-config /tmp/gh-aw/mcp-config/mcp-servers.json --allowed-tools 'Bash(cat *),Bash(cat),Bash(date *),Bash(date),Bash(echo *),Bash(echo),Bash(gh aw compile *),Bash(grep),Bash(head),Bash(ls *),Bash(ls),Bash(mktemp *),Bash(pwd),Bash(rm *),Bash(sort),Bash(tail),Bash(test *),Bash(uniq),Bash(wc),Bash(yq),BashOutput,ExitPlanMode,Glob,Grep,KillBash,LS,NotebookRead,Read,Task,TodoWrite,Write,mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_job_logs,mcp__github__get_label,mcp__github__get_latest_release,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_review_comments,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_release_by_tag,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__issue_read,mcp__github__list_branches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_dependabot_alerts,mcp__github__list_discussion_categories,mcp__github__list_discussions,mcp__github__list_issue_types,mcp__github__list_issues,mcp__github__list_label,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_releases,mcp__github__list_secret_scanning_alerts,mcp__github__list_starred_repositories,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_runs,mcp__github__list_workflows,mcp__github__pull_request_read,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_repositories,mcp__github__search_users,mcp__playwright__browser_click,mcp__playwright__browser_close,mcp__playwright__browser_console_messages,mcp__playwright__browser_drag,mcp__playwright__browser_evaluate,mcp__playwright__browser_file_upload,mcp__playwright__browser_fill_form,mcp__playwright__browser_handle_dialog,mcp__playwright__browser_hover,mcp__playwright__browser_install,mcp__playwright__browser_navigate,mcp__playwright__browser_navigate_back,mcp__playwright__browser_network_requests,mcp__playwright__browser_press_key,mcp__playwright__browser_resize,mcp__playwright__browser_select_option,mcp__playwright__browser_snapshot,mcp__playwright__browser_tabs,mcp__playwright__browser_take_scr
eenshot,mcp__playwright__browser_type,mcp__playwright__browser_wait_for' --debug --verbose --permission-mode bypassPermissions --output-format stream-json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)" \ + 2>&1 | tee /tmp/gh-aw/agent-stdio.log env: ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }} CLAUDE_CODE_OAUTH_TOKEN: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }} @@ -2162,12 +2063,6 @@ jobs: BASH_DEFAULT_TIMEOUT_MS: "60000" BASH_MAX_TIMEOUT_MS: "60000" GH_AW_SAFE_OUTPUTS: ${{ env.GH_AW_SAFE_OUTPUTS }} - - name: Clean up network proxy hook files - if: always() - run: | - rm -rf .claude/hooks/network_permissions.py || true - rm -rf .claude/hooks || true - rm -rf .claude || true - name: Redact secrets in logs if: always() uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 diff --git a/.github/workflows/cloclo.lock.yml b/.github/workflows/cloclo.lock.yml index 88c6179ca7..f39db73a2e 100644 --- a/.github/workflows/cloclo.lock.yml +++ b/.github/workflows/cloclo.lock.yml @@ -1688,115 +1688,6 @@ jobs: node-version: '24' - name: Install Claude Code CLI run: npm install -g @anthropic-ai/claude-code@2.0.53 - - name: Generate Claude Settings - run: | - mkdir -p /tmp/gh-aw/.claude - cat > /tmp/gh-aw/.claude/settings.json << 'EOF' - { - "hooks": { - "PreToolUse": [ - { - "matcher": "WebFetch|WebSearch", - "hooks": [ - { - "type": "command", - "command": ".claude/hooks/network_permissions.py" - } - ] - } - ] - } - } - EOF - - name: Generate Network Permissions Hook - run: | - mkdir -p .claude/hooks - cat > .claude/hooks/network_permissions.py << 'EOF' - #!/usr/bin/env python3 - """ - Network permissions validator for Claude Code engine. - Generated by gh-aw from workflow-level network configuration. - """ - - import json - import sys - import urllib.parse - import re - - # Domain allow-list (populated during generation) - # JSON string is safely parsed using json.loads() to eliminate quoting vulnerabilities - ALLOWED_DOMAINS = json.loads('''["crl3.digicert.com","crl4.digicert.com","ocsp.digicert.com","ts-crl.ws.symantec.com","ts-ocsp.ws.symantec.com","crl.geotrust.com","ocsp.geotrust.com","crl.thawte.com","ocsp.thawte.com","crl.verisign.com","ocsp.verisign.com","crl.globalsign.com","ocsp.globalsign.com","crls.ssl.com","ocsp.ssl.com","crl.identrust.com","ocsp.identrust.com","crl.sectigo.com","ocsp.sectigo.com","crl.usertrust.com","ocsp.usertrust.com","s.symcb.com","s.symcd.com","json-schema.org","json.schemastore.org","archive.ubuntu.com","security.ubuntu.com","ppa.launchpad.net","keyserver.ubuntu.com","azure.archive.ubuntu.com","api.snapcraft.io","packagecloud.io","packages.cloud.google.com","packages.microsoft.com"]''') - - def extract_domain(url_or_query): - """Extract domain from URL or search query.""" - if not url_or_query: - return None - - if url_or_query.startswith(('http://', 'https://')): - return urllib.parse.urlparse(url_or_query).netloc.lower() - - # Check for domain patterns in search queries - match = re.search(r'site:([a-zA-Z0-9.-]+\.[a-zA-Z]{2,})', url_or_query) - if match: - return match.group(1).lower() - - return None - - def is_domain_allowed(domain): - """Check if domain is allowed.""" - if not domain: - # If no domain detected, allow only if not under deny-all policy - return bool(ALLOWED_DOMAINS) # False if empty list (deny-all), True if has domains - - # Empty allowed domains means deny all - if not ALLOWED_DOMAINS: - return False - - for pattern in ALLOWED_DOMAINS: - regex = pattern.replace('.', r'\.').replace('*', '.*') - if re.match(f'^{regex}$', domain): - 
return True - return False - - # Main logic - try: - data = json.load(sys.stdin) - tool_name = data.get('tool_name', '') - tool_input = data.get('tool_input', {}) - - if tool_name not in ['WebFetch', 'WebSearch']: - sys.exit(0) # Allow other tools - - target = tool_input.get('url') or tool_input.get('query', '') - domain = extract_domain(target) - - # For WebSearch, apply domain restrictions consistently - # If no domain detected in search query, check if restrictions are in place - if tool_name == 'WebSearch' and not domain: - # Since this hook is only generated when network permissions are configured, - # empty ALLOWED_DOMAINS means deny-all policy - if not ALLOWED_DOMAINS: # Empty list means deny all - print(f"Network access blocked: deny-all policy in effect", file=sys.stderr) - print(f"No domains are allowed for WebSearch", file=sys.stderr) - sys.exit(2) # Block under deny-all policy - else: - print(f"Network access blocked for web-search: no specific domain detected", file=sys.stderr) - print(f"Allowed domains: {', '.join(ALLOWED_DOMAINS)}", file=sys.stderr) - sys.exit(2) # Block general searches when domain allowlist is configured - - if not is_domain_allowed(domain): - print(f"Network access blocked for domain: {domain}", file=sys.stderr) - print(f"Allowed domains: {', '.join(ALLOWED_DOMAINS)}", file=sys.stderr) - sys.exit(2) # Block with feedback to Claude - - sys.exit(0) # Allow - - except Exception as e: - print(f"Network validation error: {e}", file=sys.stderr) - sys.exit(2) # Block on errors - - EOF - chmod +x .claude/hooks/network_permissions.py - name: Downloading container images run: | set -e @@ -3221,7 +3112,7 @@ jobs: run: | set -o pipefail # Execute Claude Code CLI with prompt from file - claude --print --max-turns 100 --mcp-config /tmp/gh-aw/mcp-config/mcp-servers.json --allowed-tools 'Bash(/tmp/gh-aw/jqschema.sh),Bash(cat),Bash(date),Bash(echo),Bash(git add:*),Bash(git branch:*),Bash(git checkout:*),Bash(git commit:*),Bash(git merge:*),Bash(git rm:*),Bash(git status),Bash(git switch:*),Bash(grep),Bash(head),Bash(jq 
*),Bash(ls),Bash(pwd),Bash(sort),Bash(tail),Bash(uniq),Bash(wc),Bash(yq),BashOutput,Edit,Edit(/tmp/gh-aw/cache-memory/*),ExitPlanMode,Glob,Grep,KillBash,LS,MultiEdit,MultiEdit(/tmp/gh-aw/cache-memory/*),NotebookEdit,NotebookRead,Read,Read(/tmp/gh-aw/cache-memory/*),Task,TodoWrite,Write,Write(/tmp/gh-aw/cache-memory/*),mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_job_logs,mcp__github__get_label,mcp__github__get_latest_release,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_review_comments,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_release_by_tag,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__issue_read,mcp__github__list_branches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_dependabot_alerts,mcp__github__list_discussion_categories,mcp__github__list_discussions,mcp__github__list_issue_types,mcp__github__list_issues,mcp__github__list_label,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_releases,mcp__github__list_secret_scanning_alerts,mcp__github__list_starred_repositories,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_runs,mcp__github__list_workflows,mcp__github__pull_request_read,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_repositories,mcp__github__search_users,mcp__playwright__browser_click,mcp__playwright__browser_close,mcp__playwright__browser_console_messages,mcp__playwright__browser_drag,mcp__playwright__browser_evaluate,mcp__playwright__browser_file_upload,mcp__playwright__browser_fill_form,mcp__playwright__browser_handle_dialog,mcp__playwright__browser_hover,mcp__playwright__browser_install,mcp__playwright__browser_navigate,mcp__playwright__browser_navigate_back,mcp__playwright__browser_network_requests,mcp__playwright__browser_press_key,mcp__playwright__browser_resize,mcp__playwright__browser_select_option,mcp__playwright__browser_snapshot,mcp__playwright__browser_tabs,mcp__playwright__browser_take_screenshot,mcp__playwright__browser_type,mcp__playwright__browser_wait_for' --debug --verbose --permission-mode bypassPermissions --output-format stream-json --settings /tmp/gh-aw/.claude/settings.json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)" 2>&1 | tee /tmp/gh-aw/agent-stdio.log + claude --print --max-turns 100 --mcp-config /tmp/gh-aw/mcp-config/mcp-servers.json --allowed-tools 'Bash(/tmp/gh-aw/jqschema.sh),Bash(cat),Bash(date),Bash(echo),Bash(git add:*),Bash(git branch:*),Bash(git checkout:*),Bash(git commit:*),Bash(git merge:*),Bash(git rm:*),Bash(git status),Bash(git switch:*),Bash(grep),Bash(head),Bash(jq 
*),Bash(ls),Bash(pwd),Bash(sort),Bash(tail),Bash(uniq),Bash(wc),Bash(yq),BashOutput,Edit,Edit(/tmp/gh-aw/cache-memory/*),ExitPlanMode,Glob,Grep,KillBash,LS,MultiEdit,MultiEdit(/tmp/gh-aw/cache-memory/*),NotebookEdit,NotebookRead,Read,Read(/tmp/gh-aw/cache-memory/*),Task,TodoWrite,Write,Write(/tmp/gh-aw/cache-memory/*),mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_job_logs,mcp__github__get_label,mcp__github__get_latest_release,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_review_comments,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_release_by_tag,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__issue_read,mcp__github__list_branches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_dependabot_alerts,mcp__github__list_discussion_categories,mcp__github__list_discussions,mcp__github__list_issue_types,mcp__github__list_issues,mcp__github__list_label,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_releases,mcp__github__list_secret_scanning_alerts,mcp__github__list_starred_repositories,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_runs,mcp__github__list_workflows,mcp__github__pull_request_read,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_repositories,mcp__github__search_users,mcp__playwright__browser_click,mcp__playwright__browser_close,mcp__playwright__browser_console_messages,mcp__playwright__browser_drag,mcp__playwright__browser_evaluate,mcp__playwright__browser_file_upload,mcp__playwright__browser_fill_form,mcp__playwright__browser_handle_dialog,mcp__playwright__browser_hover,mcp__playwright__browser_install,mcp__playwright__browser_navigate,mcp__playwright__browser_navigate_back,mcp__playwright__browser_network_requests,mcp__playwright__browser_press_key,mcp__playwright__browser_resize,mcp__playwright__browser_select_option,mcp__playwright__browser_snapshot,mcp__playwright__browser_tabs,mcp__playwright__browser_take_screenshot,mcp__playwright__browser_type,mcp__playwright__browser_wait_for' --debug --verbose --permission-mode bypassPermissions --output-format stream-json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)" 2>&1 | tee /tmp/gh-aw/agent-stdio.log env: ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }} CLAUDE_CODE_OAUTH_TOKEN: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }} @@ -3236,12 +3127,6 @@ jobs: BASH_MAX_TIMEOUT_MS: "60000" GH_AW_SAFE_OUTPUTS: ${{ env.GH_AW_SAFE_OUTPUTS }} GH_AW_MAX_TURNS: 100 - - name: Clean up network proxy hook files - if: always() - run: | - rm -rf .claude/hooks/network_permissions.py || true - rm -rf .claude/hooks || true - rm -rf .claude || true - name: Redact secrets in logs if: always() uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 diff --git a/.github/workflows/commit-changes-analyzer.lock.yml b/.github/workflows/commit-changes-analyzer.lock.yml index 
b546984f9d..952a5dc169 100644 --- a/.github/workflows/commit-changes-analyzer.lock.yml +++ b/.github/workflows/commit-changes-analyzer.lock.yml @@ -579,115 +579,6 @@ jobs: node-version: '24' - name: Install Claude Code CLI run: npm install -g @anthropic-ai/claude-code@2.0.53 - - name: Generate Claude Settings - run: | - mkdir -p /tmp/gh-aw/.claude - cat > /tmp/gh-aw/.claude/settings.json << 'EOF' - { - "hooks": { - "PreToolUse": [ - { - "matcher": "WebFetch|WebSearch", - "hooks": [ - { - "type": "command", - "command": ".claude/hooks/network_permissions.py" - } - ] - } - ] - } - } - EOF - - name: Generate Network Permissions Hook - run: | - mkdir -p .claude/hooks - cat > .claude/hooks/network_permissions.py << 'EOF' - #!/usr/bin/env python3 - """ - Network permissions validator for Claude Code engine. - Generated by gh-aw from workflow-level network configuration. - """ - - import json - import sys - import urllib.parse - import re - - # Domain allow-list (populated during generation) - # JSON string is safely parsed using json.loads() to eliminate quoting vulnerabilities - ALLOWED_DOMAINS = json.loads('''["crl3.digicert.com","crl4.digicert.com","ocsp.digicert.com","ts-crl.ws.symantec.com","ts-ocsp.ws.symantec.com","crl.geotrust.com","ocsp.geotrust.com","crl.thawte.com","ocsp.thawte.com","crl.verisign.com","ocsp.verisign.com","crl.globalsign.com","ocsp.globalsign.com","crls.ssl.com","ocsp.ssl.com","crl.identrust.com","ocsp.identrust.com","crl.sectigo.com","ocsp.sectigo.com","crl.usertrust.com","ocsp.usertrust.com","s.symcb.com","s.symcd.com","json-schema.org","json.schemastore.org","archive.ubuntu.com","security.ubuntu.com","ppa.launchpad.net","keyserver.ubuntu.com","azure.archive.ubuntu.com","api.snapcraft.io","packagecloud.io","packages.cloud.google.com","packages.microsoft.com"]''') - - def extract_domain(url_or_query): - """Extract domain from URL or search query.""" - if not url_or_query: - return None - - if url_or_query.startswith(('http://', 'https://')): - return urllib.parse.urlparse(url_or_query).netloc.lower() - - # Check for domain patterns in search queries - match = re.search(r'site:([a-zA-Z0-9.-]+\.[a-zA-Z]{2,})', url_or_query) - if match: - return match.group(1).lower() - - return None - - def is_domain_allowed(domain): - """Check if domain is allowed.""" - if not domain: - # If no domain detected, allow only if not under deny-all policy - return bool(ALLOWED_DOMAINS) # False if empty list (deny-all), True if has domains - - # Empty allowed domains means deny all - if not ALLOWED_DOMAINS: - return False - - for pattern in ALLOWED_DOMAINS: - regex = pattern.replace('.', r'\.').replace('*', '.*') - if re.match(f'^{regex}$', domain): - return True - return False - - # Main logic - try: - data = json.load(sys.stdin) - tool_name = data.get('tool_name', '') - tool_input = data.get('tool_input', {}) - - if tool_name not in ['WebFetch', 'WebSearch']: - sys.exit(0) # Allow other tools - - target = tool_input.get('url') or tool_input.get('query', '') - domain = extract_domain(target) - - # For WebSearch, apply domain restrictions consistently - # If no domain detected in search query, check if restrictions are in place - if tool_name == 'WebSearch' and not domain: - # Since this hook is only generated when network permissions are configured, - # empty ALLOWED_DOMAINS means deny-all policy - if not ALLOWED_DOMAINS: # Empty list means deny all - print(f"Network access blocked: deny-all policy in effect", file=sys.stderr) - print(f"No domains are allowed for WebSearch", 
file=sys.stderr) - sys.exit(2) # Block under deny-all policy - else: - print(f"Network access blocked for web-search: no specific domain detected", file=sys.stderr) - print(f"Allowed domains: {', '.join(ALLOWED_DOMAINS)}", file=sys.stderr) - sys.exit(2) # Block general searches when domain allowlist is configured - - if not is_domain_allowed(domain): - print(f"Network access blocked for domain: {domain}", file=sys.stderr) - print(f"Allowed domains: {', '.join(ALLOWED_DOMAINS)}", file=sys.stderr) - sys.exit(2) # Block with feedback to Claude - - sys.exit(0) # Allow - - except Exception as e: - print(f"Network validation error: {e}", file=sys.stderr) - sys.exit(2) # Block on errors - - EOF - chmod +x .claude/hooks/network_permissions.py - name: Downloading container images run: | set -e @@ -2044,7 +1935,7 @@ jobs: run: | set -o pipefail # Execute Claude Code CLI with prompt from file - claude --print --max-turns 100 --mcp-config /tmp/gh-aw/mcp-config/mcp-servers.json --allowed-tools Bash,BashOutput,Edit,ExitPlanMode,Glob,Grep,KillBash,LS,MultiEdit,NotebookEdit,NotebookRead,Read,Task,TodoWrite,Write,mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_job_logs,mcp__github__get_label,mcp__github__get_latest_release,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_review_comments,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_release_by_tag,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__issue_read,mcp__github__list_branches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_dependabot_alerts,mcp__github__list_discussion_categories,mcp__github__list_discussions,mcp__github__list_issue_types,mcp__github__list_issues,mcp__github__list_label,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_releases,mcp__github__list_secret_scanning_alerts,mcp__github__list_starred_repositories,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_runs,mcp__github__list_workflows,mcp__github__pull_request_read,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_repositories,mcp__github__search_users --debug --verbose --permission-mode bypassPermissions --output-format stream-json --settings /tmp/gh-aw/.claude/settings.json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)" 2>&1 | tee /tmp/gh-aw/agent-stdio.log + claude --print --max-turns 100 --mcp-config /tmp/gh-aw/mcp-config/mcp-servers.json --allowed-tools 
Bash,BashOutput,Edit,ExitPlanMode,Glob,Grep,KillBash,LS,MultiEdit,NotebookEdit,NotebookRead,Read,Task,TodoWrite,Write,mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_job_logs,mcp__github__get_label,mcp__github__get_latest_release,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_review_comments,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_release_by_tag,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__issue_read,mcp__github__list_branches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_dependabot_alerts,mcp__github__list_discussion_categories,mcp__github__list_discussions,mcp__github__list_issue_types,mcp__github__list_issues,mcp__github__list_label,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_releases,mcp__github__list_secret_scanning_alerts,mcp__github__list_starred_repositories,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_runs,mcp__github__list_workflows,mcp__github__pull_request_read,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_repositories,mcp__github__search_users --debug --verbose --permission-mode bypassPermissions --output-format stream-json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)" 2>&1 | tee /tmp/gh-aw/agent-stdio.log env: ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }} CLAUDE_CODE_OAUTH_TOKEN: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }} @@ -2059,12 +1950,6 @@ jobs: BASH_MAX_TIMEOUT_MS: "60000" GH_AW_SAFE_OUTPUTS: ${{ env.GH_AW_SAFE_OUTPUTS }} GH_AW_MAX_TURNS: 100 - - name: Clean up network proxy hook files - if: always() - run: | - rm -rf .claude/hooks/network_permissions.py || true - rm -rf .claude/hooks || true - rm -rf .claude || true - name: Redact secrets in logs if: always() uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 diff --git a/.github/workflows/copilot-agent-analysis.lock.yml b/.github/workflows/copilot-agent-analysis.lock.yml index c206e116e5..8bd1b9ba58 100644 --- a/.github/workflows/copilot-agent-analysis.lock.yml +++ b/.github/workflows/copilot-agent-analysis.lock.yml @@ -904,117 +904,16 @@ jobs: uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6 with: node-version: '24' + - name: Install awf binary + run: | + echo "Installing awf from release: v0.3.0" + curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.3.0/awf-linux-x64 -o awf + chmod +x awf + sudo mv awf /usr/local/bin/ + which awf + awf --version - name: Install Claude Code CLI run: npm install -g @anthropic-ai/claude-code@2.0.53 - - name: Generate Claude Settings - run: | - mkdir -p /tmp/gh-aw/.claude - cat > /tmp/gh-aw/.claude/settings.json << 'EOF' - { - "hooks": { - "PreToolUse": [ - { - "matcher": "WebFetch|WebSearch", - "hooks": [ - { - "type": "command", - "command": ".claude/hooks/network_permissions.py" - } - ] - } - ] - } - } - EOF - - name: Generate 
Network Permissions Hook - run: | - mkdir -p .claude/hooks - cat > .claude/hooks/network_permissions.py << 'EOF' - #!/usr/bin/env python3 - """ - Network permissions validator for Claude Code engine. - Generated by gh-aw from workflow-level network configuration. - """ - - import json - import sys - import urllib.parse - import re - - # Domain allow-list (populated during generation) - # JSON string is safely parsed using json.loads() to eliminate quoting vulnerabilities - ALLOWED_DOMAINS = json.loads('''["crl3.digicert.com","crl4.digicert.com","ocsp.digicert.com","ts-crl.ws.symantec.com","ts-ocsp.ws.symantec.com","crl.geotrust.com","ocsp.geotrust.com","crl.thawte.com","ocsp.thawte.com","crl.verisign.com","ocsp.verisign.com","crl.globalsign.com","ocsp.globalsign.com","crls.ssl.com","ocsp.ssl.com","crl.identrust.com","ocsp.identrust.com","crl.sectigo.com","ocsp.sectigo.com","crl.usertrust.com","ocsp.usertrust.com","s.symcb.com","s.symcd.com","json-schema.org","json.schemastore.org","archive.ubuntu.com","security.ubuntu.com","ppa.launchpad.net","keyserver.ubuntu.com","azure.archive.ubuntu.com","api.snapcraft.io","packagecloud.io","packages.cloud.google.com","packages.microsoft.com","*.githubusercontent.com","raw.githubusercontent.com","objects.githubusercontent.com","lfs.github.com","github-cloud.githubusercontent.com","github-cloud.s3.amazonaws.com","codeload.github.com"]''') - - def extract_domain(url_or_query): - """Extract domain from URL or search query.""" - if not url_or_query: - return None - - if url_or_query.startswith(('http://', 'https://')): - return urllib.parse.urlparse(url_or_query).netloc.lower() - - # Check for domain patterns in search queries - match = re.search(r'site:([a-zA-Z0-9.-]+\.[a-zA-Z]{2,})', url_or_query) - if match: - return match.group(1).lower() - - return None - - def is_domain_allowed(domain): - """Check if domain is allowed.""" - if not domain: - # If no domain detected, allow only if not under deny-all policy - return bool(ALLOWED_DOMAINS) # False if empty list (deny-all), True if has domains - - # Empty allowed domains means deny all - if not ALLOWED_DOMAINS: - return False - - for pattern in ALLOWED_DOMAINS: - regex = pattern.replace('.', r'\.').replace('*', '.*') - if re.match(f'^{regex}$', domain): - return True - return False - - # Main logic - try: - data = json.load(sys.stdin) - tool_name = data.get('tool_name', '') - tool_input = data.get('tool_input', {}) - - if tool_name not in ['WebFetch', 'WebSearch']: - sys.exit(0) # Allow other tools - - target = tool_input.get('url') or tool_input.get('query', '') - domain = extract_domain(target) - - # For WebSearch, apply domain restrictions consistently - # If no domain detected in search query, check if restrictions are in place - if tool_name == 'WebSearch' and not domain: - # Since this hook is only generated when network permissions are configured, - # empty ALLOWED_DOMAINS means deny-all policy - if not ALLOWED_DOMAINS: # Empty list means deny all - print(f"Network access blocked: deny-all policy in effect", file=sys.stderr) - print(f"No domains are allowed for WebSearch", file=sys.stderr) - sys.exit(2) # Block under deny-all policy - else: - print(f"Network access blocked for web-search: no specific domain detected", file=sys.stderr) - print(f"Allowed domains: {', '.join(ALLOWED_DOMAINS)}", file=sys.stderr) - sys.exit(2) # Block general searches when domain allowlist is configured - - if not is_domain_allowed(domain): - print(f"Network access blocked for domain: {domain}", file=sys.stderr) - 
print(f"Allowed domains: {', '.join(ALLOWED_DOMAINS)}", file=sys.stderr) - sys.exit(2) # Block with feedback to Claude - - sys.exit(0) # Allow - - except Exception as e: - print(f"Network validation error: {e}", file=sys.stderr) - sys.exit(2) # Block on errors - - EOF - chmod +x .claude/hooks/network_permissions.py - name: Downloading container images run: | set -e @@ -2589,7 +2488,7 @@ jobs: event_name: context.eventName, staged: false, steps: { - firewall: "" + firewall: "squid" }, created_at: new Date().toISOString() }; @@ -2706,7 +2605,9 @@ jobs: run: | set -o pipefail # Execute Claude Code CLI with prompt from file - claude --print --mcp-config /tmp/gh-aw/mcp-config/mcp-servers.json --allowed-tools 'Bash(/tmp/gh-aw/jqschema.sh),Bash(cat),Bash(cp *),Bash(date *),Bash(date),Bash(echo),Bash(find .github -name '\''*.md'\''),Bash(find .github -type f -exec cat {} +),Bash(gh api *),Bash(gh pr list *),Bash(gh search prs *),Bash(git diff),Bash(git log --oneline),Bash(grep),Bash(head),Bash(jq *),Bash(ln *),Bash(ls -la .github),Bash(ls),Bash(mkdir *),Bash(pwd),Bash(sort),Bash(tail),Bash(uniq),Bash(wc),Bash(yq),BashOutput,Edit(/tmp/gh-aw/cache-memory/*),ExitPlanMode,Glob,Grep,KillBash,LS,MultiEdit(/tmp/gh-aw/cache-memory/*),NotebookRead,Read,Read(/tmp/gh-aw/cache-memory/*),Task,TodoWrite,Write,Write(/tmp/gh-aw/cache-memory/*),mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_job_logs,mcp__github__get_label,mcp__github__get_latest_release,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_review_comments,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_release_by_tag,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__issue_read,mcp__github__list_branches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_dependabot_alerts,mcp__github__list_discussion_categories,mcp__github__list_discussions,mcp__github__list_issue_types,mcp__github__list_issues,mcp__github__list_label,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_releases,mcp__github__list_secret_scanning_alerts,mcp__github__list_starred_repositories,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_runs,mcp__github__list_workflows,mcp__github__pull_request_read,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_repositories,mcp__github__search_users' --debug --verbose --permission-mode bypassPermissions --output-format stream-json --settings /tmp/gh-aw/.claude/settings.json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)" 2>&1 | tee /tmp/gh-aw/agent-stdio.log + sudo -E awf --env-all --container-workdir "${GITHUB_WORKSPACE}" --mount /tmp:/tmp:rw --mount "${GITHUB_WORKSPACE}:${GITHUB_WORKSPACE}:rw" --allow-domains 
'*.githubusercontent.com,api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,codeload.github.com,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,github-cloud.githubusercontent.com,github-cloud.s3.amazonaws.com,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,lfs.github.com,objects.githubusercontent.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,ppa.launchpad.net,raw.githubusercontent.com,s.symcb.com,s.symcd.com,security.ubuntu.com,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com' --log-level info \ + -- claude --print --mcp-config /tmp/gh-aw/mcp-config/mcp-servers.json --allowed-tools 'Bash(/tmp/gh-aw/jqschema.sh),Bash(cat),Bash(cp *),Bash(date *),Bash(date),Bash(echo),Bash(find .github -name '\''*.md'\''),Bash(find .github -type f -exec cat {} +),Bash(gh api *),Bash(gh pr list *),Bash(gh search prs *),Bash(git diff),Bash(git log --oneline),Bash(grep),Bash(head),Bash(jq *),Bash(ln *),Bash(ls -la .github),Bash(ls),Bash(mkdir *),Bash(pwd),Bash(sort),Bash(tail),Bash(uniq),Bash(wc),Bash(yq),BashOutput,Edit(/tmp/gh-aw/cache-memory/*),ExitPlanMode,Glob,Grep,KillBash,LS,MultiEdit(/tmp/gh-aw/cache-memory/*),NotebookRead,Read,Read(/tmp/gh-aw/cache-memory/*),Task,TodoWrite,Write,Write(/tmp/gh-aw/cache-memory/*),mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_job_logs,mcp__github__get_label,mcp__github__get_latest_release,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_review_comments,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_release_by_tag,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__issue_read,mcp__github__list_branches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_dependabot_alerts,mcp__github__list_discussion_categories,mcp__github__list_discussions,mcp__github__list_issue_types,mcp__github__list_issues,mcp__github__list_label,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_releases,mcp__github__list_secret_scanning_alerts,mcp__github__list_starred_repositories,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_runs,mcp__github__list_workflows,mcp__github__pull_request_read,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_repositories,mcp__github__search_users' --debug --verbose --permission-mode bypassPermissions --output-format stream-json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)" \ + 2>&1 | tee /tmp/gh-aw/agent-stdio.log env: ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }} CLAUDE_CODE_OAUTH_TOKEN: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }} @@ -2720,12 +2621,6 @@ jobs: BASH_DEFAULT_TIMEOUT_MS: "60000" 
BASH_MAX_TIMEOUT_MS: "60000" GH_AW_SAFE_OUTPUTS: ${{ env.GH_AW_SAFE_OUTPUTS }} - - name: Clean up network proxy hook files - if: always() - run: | - rm -rf .claude/hooks/network_permissions.py || true - rm -rf .claude/hooks || true - rm -rf .claude || true - name: Redact secrets in logs if: always() uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 diff --git a/.github/workflows/copilot-session-insights.lock.yml b/.github/workflows/copilot-session-insights.lock.yml index 4c65835a83..327a2c016b 100644 --- a/.github/workflows/copilot-session-insights.lock.yml +++ b/.github/workflows/copilot-session-insights.lock.yml @@ -1815,117 +1815,16 @@ jobs: uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6 with: node-version: '24' + - name: Install awf binary + run: | + echo "Installing awf from release: v0.3.0" + curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.3.0/awf-linux-x64 -o awf + chmod +x awf + sudo mv awf /usr/local/bin/ + which awf + awf --version - name: Install Claude Code CLI run: npm install -g @anthropic-ai/claude-code@2.0.53 - - name: Generate Claude Settings - run: | - mkdir -p /tmp/gh-aw/.claude - cat > /tmp/gh-aw/.claude/settings.json << 'EOF' - { - "hooks": { - "PreToolUse": [ - { - "matcher": "WebFetch|WebSearch", - "hooks": [ - { - "type": "command", - "command": ".claude/hooks/network_permissions.py" - } - ] - } - ] - } - } - EOF - - name: Generate Network Permissions Hook - run: | - mkdir -p .claude/hooks - cat > .claude/hooks/network_permissions.py << 'EOF' - #!/usr/bin/env python3 - """ - Network permissions validator for Claude Code engine. - Generated by gh-aw from workflow-level network configuration. - """ - - import json - import sys - import urllib.parse - import re - - # Domain allow-list (populated during generation) - # JSON string is safely parsed using json.loads() to eliminate quoting vulnerabilities - ALLOWED_DOMAINS = json.loads('''["crl3.digicert.com","crl4.digicert.com","ocsp.digicert.com","ts-crl.ws.symantec.com","ts-ocsp.ws.symantec.com","crl.geotrust.com","ocsp.geotrust.com","crl.thawte.com","ocsp.thawte.com","crl.verisign.com","ocsp.verisign.com","crl.globalsign.com","ocsp.globalsign.com","crls.ssl.com","ocsp.ssl.com","crl.identrust.com","ocsp.identrust.com","crl.sectigo.com","ocsp.sectigo.com","crl.usertrust.com","ocsp.usertrust.com","s.symcb.com","s.symcd.com","json-schema.org","json.schemastore.org","archive.ubuntu.com","security.ubuntu.com","ppa.launchpad.net","keyserver.ubuntu.com","azure.archive.ubuntu.com","api.snapcraft.io","packagecloud.io","packages.cloud.google.com","packages.microsoft.com","*.githubusercontent.com","raw.githubusercontent.com","objects.githubusercontent.com","lfs.github.com","github-cloud.githubusercontent.com","github-cloud.s3.amazonaws.com","codeload.github.com","pypi.python.org","pypi.org","pip.pypa.io","*.pythonhosted.org","files.pythonhosted.org","bootstrap.pypa.io","conda.binstar.org","conda.anaconda.org","binstar.org","anaconda.org","repo.continuum.io","repo.anaconda.com"]''') - - def extract_domain(url_or_query): - """Extract domain from URL or search query.""" - if not url_or_query: - return None - - if url_or_query.startswith(('http://', 'https://')): - return urllib.parse.urlparse(url_or_query).netloc.lower() - - # Check for domain patterns in search queries - match = re.search(r'site:([a-zA-Z0-9.-]+\.[a-zA-Z]{2,})', url_or_query) - if match: - return match.group(1).lower() - - return None - - def is_domain_allowed(domain): - """Check if domain is 
allowed.""" - if not domain: - # If no domain detected, allow only if not under deny-all policy - return bool(ALLOWED_DOMAINS) # False if empty list (deny-all), True if has domains - - # Empty allowed domains means deny all - if not ALLOWED_DOMAINS: - return False - - for pattern in ALLOWED_DOMAINS: - regex = pattern.replace('.', r'\.').replace('*', '.*') - if re.match(f'^{regex}$', domain): - return True - return False - - # Main logic - try: - data = json.load(sys.stdin) - tool_name = data.get('tool_name', '') - tool_input = data.get('tool_input', {}) - - if tool_name not in ['WebFetch', 'WebSearch']: - sys.exit(0) # Allow other tools - - target = tool_input.get('url') or tool_input.get('query', '') - domain = extract_domain(target) - - # For WebSearch, apply domain restrictions consistently - # If no domain detected in search query, check if restrictions are in place - if tool_name == 'WebSearch' and not domain: - # Since this hook is only generated when network permissions are configured, - # empty ALLOWED_DOMAINS means deny-all policy - if not ALLOWED_DOMAINS: # Empty list means deny all - print(f"Network access blocked: deny-all policy in effect", file=sys.stderr) - print(f"No domains are allowed for WebSearch", file=sys.stderr) - sys.exit(2) # Block under deny-all policy - else: - print(f"Network access blocked for web-search: no specific domain detected", file=sys.stderr) - print(f"Allowed domains: {', '.join(ALLOWED_DOMAINS)}", file=sys.stderr) - sys.exit(2) # Block general searches when domain allowlist is configured - - if not is_domain_allowed(domain): - print(f"Network access blocked for domain: {domain}", file=sys.stderr) - print(f"Allowed domains: {', '.join(ALLOWED_DOMAINS)}", file=sys.stderr) - sys.exit(2) # Block with feedback to Claude - - sys.exit(0) # Allow - - except Exception as e: - print(f"Network validation error: {e}", file=sys.stderr) - sys.exit(2) # Block on errors - - EOF - chmod +x .claude/hooks/network_permissions.py - name: Downloading container images run: | set -e @@ -4382,7 +4281,7 @@ jobs: event_name: context.eventName, staged: false, steps: { - firewall: "" + firewall: "squid" }, created_at: new Date().toISOString() }; @@ -4474,7 +4373,9 @@ jobs: run: | set -o pipefail # Execute Claude Code CLI with prompt from file - claude --print --mcp-config /tmp/gh-aw/mcp-config/mcp-servers.json --allowed-tools 
'Bash,BashOutput,Edit(/tmp/gh-aw/cache-memory/*),ExitPlanMode,Glob,Grep,KillBash,LS,MultiEdit(/tmp/gh-aw/cache-memory/*),NotebookRead,Read,Read(/tmp/gh-aw/cache-memory/*),Task,TodoWrite,Write,Write(/tmp/gh-aw/cache-memory/*),mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_job_logs,mcp__github__get_label,mcp__github__get_latest_release,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_review_comments,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_release_by_tag,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__issue_read,mcp__github__list_branches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_dependabot_alerts,mcp__github__list_discussion_categories,mcp__github__list_discussions,mcp__github__list_issue_types,mcp__github__list_issues,mcp__github__list_label,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_releases,mcp__github__list_secret_scanning_alerts,mcp__github__list_starred_repositories,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_runs,mcp__github__list_workflows,mcp__github__pull_request_read,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_repositories,mcp__github__search_users' --debug --verbose --permission-mode bypassPermissions --output-format stream-json --settings /tmp/gh-aw/.claude/settings.json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)" 2>&1 | tee /tmp/gh-aw/agent-stdio.log + sudo -E awf --env-all --container-workdir "${GITHUB_WORKSPACE}" --mount /tmp:/tmp:rw --mount "${GITHUB_WORKSPACE}:${GITHUB_WORKSPACE}:rw" --allow-domains '*.githubusercontent.com,*.pythonhosted.org,anaconda.org,api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,binstar.org,bootstrap.pypa.io,codeload.github.com,conda.anaconda.org,conda.binstar.org,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,files.pythonhosted.org,github-cloud.githubusercontent.com,github-cloud.s3.amazonaws.com,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,lfs.github.com,objects.githubusercontent.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,pip.pypa.io,ppa.launchpad.net,pypi.org,pypi.python.org,raw.githubusercontent.com,repo.anaconda.com,repo.continuum.io,s.symcb.com,s.symcd.com,security.ubuntu.com,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com' --log-level info \ + -- claude --print --mcp-config /tmp/gh-aw/mcp-config/mcp-servers.json --allowed-tools 
'Bash,BashOutput,Edit(/tmp/gh-aw/cache-memory/*),ExitPlanMode,Glob,Grep,KillBash,LS,MultiEdit(/tmp/gh-aw/cache-memory/*),NotebookRead,Read,Read(/tmp/gh-aw/cache-memory/*),Task,TodoWrite,Write,Write(/tmp/gh-aw/cache-memory/*),mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_job_logs,mcp__github__get_label,mcp__github__get_latest_release,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_review_comments,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_release_by_tag,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__issue_read,mcp__github__list_branches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_dependabot_alerts,mcp__github__list_discussion_categories,mcp__github__list_discussions,mcp__github__list_issue_types,mcp__github__list_issues,mcp__github__list_label,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_releases,mcp__github__list_secret_scanning_alerts,mcp__github__list_starred_repositories,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_runs,mcp__github__list_workflows,mcp__github__pull_request_read,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_repositories,mcp__github__search_users' --debug --verbose --permission-mode bypassPermissions --output-format stream-json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)" \ + 2>&1 | tee /tmp/gh-aw/agent-stdio.log env: ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }} CLAUDE_CODE_OAUTH_TOKEN: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }} @@ -4491,12 +4392,6 @@ jobs: GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}" GH_AW_ASSETS_MAX_SIZE_KB: 10240 GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg" - - name: Clean up network proxy hook files - if: always() - run: | - rm -rf .claude/hooks/network_permissions.py || true - rm -rf .claude/hooks || true - rm -rf .claude || true - name: Redact secrets in logs if: always() uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 diff --git a/.github/workflows/daily-code-metrics.lock.yml b/.github/workflows/daily-code-metrics.lock.yml index 8ffcdfc49a..fc2f3381ae 100644 --- a/.github/workflows/daily-code-metrics.lock.yml +++ b/.github/workflows/daily-code-metrics.lock.yml @@ -1162,115 +1162,6 @@ jobs: node-version: '24' - name: Install Claude Code CLI run: npm install -g @anthropic-ai/claude-code@2.0.53 - - name: Generate Claude Settings - run: | - mkdir -p /tmp/gh-aw/.claude - cat > /tmp/gh-aw/.claude/settings.json << 'EOF' - { - "hooks": { - "PreToolUse": [ - { - "matcher": "WebFetch|WebSearch", - "hooks": [ - { - "type": "command", - "command": ".claude/hooks/network_permissions.py" - } - ] - } - ] - } - } - EOF - - name: Generate Network Permissions Hook - run: | - mkdir -p .claude/hooks - cat > .claude/hooks/network_permissions.py << 'EOF' - #!/usr/bin/env python3 - """ - Network permissions validator for Claude Code engine. 
- Generated by gh-aw from workflow-level network configuration. - """ - - import json - import sys - import urllib.parse - import re - - # Domain allow-list (populated during generation) - # JSON string is safely parsed using json.loads() to eliminate quoting vulnerabilities - ALLOWED_DOMAINS = json.loads('''["crl3.digicert.com","crl4.digicert.com","ocsp.digicert.com","ts-crl.ws.symantec.com","ts-ocsp.ws.symantec.com","crl.geotrust.com","ocsp.geotrust.com","crl.thawte.com","ocsp.thawte.com","crl.verisign.com","ocsp.verisign.com","crl.globalsign.com","ocsp.globalsign.com","crls.ssl.com","ocsp.ssl.com","crl.identrust.com","ocsp.identrust.com","crl.sectigo.com","ocsp.sectigo.com","crl.usertrust.com","ocsp.usertrust.com","s.symcb.com","s.symcd.com","json-schema.org","json.schemastore.org","archive.ubuntu.com","security.ubuntu.com","ppa.launchpad.net","keyserver.ubuntu.com","azure.archive.ubuntu.com","api.snapcraft.io","packagecloud.io","packages.cloud.google.com","packages.microsoft.com"]''') - - def extract_domain(url_or_query): - """Extract domain from URL or search query.""" - if not url_or_query: - return None - - if url_or_query.startswith(('http://', 'https://')): - return urllib.parse.urlparse(url_or_query).netloc.lower() - - # Check for domain patterns in search queries - match = re.search(r'site:([a-zA-Z0-9.-]+\.[a-zA-Z]{2,})', url_or_query) - if match: - return match.group(1).lower() - - return None - - def is_domain_allowed(domain): - """Check if domain is allowed.""" - if not domain: - # If no domain detected, allow only if not under deny-all policy - return bool(ALLOWED_DOMAINS) # False if empty list (deny-all), True if has domains - - # Empty allowed domains means deny all - if not ALLOWED_DOMAINS: - return False - - for pattern in ALLOWED_DOMAINS: - regex = pattern.replace('.', r'\.').replace('*', '.*') - if re.match(f'^{regex}$', domain): - return True - return False - - # Main logic - try: - data = json.load(sys.stdin) - tool_name = data.get('tool_name', '') - tool_input = data.get('tool_input', {}) - - if tool_name not in ['WebFetch', 'WebSearch']: - sys.exit(0) # Allow other tools - - target = tool_input.get('url') or tool_input.get('query', '') - domain = extract_domain(target) - - # For WebSearch, apply domain restrictions consistently - # If no domain detected in search query, check if restrictions are in place - if tool_name == 'WebSearch' and not domain: - # Since this hook is only generated when network permissions are configured, - # empty ALLOWED_DOMAINS means deny-all policy - if not ALLOWED_DOMAINS: # Empty list means deny all - print(f"Network access blocked: deny-all policy in effect", file=sys.stderr) - print(f"No domains are allowed for WebSearch", file=sys.stderr) - sys.exit(2) # Block under deny-all policy - else: - print(f"Network access blocked for web-search: no specific domain detected", file=sys.stderr) - print(f"Allowed domains: {', '.join(ALLOWED_DOMAINS)}", file=sys.stderr) - sys.exit(2) # Block general searches when domain allowlist is configured - - if not is_domain_allowed(domain): - print(f"Network access blocked for domain: {domain}", file=sys.stderr) - print(f"Allowed domains: {', '.join(ALLOWED_DOMAINS)}", file=sys.stderr) - sys.exit(2) # Block with feedback to Claude - - sys.exit(0) # Allow - - except Exception as e: - print(f"Network validation error: {e}", file=sys.stderr) - sys.exit(2) # Block on errors - - EOF - chmod +x .claude/hooks/network_permissions.py - name: Downloading container images run: | set -e @@ -3181,7 +3072,7 @@ jobs: run: 
| set -o pipefail # Execute Claude Code CLI with prompt from file - claude --print --mcp-config /tmp/gh-aw/mcp-config/mcp-servers.json --allowed-tools 'Bash,BashOutput,Edit(/tmp/gh-aw/cache-memory/*),ExitPlanMode,Glob,Grep,KillBash,LS,MultiEdit(/tmp/gh-aw/cache-memory/*),NotebookRead,Read,Read(/tmp/gh-aw/cache-memory/*),Task,TodoWrite,Write,Write(/tmp/gh-aw/cache-memory/*),mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_job_logs,mcp__github__get_label,mcp__github__get_latest_release,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_review_comments,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_release_by_tag,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__issue_read,mcp__github__list_branches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_dependabot_alerts,mcp__github__list_discussion_categories,mcp__github__list_discussions,mcp__github__list_issue_types,mcp__github__list_issues,mcp__github__list_label,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_releases,mcp__github__list_secret_scanning_alerts,mcp__github__list_starred_repositories,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_runs,mcp__github__list_workflows,mcp__github__pull_request_read,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_repositories,mcp__github__search_users' --debug --verbose --permission-mode bypassPermissions --output-format stream-json --settings /tmp/gh-aw/.claude/settings.json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)" 2>&1 | tee /tmp/gh-aw/agent-stdio.log + claude --print --mcp-config /tmp/gh-aw/mcp-config/mcp-servers.json --allowed-tools 
'Bash,BashOutput,Edit(/tmp/gh-aw/cache-memory/*),ExitPlanMode,Glob,Grep,KillBash,LS,MultiEdit(/tmp/gh-aw/cache-memory/*),NotebookRead,Read,Read(/tmp/gh-aw/cache-memory/*),Task,TodoWrite,Write,Write(/tmp/gh-aw/cache-memory/*),mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_job_logs,mcp__github__get_label,mcp__github__get_latest_release,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_review_comments,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_release_by_tag,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__issue_read,mcp__github__list_branches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_dependabot_alerts,mcp__github__list_discussion_categories,mcp__github__list_discussions,mcp__github__list_issue_types,mcp__github__list_issues,mcp__github__list_label,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_releases,mcp__github__list_secret_scanning_alerts,mcp__github__list_starred_repositories,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_runs,mcp__github__list_workflows,mcp__github__pull_request_read,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_repositories,mcp__github__search_users' --debug --verbose --permission-mode bypassPermissions --output-format stream-json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)" 2>&1 | tee /tmp/gh-aw/agent-stdio.log env: ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }} CLAUDE_CODE_OAUTH_TOKEN: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }} @@ -3195,12 +3086,6 @@ jobs: BASH_DEFAULT_TIMEOUT_MS: "60000" BASH_MAX_TIMEOUT_MS: "60000" GH_AW_SAFE_OUTPUTS: ${{ env.GH_AW_SAFE_OUTPUTS }} - - name: Clean up network proxy hook files - if: always() - run: | - rm -rf .claude/hooks/network_permissions.py || true - rm -rf .claude/hooks || true - rm -rf .claude || true - name: Redact secrets in logs if: always() uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 diff --git a/.github/workflows/daily-doc-updater.lock.yml b/.github/workflows/daily-doc-updater.lock.yml index 475809d63d..cfca05d5c8 100644 --- a/.github/workflows/daily-doc-updater.lock.yml +++ b/.github/workflows/daily-doc-updater.lock.yml @@ -447,117 +447,16 @@ jobs: uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6 with: node-version: '24' + - name: Install awf binary + run: | + echo "Installing awf from release: v0.3.0" + curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.3.0/awf-linux-x64 -o awf + chmod +x awf + sudo mv awf /usr/local/bin/ + which awf + awf --version - name: Install Claude Code CLI run: npm install -g @anthropic-ai/claude-code@2.0.53 - - name: Generate Claude Settings - run: | - mkdir -p /tmp/gh-aw/.claude - cat > /tmp/gh-aw/.claude/settings.json << 'EOF' - { - "hooks": { - "PreToolUse": [ - { - "matcher": "WebFetch|WebSearch", - "hooks": [ - { - "type": "command", - 
"command": ".claude/hooks/network_permissions.py" - } - ] - } - ] - } - } - EOF - - name: Generate Network Permissions Hook - run: | - mkdir -p .claude/hooks - cat > .claude/hooks/network_permissions.py << 'EOF' - #!/usr/bin/env python3 - """ - Network permissions validator for Claude Code engine. - Generated by gh-aw from workflow-level network configuration. - """ - - import json - import sys - import urllib.parse - import re - - # Domain allow-list (populated during generation) - # JSON string is safely parsed using json.loads() to eliminate quoting vulnerabilities - ALLOWED_DOMAINS = json.loads('''["crl3.digicert.com","crl4.digicert.com","ocsp.digicert.com","ts-crl.ws.symantec.com","ts-ocsp.ws.symantec.com","crl.geotrust.com","ocsp.geotrust.com","crl.thawte.com","ocsp.thawte.com","crl.verisign.com","ocsp.verisign.com","crl.globalsign.com","ocsp.globalsign.com","crls.ssl.com","ocsp.ssl.com","crl.identrust.com","ocsp.identrust.com","crl.sectigo.com","ocsp.sectigo.com","crl.usertrust.com","ocsp.usertrust.com","s.symcb.com","s.symcd.com","json-schema.org","json.schemastore.org","archive.ubuntu.com","security.ubuntu.com","ppa.launchpad.net","keyserver.ubuntu.com","azure.archive.ubuntu.com","api.snapcraft.io","packagecloud.io","packages.cloud.google.com","packages.microsoft.com","*.githubusercontent.com","raw.githubusercontent.com","objects.githubusercontent.com","lfs.github.com","github-cloud.githubusercontent.com","github-cloud.s3.amazonaws.com","codeload.github.com"]''') - - def extract_domain(url_or_query): - """Extract domain from URL or search query.""" - if not url_or_query: - return None - - if url_or_query.startswith(('http://', 'https://')): - return urllib.parse.urlparse(url_or_query).netloc.lower() - - # Check for domain patterns in search queries - match = re.search(r'site:([a-zA-Z0-9.-]+\.[a-zA-Z]{2,})', url_or_query) - if match: - return match.group(1).lower() - - return None - - def is_domain_allowed(domain): - """Check if domain is allowed.""" - if not domain: - # If no domain detected, allow only if not under deny-all policy - return bool(ALLOWED_DOMAINS) # False if empty list (deny-all), True if has domains - - # Empty allowed domains means deny all - if not ALLOWED_DOMAINS: - return False - - for pattern in ALLOWED_DOMAINS: - regex = pattern.replace('.', r'\.').replace('*', '.*') - if re.match(f'^{regex}$', domain): - return True - return False - - # Main logic - try: - data = json.load(sys.stdin) - tool_name = data.get('tool_name', '') - tool_input = data.get('tool_input', {}) - - if tool_name not in ['WebFetch', 'WebSearch']: - sys.exit(0) # Allow other tools - - target = tool_input.get('url') or tool_input.get('query', '') - domain = extract_domain(target) - - # For WebSearch, apply domain restrictions consistently - # If no domain detected in search query, check if restrictions are in place - if tool_name == 'WebSearch' and not domain: - # Since this hook is only generated when network permissions are configured, - # empty ALLOWED_DOMAINS means deny-all policy - if not ALLOWED_DOMAINS: # Empty list means deny all - print(f"Network access blocked: deny-all policy in effect", file=sys.stderr) - print(f"No domains are allowed for WebSearch", file=sys.stderr) - sys.exit(2) # Block under deny-all policy - else: - print(f"Network access blocked for web-search: no specific domain detected", file=sys.stderr) - print(f"Allowed domains: {', '.join(ALLOWED_DOMAINS)}", file=sys.stderr) - sys.exit(2) # Block general searches when domain allowlist is configured - - if not 
is_domain_allowed(domain): - print(f"Network access blocked for domain: {domain}", file=sys.stderr) - print(f"Allowed domains: {', '.join(ALLOWED_DOMAINS)}", file=sys.stderr) - sys.exit(2) # Block with feedback to Claude - - sys.exit(0) # Allow - - except Exception as e: - print(f"Network validation error: {e}", file=sys.stderr) - sys.exit(2) # Block on errors - - EOF - chmod +x .claude/hooks/network_permissions.py - name: Downloading container images run: | set -e @@ -1700,7 +1599,7 @@ jobs: event_name: context.eventName, staged: false, steps: { - firewall: "" + firewall: "squid" }, created_at: new Date().toISOString() }; @@ -1818,7 +1717,9 @@ jobs: run: | set -o pipefail # Execute Claude Code CLI with prompt from file - claude --print --mcp-config /tmp/gh-aw/mcp-config/mcp-servers.json --allowed-tools 'Bash(cat),Bash(date),Bash(echo),Bash(find docs -name '\''*.md'\'' -exec cat {} +),Bash(find docs -name '\''*.md'\'' -o -name '\''*.mdx'\''),Bash(git add:*),Bash(git branch:*),Bash(git checkout:*),Bash(git commit:*),Bash(git merge:*),Bash(git rm:*),Bash(git status),Bash(git switch:*),Bash(grep -r '\''*'\'' docs),Bash(grep),Bash(head),Bash(ls -la docs),Bash(ls),Bash(pwd),Bash(sort),Bash(tail),Bash(uniq),Bash(wc),Bash(yq),BashOutput,Edit,Edit(/tmp/gh-aw/cache-memory/*),ExitPlanMode,Glob,Grep,KillBash,LS,MultiEdit,MultiEdit(/tmp/gh-aw/cache-memory/*),NotebookEdit,NotebookRead,Read,Read(/tmp/gh-aw/cache-memory/*),Task,TodoWrite,Write,Write(/tmp/gh-aw/cache-memory/*),mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_job_logs,mcp__github__get_label,mcp__github__get_latest_release,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_review_comments,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_release_by_tag,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__issue_read,mcp__github__list_branches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_dependabot_alerts,mcp__github__list_discussion_categories,mcp__github__list_discussions,mcp__github__list_issue_types,mcp__github__list_issues,mcp__github__list_label,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_releases,mcp__github__list_secret_scanning_alerts,mcp__github__list_starred_repositories,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_runs,mcp__github__list_workflows,mcp__github__pull_request_read,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_repositories,mcp__github__search_users' --debug --verbose --permission-mode bypassPermissions --output-format stream-json --settings /tmp/gh-aw/.claude/settings.json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)" 2>&1 | tee /tmp/gh-aw/agent-stdio.log + sudo -E awf --env-all --container-workdir "${GITHUB_WORKSPACE}" --mount /tmp:/tmp:rw --mount "${GITHUB_WORKSPACE}:${GITHUB_WORKSPACE}:rw" --allow-domains 
'*.githubusercontent.com,api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,codeload.github.com,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,github-cloud.githubusercontent.com,github-cloud.s3.amazonaws.com,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,lfs.github.com,objects.githubusercontent.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,ppa.launchpad.net,raw.githubusercontent.com,s.symcb.com,s.symcd.com,security.ubuntu.com,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com' --log-level info \ + -- claude --print --mcp-config /tmp/gh-aw/mcp-config/mcp-servers.json --allowed-tools 'Bash(cat),Bash(date),Bash(echo),Bash(find docs -name '\''*.md'\'' -exec cat {} +),Bash(find docs -name '\''*.md'\'' -o -name '\''*.mdx'\''),Bash(git add:*),Bash(git branch:*),Bash(git checkout:*),Bash(git commit:*),Bash(git merge:*),Bash(git rm:*),Bash(git status),Bash(git switch:*),Bash(grep -r '\''*'\'' docs),Bash(grep),Bash(head),Bash(ls -la docs),Bash(ls),Bash(pwd),Bash(sort),Bash(tail),Bash(uniq),Bash(wc),Bash(yq),BashOutput,Edit,Edit(/tmp/gh-aw/cache-memory/*),ExitPlanMode,Glob,Grep,KillBash,LS,MultiEdit,MultiEdit(/tmp/gh-aw/cache-memory/*),NotebookEdit,NotebookRead,Read,Read(/tmp/gh-aw/cache-memory/*),Task,TodoWrite,Write,Write(/tmp/gh-aw/cache-memory/*),mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_job_logs,mcp__github__get_label,mcp__github__get_latest_release,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_review_comments,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_release_by_tag,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__issue_read,mcp__github__list_branches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_dependabot_alerts,mcp__github__list_discussion_categories,mcp__github__list_discussions,mcp__github__list_issue_types,mcp__github__list_issues,mcp__github__list_label,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_releases,mcp__github__list_secret_scanning_alerts,mcp__github__list_starred_repositories,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_runs,mcp__github__list_workflows,mcp__github__pull_request_read,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_repositories,mcp__github__search_users' --debug --verbose --permission-mode bypassPermissions --output-format stream-json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)" \ + 2>&1 | tee /tmp/gh-aw/agent-stdio.log env: ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }} CLAUDE_CODE_OAUTH_TOKEN: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }} @@ -1832,12 +1733,6 @@ jobs: 
BASH_DEFAULT_TIMEOUT_MS: "60000" BASH_MAX_TIMEOUT_MS: "60000" GH_AW_SAFE_OUTPUTS: ${{ env.GH_AW_SAFE_OUTPUTS }} - - name: Clean up network proxy hook files - if: always() - run: | - rm -rf .claude/hooks/network_permissions.py || true - rm -rf .claude/hooks || true - rm -rf .claude || true - name: Redact secrets in logs if: always() uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 diff --git a/.github/workflows/daily-multi-device-docs-tester.lock.yml b/.github/workflows/daily-multi-device-docs-tester.lock.yml index dfe9d7d5e1..9a029ba53e 100644 --- a/.github/workflows/daily-multi-device-docs-tester.lock.yml +++ b/.github/workflows/daily-multi-device-docs-tester.lock.yml @@ -369,117 +369,16 @@ jobs: uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6 with: node-version: '24' + - name: Install awf binary + run: | + echo "Installing awf from release: v0.3.0" + curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.3.0/awf-linux-x64 -o awf + chmod +x awf + sudo mv awf /usr/local/bin/ + which awf + awf --version - name: Install Claude Code CLI run: npm install -g @anthropic-ai/claude-code@2.0.53 - - name: Generate Claude Settings - run: | - mkdir -p /tmp/gh-aw/.claude - cat > /tmp/gh-aw/.claude/settings.json << 'EOF' - { - "hooks": { - "PreToolUse": [ - { - "matcher": "WebFetch|WebSearch", - "hooks": [ - { - "type": "command", - "command": ".claude/hooks/network_permissions.py" - } - ] - } - ] - } - } - EOF - - name: Generate Network Permissions Hook - run: | - mkdir -p .claude/hooks - cat > .claude/hooks/network_permissions.py << 'EOF' - #!/usr/bin/env python3 - """ - Network permissions validator for Claude Code engine. - Generated by gh-aw from workflow-level network configuration. - """ - - import json - import sys - import urllib.parse - import re - - # Domain allow-list (populated during generation) - # JSON string is safely parsed using json.loads() to eliminate quoting vulnerabilities - ALLOWED_DOMAINS = json.loads('''["npmjs.org","npmjs.com","www.npmjs.com","www.npmjs.org","registry.npmjs.com","registry.npmjs.org","skimdb.npmjs.com","npm.pkg.github.com","api.npms.io","nodejs.org","yarnpkg.com","registry.yarnpkg.com","repo.yarnpkg.com","deb.nodesource.com","get.pnpm.io","bun.sh","deno.land","registry.bower.io"]''') - - def extract_domain(url_or_query): - """Extract domain from URL or search query.""" - if not url_or_query: - return None - - if url_or_query.startswith(('http://', 'https://')): - return urllib.parse.urlparse(url_or_query).netloc.lower() - - # Check for domain patterns in search queries - match = re.search(r'site:([a-zA-Z0-9.-]+\.[a-zA-Z]{2,})', url_or_query) - if match: - return match.group(1).lower() - - return None - - def is_domain_allowed(domain): - """Check if domain is allowed.""" - if not domain: - # If no domain detected, allow only if not under deny-all policy - return bool(ALLOWED_DOMAINS) # False if empty list (deny-all), True if has domains - - # Empty allowed domains means deny all - if not ALLOWED_DOMAINS: - return False - - for pattern in ALLOWED_DOMAINS: - regex = pattern.replace('.', r'\.').replace('*', '.*') - if re.match(f'^{regex}$', domain): - return True - return False - - # Main logic - try: - data = json.load(sys.stdin) - tool_name = data.get('tool_name', '') - tool_input = data.get('tool_input', {}) - - if tool_name not in ['WebFetch', 'WebSearch']: - sys.exit(0) # Allow other tools - - target = tool_input.get('url') or tool_input.get('query', '') - domain = extract_domain(target) - - # 
For WebSearch, apply domain restrictions consistently - # If no domain detected in search query, check if restrictions are in place - if tool_name == 'WebSearch' and not domain: - # Since this hook is only generated when network permissions are configured, - # empty ALLOWED_DOMAINS means deny-all policy - if not ALLOWED_DOMAINS: # Empty list means deny all - print(f"Network access blocked: deny-all policy in effect", file=sys.stderr) - print(f"No domains are allowed for WebSearch", file=sys.stderr) - sys.exit(2) # Block under deny-all policy - else: - print(f"Network access blocked for web-search: no specific domain detected", file=sys.stderr) - print(f"Allowed domains: {', '.join(ALLOWED_DOMAINS)}", file=sys.stderr) - sys.exit(2) # Block general searches when domain allowlist is configured - - if not is_domain_allowed(domain): - print(f"Network access blocked for domain: {domain}", file=sys.stderr) - print(f"Allowed domains: {', '.join(ALLOWED_DOMAINS)}", file=sys.stderr) - sys.exit(2) # Block with feedback to Claude - - sys.exit(0) # Allow - - except Exception as e: - print(f"Network validation error: {e}", file=sys.stderr) - sys.exit(2) # Block on errors - - EOF - chmod +x .claude/hooks/network_permissions.py - name: Downloading container images run: | set -e @@ -1542,7 +1441,7 @@ jobs: event_name: context.eventName, staged: false, steps: { - firewall: "" + firewall: "squid" }, created_at: new Date().toISOString() }; @@ -1672,7 +1571,9 @@ jobs: run: | set -o pipefail # Execute Claude Code CLI with prompt from file - claude --print --max-turns 30 --mcp-config /tmp/gh-aw/mcp-config/mcp-servers.json --allowed-tools 'Bash(cat),Bash(cd*),Bash(curl*),Bash(date),Bash(echo),Bash(grep),Bash(head),Bash(kill*),Bash(ls),Bash(ls*),Bash(lsof*),Bash(npm install*),Bash(npm run build*),Bash(npm run preview*),Bash(npx 
playwright*),Bash(pwd),Bash(pwd*),Bash(sort),Bash(tail),Bash(uniq),Bash(wc),Bash(yq),BashOutput,ExitPlanMode,Glob,Grep,KillBash,LS,NotebookRead,Read,Task,TodoWrite,Write,mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_job_logs,mcp__github__get_label,mcp__github__get_latest_release,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_review_comments,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_release_by_tag,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__issue_read,mcp__github__list_branches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_dependabot_alerts,mcp__github__list_discussion_categories,mcp__github__list_discussions,mcp__github__list_issue_types,mcp__github__list_issues,mcp__github__list_label,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_releases,mcp__github__list_secret_scanning_alerts,mcp__github__list_starred_repositories,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_runs,mcp__github__list_workflows,mcp__github__pull_request_read,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_repositories,mcp__github__search_users,mcp__playwright__browser_click,mcp__playwright__browser_close,mcp__playwright__browser_console_messages,mcp__playwright__browser_drag,mcp__playwright__browser_evaluate,mcp__playwright__browser_file_upload,mcp__playwright__browser_fill_form,mcp__playwright__browser_handle_dialog,mcp__playwright__browser_hover,mcp__playwright__browser_install,mcp__playwright__browser_navigate,mcp__playwright__browser_navigate_back,mcp__playwright__browser_network_requests,mcp__playwright__browser_press_key,mcp__playwright__browser_resize,mcp__playwright__browser_select_option,mcp__playwright__browser_snapshot,mcp__playwright__browser_tabs,mcp__playwright__browser_take_screenshot,mcp__playwright__browser_type,mcp__playwright__browser_wait_for' --debug --verbose --permission-mode bypassPermissions --output-format stream-json --settings /tmp/gh-aw/.claude/settings.json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)" 2>&1 | tee /tmp/gh-aw/agent-stdio.log + sudo -E awf --env-all --container-workdir "${GITHUB_WORKSPACE}" --mount /tmp:/tmp:rw --mount "${GITHUB_WORKSPACE}:${GITHUB_WORKSPACE}:rw" --allow-domains api.npms.io,bun.sh,deb.nodesource.com,deno.land,get.pnpm.io,nodejs.org,npm.pkg.github.com,npmjs.com,npmjs.org,registry.bower.io,registry.npmjs.com,registry.npmjs.org,registry.yarnpkg.com,repo.yarnpkg.com,skimdb.npmjs.com,www.npmjs.com,www.npmjs.org,yarnpkg.com --log-level info \ + -- claude --print --max-turns 30 --mcp-config /tmp/gh-aw/mcp-config/mcp-servers.json --allowed-tools 'Bash(cat),Bash(cd*),Bash(curl*),Bash(date),Bash(echo),Bash(grep),Bash(head),Bash(kill*),Bash(ls),Bash(ls*),Bash(lsof*),Bash(npm install*),Bash(npm run build*),Bash(npm run preview*),Bash(npx 
playwright*),Bash(pwd),Bash(pwd*),Bash(sort),Bash(tail),Bash(uniq),Bash(wc),Bash(yq),BashOutput,ExitPlanMode,Glob,Grep,KillBash,LS,NotebookRead,Read,Task,TodoWrite,Write,mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_job_logs,mcp__github__get_label,mcp__github__get_latest_release,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_review_comments,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_release_by_tag,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__issue_read,mcp__github__list_branches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_dependabot_alerts,mcp__github__list_discussion_categories,mcp__github__list_discussions,mcp__github__list_issue_types,mcp__github__list_issues,mcp__github__list_label,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_releases,mcp__github__list_secret_scanning_alerts,mcp__github__list_starred_repositories,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_runs,mcp__github__list_workflows,mcp__github__pull_request_read,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_repositories,mcp__github__search_users,mcp__playwright__browser_click,mcp__playwright__browser_close,mcp__playwright__browser_console_messages,mcp__playwright__browser_drag,mcp__playwright__browser_evaluate,mcp__playwright__browser_file_upload,mcp__playwright__browser_fill_form,mcp__playwright__browser_handle_dialog,mcp__playwright__browser_hover,mcp__playwright__browser_install,mcp__playwright__browser_navigate,mcp__playwright__browser_navigate_back,mcp__playwright__browser_network_requests,mcp__playwright__browser_press_key,mcp__playwright__browser_resize,mcp__playwright__browser_select_option,mcp__playwright__browser_snapshot,mcp__playwright__browser_tabs,mcp__playwright__browser_take_screenshot,mcp__playwright__browser_type,mcp__playwright__browser_wait_for' --debug --verbose --permission-mode bypassPermissions --output-format stream-json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)" \ + 2>&1 | tee /tmp/gh-aw/agent-stdio.log env: ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }} CLAUDE_CODE_OAUTH_TOKEN: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }} @@ -1690,12 +1591,6 @@ jobs: GH_AW_ASSETS_MAX_SIZE_KB: 10240 GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg" GH_AW_MAX_TURNS: 30 - - name: Clean up network proxy hook files - if: always() - run: | - rm -rf .claude/hooks/network_permissions.py || true - rm -rf .claude/hooks || true - rm -rf .claude || true - name: Redact secrets in logs if: always() uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 diff --git a/.github/workflows/dev.lock.yml b/.github/workflows/dev.lock.yml index 8483817594..c2c410a75c 100644 --- a/.github/workflows/dev.lock.yml +++ b/.github/workflows/dev.lock.yml @@ -267,115 +267,6 @@ jobs: node-version: '24' - name: Install Claude Code CLI run: npm install 
-g @anthropic-ai/claude-code@2.0.53 - - name: Generate Claude Settings - run: | - mkdir -p /tmp/gh-aw/.claude - cat > /tmp/gh-aw/.claude/settings.json << 'EOF' - { - "hooks": { - "PreToolUse": [ - { - "matcher": "WebFetch|WebSearch", - "hooks": [ - { - "type": "command", - "command": ".claude/hooks/network_permissions.py" - } - ] - } - ] - } - } - EOF - - name: Generate Network Permissions Hook - run: | - mkdir -p .claude/hooks - cat > .claude/hooks/network_permissions.py << 'EOF' - #!/usr/bin/env python3 - """ - Network permissions validator for Claude Code engine. - Generated by gh-aw from workflow-level network configuration. - """ - - import json - import sys - import urllib.parse - import re - - # Domain allow-list (populated during generation) - # JSON string is safely parsed using json.loads() to eliminate quoting vulnerabilities - ALLOWED_DOMAINS = json.loads('''["crl3.digicert.com","crl4.digicert.com","ocsp.digicert.com","ts-crl.ws.symantec.com","ts-ocsp.ws.symantec.com","crl.geotrust.com","ocsp.geotrust.com","crl.thawte.com","ocsp.thawte.com","crl.verisign.com","ocsp.verisign.com","crl.globalsign.com","ocsp.globalsign.com","crls.ssl.com","ocsp.ssl.com","crl.identrust.com","ocsp.identrust.com","crl.sectigo.com","ocsp.sectigo.com","crl.usertrust.com","ocsp.usertrust.com","s.symcb.com","s.symcd.com","json-schema.org","json.schemastore.org","archive.ubuntu.com","security.ubuntu.com","ppa.launchpad.net","keyserver.ubuntu.com","azure.archive.ubuntu.com","api.snapcraft.io","packagecloud.io","packages.cloud.google.com","packages.microsoft.com"]''') - - def extract_domain(url_or_query): - """Extract domain from URL or search query.""" - if not url_or_query: - return None - - if url_or_query.startswith(('http://', 'https://')): - return urllib.parse.urlparse(url_or_query).netloc.lower() - - # Check for domain patterns in search queries - match = re.search(r'site:([a-zA-Z0-9.-]+\.[a-zA-Z]{2,})', url_or_query) - if match: - return match.group(1).lower() - - return None - - def is_domain_allowed(domain): - """Check if domain is allowed.""" - if not domain: - # If no domain detected, allow only if not under deny-all policy - return bool(ALLOWED_DOMAINS) # False if empty list (deny-all), True if has domains - - # Empty allowed domains means deny all - if not ALLOWED_DOMAINS: - return False - - for pattern in ALLOWED_DOMAINS: - regex = pattern.replace('.', r'\.').replace('*', '.*') - if re.match(f'^{regex}$', domain): - return True - return False - - # Main logic - try: - data = json.load(sys.stdin) - tool_name = data.get('tool_name', '') - tool_input = data.get('tool_input', {}) - - if tool_name not in ['WebFetch', 'WebSearch']: - sys.exit(0) # Allow other tools - - target = tool_input.get('url') or tool_input.get('query', '') - domain = extract_domain(target) - - # For WebSearch, apply domain restrictions consistently - # If no domain detected in search query, check if restrictions are in place - if tool_name == 'WebSearch' and not domain: - # Since this hook is only generated when network permissions are configured, - # empty ALLOWED_DOMAINS means deny-all policy - if not ALLOWED_DOMAINS: # Empty list means deny all - print(f"Network access blocked: deny-all policy in effect", file=sys.stderr) - print(f"No domains are allowed for WebSearch", file=sys.stderr) - sys.exit(2) # Block under deny-all policy - else: - print(f"Network access blocked for web-search: no specific domain detected", file=sys.stderr) - print(f"Allowed domains: {', '.join(ALLOWED_DOMAINS)}", file=sys.stderr) - sys.exit(2) # 
Block general searches when domain allowlist is configured - - if not is_domain_allowed(domain): - print(f"Network access blocked for domain: {domain}", file=sys.stderr) - print(f"Allowed domains: {', '.join(ALLOWED_DOMAINS)}", file=sys.stderr) - sys.exit(2) # Block with feedback to Claude - - sys.exit(0) # Allow - - except Exception as e: - print(f"Network validation error: {e}", file=sys.stderr) - sys.exit(2) # Block on errors - - EOF - chmod +x .claude/hooks/network_permissions.py - name: Downloading container images run: | set -e @@ -1424,7 +1315,7 @@ jobs: run: | set -o pipefail # Execute Claude Code CLI with prompt from file - claude --print --mcp-config /tmp/gh-aw/mcp-config/mcp-servers.json --allowed-tools Bash,BashOutput,Edit,ExitPlanMode,Glob,Grep,KillBash,LS,MultiEdit,NotebookEdit,NotebookRead,Read,Task,TodoWrite,Write,mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_job_logs,mcp__github__get_label,mcp__github__get_latest_release,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_review_comments,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_release_by_tag,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__issue_read,mcp__github__list_branches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_dependabot_alerts,mcp__github__list_discussion_categories,mcp__github__list_discussions,mcp__github__list_issue_types,mcp__github__list_issues,mcp__github__list_label,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_releases,mcp__github__list_secret_scanning_alerts,mcp__github__list_starred_repositories,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_runs,mcp__github__list_workflows,mcp__github__pull_request_read,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_repositories,mcp__github__search_users --debug --verbose --permission-mode bypassPermissions --output-format stream-json --settings /tmp/gh-aw/.claude/settings.json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)" 2>&1 | tee /tmp/gh-aw/agent-stdio.log + claude --print --mcp-config /tmp/gh-aw/mcp-config/mcp-servers.json --allowed-tools 
Bash,BashOutput,Edit,ExitPlanMode,Glob,Grep,KillBash,LS,MultiEdit,NotebookEdit,NotebookRead,Read,Task,TodoWrite,Write,mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_job_logs,mcp__github__get_label,mcp__github__get_latest_release,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_review_comments,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_release_by_tag,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__issue_read,mcp__github__list_branches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_dependabot_alerts,mcp__github__list_discussion_categories,mcp__github__list_discussions,mcp__github__list_issue_types,mcp__github__list_issues,mcp__github__list_label,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_releases,mcp__github__list_secret_scanning_alerts,mcp__github__list_starred_repositories,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_runs,mcp__github__list_workflows,mcp__github__pull_request_read,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_repositories,mcp__github__search_users --debug --verbose --permission-mode bypassPermissions --output-format stream-json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)" 2>&1 | tee /tmp/gh-aw/agent-stdio.log env: ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }} CLAUDE_CODE_OAUTH_TOKEN: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }} @@ -1438,12 +1329,6 @@ jobs: BASH_DEFAULT_TIMEOUT_MS: "60000" BASH_MAX_TIMEOUT_MS: "60000" GH_AW_SAFE_OUTPUTS: ${{ env.GH_AW_SAFE_OUTPUTS }} - - name: Clean up network proxy hook files - if: always() - run: | - rm -rf .claude/hooks/network_permissions.py || true - rm -rf .claude/hooks || true - rm -rf .claude || true - name: Redact secrets in logs if: always() uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 diff --git a/.github/workflows/developer-docs-consolidator.lock.yml b/.github/workflows/developer-docs-consolidator.lock.yml index 3e3571fadf..ece980773b 100644 --- a/.github/workflows/developer-docs-consolidator.lock.yml +++ b/.github/workflows/developer-docs-consolidator.lock.yml @@ -963,117 +963,16 @@ jobs: uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6 with: node-version: '24' + - name: Install awf binary + run: | + echo "Installing awf from release: v0.3.0" + curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.3.0/awf-linux-x64 -o awf + chmod +x awf + sudo mv awf /usr/local/bin/ + which awf + awf --version - name: Install Claude Code CLI run: npm install -g @anthropic-ai/claude-code@2.0.53 - - name: Generate Claude Settings - run: | - mkdir -p /tmp/gh-aw/.claude - cat > /tmp/gh-aw/.claude/settings.json << 'EOF' - { - "hooks": { - "PreToolUse": [ - { - "matcher": "WebFetch|WebSearch", - "hooks": [ - { - "type": "command", - "command": ".claude/hooks/network_permissions.py" - } - ] - } - ] - } - 
} - EOF - - name: Generate Network Permissions Hook - run: | - mkdir -p .claude/hooks - cat > .claude/hooks/network_permissions.py << 'EOF' - #!/usr/bin/env python3 - """ - Network permissions validator for Claude Code engine. - Generated by gh-aw from workflow-level network configuration. - """ - - import json - import sys - import urllib.parse - import re - - # Domain allow-list (populated during generation) - # JSON string is safely parsed using json.loads() to eliminate quoting vulnerabilities - ALLOWED_DOMAINS = json.loads('''["crl3.digicert.com","crl4.digicert.com","ocsp.digicert.com","ts-crl.ws.symantec.com","ts-ocsp.ws.symantec.com","crl.geotrust.com","ocsp.geotrust.com","crl.thawte.com","ocsp.thawte.com","crl.verisign.com","ocsp.verisign.com","crl.globalsign.com","ocsp.globalsign.com","crls.ssl.com","ocsp.ssl.com","crl.identrust.com","ocsp.identrust.com","crl.sectigo.com","ocsp.sectigo.com","crl.usertrust.com","ocsp.usertrust.com","s.symcb.com","s.symcd.com","json-schema.org","json.schemastore.org","archive.ubuntu.com","security.ubuntu.com","ppa.launchpad.net","keyserver.ubuntu.com","azure.archive.ubuntu.com","api.snapcraft.io","packagecloud.io","packages.cloud.google.com","packages.microsoft.com","*.githubusercontent.com","raw.githubusercontent.com","objects.githubusercontent.com","lfs.github.com","github-cloud.githubusercontent.com","github-cloud.s3.amazonaws.com","codeload.github.com"]''') - - def extract_domain(url_or_query): - """Extract domain from URL or search query.""" - if not url_or_query: - return None - - if url_or_query.startswith(('http://', 'https://')): - return urllib.parse.urlparse(url_or_query).netloc.lower() - - # Check for domain patterns in search queries - match = re.search(r'site:([a-zA-Z0-9.-]+\.[a-zA-Z]{2,})', url_or_query) - if match: - return match.group(1).lower() - - return None - - def is_domain_allowed(domain): - """Check if domain is allowed.""" - if not domain: - # If no domain detected, allow only if not under deny-all policy - return bool(ALLOWED_DOMAINS) # False if empty list (deny-all), True if has domains - - # Empty allowed domains means deny all - if not ALLOWED_DOMAINS: - return False - - for pattern in ALLOWED_DOMAINS: - regex = pattern.replace('.', r'\.').replace('*', '.*') - if re.match(f'^{regex}$', domain): - return True - return False - - # Main logic - try: - data = json.load(sys.stdin) - tool_name = data.get('tool_name', '') - tool_input = data.get('tool_input', {}) - - if tool_name not in ['WebFetch', 'WebSearch']: - sys.exit(0) # Allow other tools - - target = tool_input.get('url') or tool_input.get('query', '') - domain = extract_domain(target) - - # For WebSearch, apply domain restrictions consistently - # If no domain detected in search query, check if restrictions are in place - if tool_name == 'WebSearch' and not domain: - # Since this hook is only generated when network permissions are configured, - # empty ALLOWED_DOMAINS means deny-all policy - if not ALLOWED_DOMAINS: # Empty list means deny all - print(f"Network access blocked: deny-all policy in effect", file=sys.stderr) - print(f"No domains are allowed for WebSearch", file=sys.stderr) - sys.exit(2) # Block under deny-all policy - else: - print(f"Network access blocked for web-search: no specific domain detected", file=sys.stderr) - print(f"Allowed domains: {', '.join(ALLOWED_DOMAINS)}", file=sys.stderr) - sys.exit(2) # Block general searches when domain allowlist is configured - - if not is_domain_allowed(domain): - print(f"Network access blocked for domain: 
{domain}", file=sys.stderr) - print(f"Allowed domains: {', '.join(ALLOWED_DOMAINS)}", file=sys.stderr) - sys.exit(2) # Block with feedback to Claude - - sys.exit(0) # Allow - - except Exception as e: - print(f"Network validation error: {e}", file=sys.stderr) - sys.exit(2) # Block on errors - - EOF - chmod +x .claude/hooks/network_permissions.py - name: Downloading container images run: | set -e @@ -2722,7 +2621,7 @@ jobs: event_name: context.eventName, staged: false, steps: { - firewall: "" + firewall: "squid" }, created_at: new Date().toISOString() }; @@ -2841,7 +2740,9 @@ jobs: run: | set -o pipefail # Execute Claude Code CLI with prompt from file - claude --print --mcp-config /tmp/gh-aw/mcp-config/mcp-servers.json --allowed-tools 'Bash(cat specs/*.md),Bash(cat),Bash(date),Bash(echo),Bash(find specs -name '\''*.md'\''),Bash(git add:*),Bash(git branch:*),Bash(git checkout:*),Bash(git commit:*),Bash(git merge:*),Bash(git rm:*),Bash(git status),Bash(git switch:*),Bash(grep -r '\''*'\'' specs),Bash(grep),Bash(head),Bash(ls -la specs),Bash(ls),Bash(pwd),Bash(sort),Bash(tail),Bash(uniq),Bash(wc -l specs/*.md),Bash(wc),Bash(yq),BashOutput,Edit,Edit(/tmp/gh-aw/cache-memory/*),ExitPlanMode,Glob,Grep,KillBash,LS,MultiEdit,MultiEdit(/tmp/gh-aw/cache-memory/*),NotebookEdit,NotebookRead,Read,Read(/tmp/gh-aw/cache-memory/*),Task,TodoWrite,Write,Write(/tmp/gh-aw/cache-memory/*),mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_job_logs,mcp__github__get_label,mcp__github__get_latest_release,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_review_comments,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_release_by_tag,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__issue_read,mcp__github__list_branches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_dependabot_alerts,mcp__github__list_discussion_categories,mcp__github__list_discussions,mcp__github__list_issue_types,mcp__github__list_issues,mcp__github__list_label,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_releases,mcp__github__list_secret_scanning_alerts,mcp__github__list_starred_repositories,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_runs,mcp__github__list_workflows,mcp__github__pull_request_read,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_repositories,mcp__github__search_users' --debug --verbose --permission-mode bypassPermissions --output-format stream-json --settings /tmp/gh-aw/.claude/settings.json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)" 2>&1 | tee /tmp/gh-aw/agent-stdio.log + sudo -E awf --env-all --container-workdir "${GITHUB_WORKSPACE}" --mount /tmp:/tmp:rw --mount "${GITHUB_WORKSPACE}:${GITHUB_WORKSPACE}:rw" --allow-domains 
'*.githubusercontent.com,api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,codeload.github.com,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,github-cloud.githubusercontent.com,github-cloud.s3.amazonaws.com,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,lfs.github.com,objects.githubusercontent.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,ppa.launchpad.net,raw.githubusercontent.com,s.symcb.com,s.symcd.com,security.ubuntu.com,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com' --log-level info \ + -- claude --print --mcp-config /tmp/gh-aw/mcp-config/mcp-servers.json --allowed-tools 'Bash(cat specs/*.md),Bash(cat),Bash(date),Bash(echo),Bash(find specs -name '\''*.md'\''),Bash(git add:*),Bash(git branch:*),Bash(git checkout:*),Bash(git commit:*),Bash(git merge:*),Bash(git rm:*),Bash(git status),Bash(git switch:*),Bash(grep -r '\''*'\'' specs),Bash(grep),Bash(head),Bash(ls -la specs),Bash(ls),Bash(pwd),Bash(sort),Bash(tail),Bash(uniq),Bash(wc -l specs/*.md),Bash(wc),Bash(yq),BashOutput,Edit,Edit(/tmp/gh-aw/cache-memory/*),ExitPlanMode,Glob,Grep,KillBash,LS,MultiEdit,MultiEdit(/tmp/gh-aw/cache-memory/*),NotebookEdit,NotebookRead,Read,Read(/tmp/gh-aw/cache-memory/*),Task,TodoWrite,Write,Write(/tmp/gh-aw/cache-memory/*),mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_job_logs,mcp__github__get_label,mcp__github__get_latest_release,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_review_comments,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_release_by_tag,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__issue_read,mcp__github__list_branches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_dependabot_alerts,mcp__github__list_discussion_categories,mcp__github__list_discussions,mcp__github__list_issue_types,mcp__github__list_issues,mcp__github__list_label,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_releases,mcp__github__list_secret_scanning_alerts,mcp__github__list_starred_repositories,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_runs,mcp__github__list_workflows,mcp__github__pull_request_read,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_repositories,mcp__github__search_users' --debug --verbose --permission-mode bypassPermissions --output-format stream-json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)" \ + 2>&1 | tee /tmp/gh-aw/agent-stdio.log env: ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }} CLAUDE_CODE_OAUTH_TOKEN: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }} @@ -2855,12 +2756,6 @@ jobs: BASH_DEFAULT_TIMEOUT_MS: 
"60000" BASH_MAX_TIMEOUT_MS: "60000" GH_AW_SAFE_OUTPUTS: ${{ env.GH_AW_SAFE_OUTPUTS }} - - name: Clean up network proxy hook files - if: always() - run: | - rm -rf .claude/hooks/network_permissions.py || true - rm -rf .claude/hooks || true - rm -rf .claude || true - name: Redact secrets in logs if: always() uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 diff --git a/.github/workflows/example-workflow-analyzer.lock.yml b/.github/workflows/example-workflow-analyzer.lock.yml index 651bd3896a..94dad5f80b 100644 --- a/.github/workflows/example-workflow-analyzer.lock.yml +++ b/.github/workflows/example-workflow-analyzer.lock.yml @@ -376,115 +376,6 @@ jobs: node-version: '24' - name: Install Claude Code CLI run: npm install -g @anthropic-ai/claude-code@2.0.53 - - name: Generate Claude Settings - run: | - mkdir -p /tmp/gh-aw/.claude - cat > /tmp/gh-aw/.claude/settings.json << 'EOF' - { - "hooks": { - "PreToolUse": [ - { - "matcher": "WebFetch|WebSearch", - "hooks": [ - { - "type": "command", - "command": ".claude/hooks/network_permissions.py" - } - ] - } - ] - } - } - EOF - - name: Generate Network Permissions Hook - run: | - mkdir -p .claude/hooks - cat > .claude/hooks/network_permissions.py << 'EOF' - #!/usr/bin/env python3 - """ - Network permissions validator for Claude Code engine. - Generated by gh-aw from workflow-level network configuration. - """ - - import json - import sys - import urllib.parse - import re - - # Domain allow-list (populated during generation) - # JSON string is safely parsed using json.loads() to eliminate quoting vulnerabilities - ALLOWED_DOMAINS = json.loads('''["crl3.digicert.com","crl4.digicert.com","ocsp.digicert.com","ts-crl.ws.symantec.com","ts-ocsp.ws.symantec.com","crl.geotrust.com","ocsp.geotrust.com","crl.thawte.com","ocsp.thawte.com","crl.verisign.com","ocsp.verisign.com","crl.globalsign.com","ocsp.globalsign.com","crls.ssl.com","ocsp.ssl.com","crl.identrust.com","ocsp.identrust.com","crl.sectigo.com","ocsp.sectigo.com","crl.usertrust.com","ocsp.usertrust.com","s.symcb.com","s.symcd.com","json-schema.org","json.schemastore.org","archive.ubuntu.com","security.ubuntu.com","ppa.launchpad.net","keyserver.ubuntu.com","azure.archive.ubuntu.com","api.snapcraft.io","packagecloud.io","packages.cloud.google.com","packages.microsoft.com"]''') - - def extract_domain(url_or_query): - """Extract domain from URL or search query.""" - if not url_or_query: - return None - - if url_or_query.startswith(('http://', 'https://')): - return urllib.parse.urlparse(url_or_query).netloc.lower() - - # Check for domain patterns in search queries - match = re.search(r'site:([a-zA-Z0-9.-]+\.[a-zA-Z]{2,})', url_or_query) - if match: - return match.group(1).lower() - - return None - - def is_domain_allowed(domain): - """Check if domain is allowed.""" - if not domain: - # If no domain detected, allow only if not under deny-all policy - return bool(ALLOWED_DOMAINS) # False if empty list (deny-all), True if has domains - - # Empty allowed domains means deny all - if not ALLOWED_DOMAINS: - return False - - for pattern in ALLOWED_DOMAINS: - regex = pattern.replace('.', r'\.').replace('*', '.*') - if re.match(f'^{regex}$', domain): - return True - return False - - # Main logic - try: - data = json.load(sys.stdin) - tool_name = data.get('tool_name', '') - tool_input = data.get('tool_input', {}) - - if tool_name not in ['WebFetch', 'WebSearch']: - sys.exit(0) # Allow other tools - - target = tool_input.get('url') or tool_input.get('query', '') - domain = 
extract_domain(target) - - # For WebSearch, apply domain restrictions consistently - # If no domain detected in search query, check if restrictions are in place - if tool_name == 'WebSearch' and not domain: - # Since this hook is only generated when network permissions are configured, - # empty ALLOWED_DOMAINS means deny-all policy - if not ALLOWED_DOMAINS: # Empty list means deny all - print(f"Network access blocked: deny-all policy in effect", file=sys.stderr) - print(f"No domains are allowed for WebSearch", file=sys.stderr) - sys.exit(2) # Block under deny-all policy - else: - print(f"Network access blocked for web-search: no specific domain detected", file=sys.stderr) - print(f"Allowed domains: {', '.join(ALLOWED_DOMAINS)}", file=sys.stderr) - sys.exit(2) # Block general searches when domain allowlist is configured - - if not is_domain_allowed(domain): - print(f"Network access blocked for domain: {domain}", file=sys.stderr) - print(f"Allowed domains: {', '.join(ALLOWED_DOMAINS)}", file=sys.stderr) - sys.exit(2) # Block with feedback to Claude - - sys.exit(0) # Allow - - except Exception as e: - print(f"Network validation error: {e}", file=sys.stderr) - sys.exit(2) # Block on errors - - EOF - chmod +x .claude/hooks/network_permissions.py - name: Downloading container images run: | set -e @@ -1630,7 +1521,7 @@ jobs: run: | set -o pipefail # Execute Claude Code CLI with prompt from file - claude --print --mcp-config /tmp/gh-aw/mcp-config/mcp-servers.json --allowed-tools ExitPlanMode,Glob,Grep,LS,NotebookRead,Read,Task,TodoWrite,Write,mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_job_logs,mcp__github__get_label,mcp__github__get_latest_release,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_review_comments,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_release_by_tag,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__issue_read,mcp__github__list_branches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_dependabot_alerts,mcp__github__list_discussion_categories,mcp__github__list_discussions,mcp__github__list_issue_types,mcp__github__list_issues,mcp__github__list_label,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_releases,mcp__github__list_secret_scanning_alerts,mcp__github__list_starred_repositories,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_runs,mcp__github__list_workflows,mcp__github__pull_request_read,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_repositories,mcp__github__search_users --debug --verbose --permission-mode bypassPermissions --output-format stream-json --settings /tmp/gh-aw/.claude/settings.json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)" 2>&1 | tee /tmp/gh-aw/agent-stdio.log + claude --print --mcp-config /tmp/gh-aw/mcp-config/mcp-servers.json --allowed-tools 
ExitPlanMode,Glob,Grep,LS,NotebookRead,Read,Task,TodoWrite,Write,mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_job_logs,mcp__github__get_label,mcp__github__get_latest_release,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_review_comments,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_release_by_tag,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__issue_read,mcp__github__list_branches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_dependabot_alerts,mcp__github__list_discussion_categories,mcp__github__list_discussions,mcp__github__list_issue_types,mcp__github__list_issues,mcp__github__list_label,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_releases,mcp__github__list_secret_scanning_alerts,mcp__github__list_starred_repositories,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_runs,mcp__github__list_workflows,mcp__github__pull_request_read,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_repositories,mcp__github__search_users --debug --verbose --permission-mode bypassPermissions --output-format stream-json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)" 2>&1 | tee /tmp/gh-aw/agent-stdio.log env: ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }} CLAUDE_CODE_OAUTH_TOKEN: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }} @@ -1644,12 +1535,6 @@ jobs: BASH_DEFAULT_TIMEOUT_MS: "60000" BASH_MAX_TIMEOUT_MS: "60000" GH_AW_SAFE_OUTPUTS: ${{ env.GH_AW_SAFE_OUTPUTS }} - - name: Clean up network proxy hook files - if: always() - run: | - rm -rf .claude/hooks/network_permissions.py || true - rm -rf .claude/hooks || true - rm -rf .claude || true - name: Redact secrets in logs if: always() uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 diff --git a/.github/workflows/github-mcp-structural-analysis.lock.yml b/.github/workflows/github-mcp-structural-analysis.lock.yml index e440eb50e1..046f730814 100644 --- a/.github/workflows/github-mcp-structural-analysis.lock.yml +++ b/.github/workflows/github-mcp-structural-analysis.lock.yml @@ -977,117 +977,16 @@ jobs: uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6 with: node-version: '24' + - name: Install awf binary + run: | + echo "Installing awf from release: v0.3.0" + curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.3.0/awf-linux-x64 -o awf + chmod +x awf + sudo mv awf /usr/local/bin/ + which awf + awf --version - name: Install Claude Code CLI run: npm install -g @anthropic-ai/claude-code@2.0.53 - - name: Generate Claude Settings - run: | - mkdir -p /tmp/gh-aw/.claude - cat > /tmp/gh-aw/.claude/settings.json << 'EOF' - { - "hooks": { - "PreToolUse": [ - { - "matcher": "WebFetch|WebSearch", - "hooks": [ - { - "type": "command", - "command": ".claude/hooks/network_permissions.py" - } - ] - } - ] - } - } - EOF - - name: Generate Network 
Permissions Hook - run: | - mkdir -p .claude/hooks - cat > .claude/hooks/network_permissions.py << 'EOF' - #!/usr/bin/env python3 - """ - Network permissions validator for Claude Code engine. - Generated by gh-aw from workflow-level network configuration. - """ - - import json - import sys - import urllib.parse - import re - - # Domain allow-list (populated during generation) - # JSON string is safely parsed using json.loads() to eliminate quoting vulnerabilities - ALLOWED_DOMAINS = json.loads('''["crl3.digicert.com","crl4.digicert.com","ocsp.digicert.com","ts-crl.ws.symantec.com","ts-ocsp.ws.symantec.com","crl.geotrust.com","ocsp.geotrust.com","crl.thawte.com","ocsp.thawte.com","crl.verisign.com","ocsp.verisign.com","crl.globalsign.com","ocsp.globalsign.com","crls.ssl.com","ocsp.ssl.com","crl.identrust.com","ocsp.identrust.com","crl.sectigo.com","ocsp.sectigo.com","crl.usertrust.com","ocsp.usertrust.com","s.symcb.com","s.symcd.com","json-schema.org","json.schemastore.org","archive.ubuntu.com","security.ubuntu.com","ppa.launchpad.net","keyserver.ubuntu.com","azure.archive.ubuntu.com","api.snapcraft.io","packagecloud.io","packages.cloud.google.com","packages.microsoft.com"]''') - - def extract_domain(url_or_query): - """Extract domain from URL or search query.""" - if not url_or_query: - return None - - if url_or_query.startswith(('http://', 'https://')): - return urllib.parse.urlparse(url_or_query).netloc.lower() - - # Check for domain patterns in search queries - match = re.search(r'site:([a-zA-Z0-9.-]+\.[a-zA-Z]{2,})', url_or_query) - if match: - return match.group(1).lower() - - return None - - def is_domain_allowed(domain): - """Check if domain is allowed.""" - if not domain: - # If no domain detected, allow only if not under deny-all policy - return bool(ALLOWED_DOMAINS) # False if empty list (deny-all), True if has domains - - # Empty allowed domains means deny all - if not ALLOWED_DOMAINS: - return False - - for pattern in ALLOWED_DOMAINS: - regex = pattern.replace('.', r'\.').replace('*', '.*') - if re.match(f'^{regex}$', domain): - return True - return False - - # Main logic - try: - data = json.load(sys.stdin) - tool_name = data.get('tool_name', '') - tool_input = data.get('tool_input', {}) - - if tool_name not in ['WebFetch', 'WebSearch']: - sys.exit(0) # Allow other tools - - target = tool_input.get('url') or tool_input.get('query', '') - domain = extract_domain(target) - - # For WebSearch, apply domain restrictions consistently - # If no domain detected in search query, check if restrictions are in place - if tool_name == 'WebSearch' and not domain: - # Since this hook is only generated when network permissions are configured, - # empty ALLOWED_DOMAINS means deny-all policy - if not ALLOWED_DOMAINS: # Empty list means deny all - print(f"Network access blocked: deny-all policy in effect", file=sys.stderr) - print(f"No domains are allowed for WebSearch", file=sys.stderr) - sys.exit(2) # Block under deny-all policy - else: - print(f"Network access blocked for web-search: no specific domain detected", file=sys.stderr) - print(f"Allowed domains: {', '.join(ALLOWED_DOMAINS)}", file=sys.stderr) - sys.exit(2) # Block general searches when domain allowlist is configured - - if not is_domain_allowed(domain): - print(f"Network access blocked for domain: {domain}", file=sys.stderr) - print(f"Allowed domains: {', '.join(ALLOWED_DOMAINS)}", file=sys.stderr) - sys.exit(2) # Block with feedback to Claude - - sys.exit(0) # Allow - - except Exception as e: - print(f"Network validation error: 
{e}", file=sys.stderr) - sys.exit(2) # Block on errors - - EOF - chmod +x .claude/hooks/network_permissions.py - name: Downloading container images run: | set -e @@ -2717,7 +2616,7 @@ jobs: event_name: context.eventName, staged: false, steps: { - firewall: "" + firewall: "squid" }, created_at: new Date().toISOString() }; @@ -2809,7 +2708,9 @@ jobs: run: | set -o pipefail # Execute Claude Code CLI with prompt from file - claude --print --mcp-config /tmp/gh-aw/mcp-config/mcp-servers.json --allowed-tools 'Bash,BashOutput,Edit(/tmp/gh-aw/cache-memory/*),ExitPlanMode,Glob,Grep,KillBash,LS,MultiEdit(/tmp/gh-aw/cache-memory/*),NotebookRead,Read,Read(/tmp/gh-aw/cache-memory/*),Task,TodoWrite,Write,Write(/tmp/gh-aw/cache-memory/*),mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_job_logs,mcp__github__get_label,mcp__github__get_latest_release,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_review_comments,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_release_by_tag,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__issue_read,mcp__github__list_branches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_dependabot_alerts,mcp__github__list_discussion_categories,mcp__github__list_discussions,mcp__github__list_issue_types,mcp__github__list_issues,mcp__github__list_label,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_releases,mcp__github__list_secret_scanning_alerts,mcp__github__list_starred_repositories,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_runs,mcp__github__list_workflows,mcp__github__pull_request_read,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_repositories,mcp__github__search_users' --debug --verbose --permission-mode bypassPermissions --output-format stream-json --settings /tmp/gh-aw/.claude/settings.json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)" 2>&1 | tee /tmp/gh-aw/agent-stdio.log + sudo -E awf --env-all --container-workdir "${GITHUB_WORKSPACE}" --mount /tmp:/tmp:rw --mount "${GITHUB_WORKSPACE}:${GITHUB_WORKSPACE}:rw" --allow-domains api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,ppa.launchpad.net,s.symcb.com,s.symcd.com,security.ubuntu.com,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com --log-level info \ + -- claude --print --mcp-config /tmp/gh-aw/mcp-config/mcp-servers.json --allowed-tools 
'Bash,BashOutput,Edit(/tmp/gh-aw/cache-memory/*),ExitPlanMode,Glob,Grep,KillBash,LS,MultiEdit(/tmp/gh-aw/cache-memory/*),NotebookRead,Read,Read(/tmp/gh-aw/cache-memory/*),Task,TodoWrite,Write,Write(/tmp/gh-aw/cache-memory/*),mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_job_logs,mcp__github__get_label,mcp__github__get_latest_release,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_review_comments,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_release_by_tag,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__issue_read,mcp__github__list_branches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_dependabot_alerts,mcp__github__list_discussion_categories,mcp__github__list_discussions,mcp__github__list_issue_types,mcp__github__list_issues,mcp__github__list_label,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_releases,mcp__github__list_secret_scanning_alerts,mcp__github__list_starred_repositories,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_runs,mcp__github__list_workflows,mcp__github__pull_request_read,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_repositories,mcp__github__search_users' --debug --verbose --permission-mode bypassPermissions --output-format stream-json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)" \ + 2>&1 | tee /tmp/gh-aw/agent-stdio.log env: ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }} CLAUDE_CODE_OAUTH_TOKEN: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }} @@ -2823,12 +2724,6 @@ jobs: BASH_DEFAULT_TIMEOUT_MS: "60000" BASH_MAX_TIMEOUT_MS: "60000" GH_AW_SAFE_OUTPUTS: ${{ env.GH_AW_SAFE_OUTPUTS }} - - name: Clean up network proxy hook files - if: always() - run: | - rm -rf .claude/hooks/network_permissions.py || true - rm -rf .claude/hooks || true - rm -rf .claude || true - name: Redact secrets in logs if: always() uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 diff --git a/.github/workflows/github-mcp-tools-report.lock.yml b/.github/workflows/github-mcp-tools-report.lock.yml index 7eae866032..2c420e249b 100644 --- a/.github/workflows/github-mcp-tools-report.lock.yml +++ b/.github/workflows/github-mcp-tools-report.lock.yml @@ -837,115 +837,6 @@ jobs: node-version: '24' - name: Install Claude Code CLI run: npm install -g @anthropic-ai/claude-code@2.0.53 - - name: Generate Claude Settings - run: | - mkdir -p /tmp/gh-aw/.claude - cat > /tmp/gh-aw/.claude/settings.json << 'EOF' - { - "hooks": { - "PreToolUse": [ - { - "matcher": "WebFetch|WebSearch", - "hooks": [ - { - "type": "command", - "command": ".claude/hooks/network_permissions.py" - } - ] - } - ] - } - } - EOF - - name: Generate Network Permissions Hook - run: | - mkdir -p .claude/hooks - cat > .claude/hooks/network_permissions.py << 'EOF' - #!/usr/bin/env python3 - """ - Network permissions validator for Claude Code engine. 
- Generated by gh-aw from workflow-level network configuration. - """ - - import json - import sys - import urllib.parse - import re - - # Domain allow-list (populated during generation) - # JSON string is safely parsed using json.loads() to eliminate quoting vulnerabilities - ALLOWED_DOMAINS = json.loads('''["crl3.digicert.com","crl4.digicert.com","ocsp.digicert.com","ts-crl.ws.symantec.com","ts-ocsp.ws.symantec.com","crl.geotrust.com","ocsp.geotrust.com","crl.thawte.com","ocsp.thawte.com","crl.verisign.com","ocsp.verisign.com","crl.globalsign.com","ocsp.globalsign.com","crls.ssl.com","ocsp.ssl.com","crl.identrust.com","ocsp.identrust.com","crl.sectigo.com","ocsp.sectigo.com","crl.usertrust.com","ocsp.usertrust.com","s.symcb.com","s.symcd.com","json-schema.org","json.schemastore.org","archive.ubuntu.com","security.ubuntu.com","ppa.launchpad.net","keyserver.ubuntu.com","azure.archive.ubuntu.com","api.snapcraft.io","packagecloud.io","packages.cloud.google.com","packages.microsoft.com"]''') - - def extract_domain(url_or_query): - """Extract domain from URL or search query.""" - if not url_or_query: - return None - - if url_or_query.startswith(('http://', 'https://')): - return urllib.parse.urlparse(url_or_query).netloc.lower() - - # Check for domain patterns in search queries - match = re.search(r'site:([a-zA-Z0-9.-]+\.[a-zA-Z]{2,})', url_or_query) - if match: - return match.group(1).lower() - - return None - - def is_domain_allowed(domain): - """Check if domain is allowed.""" - if not domain: - # If no domain detected, allow only if not under deny-all policy - return bool(ALLOWED_DOMAINS) # False if empty list (deny-all), True if has domains - - # Empty allowed domains means deny all - if not ALLOWED_DOMAINS: - return False - - for pattern in ALLOWED_DOMAINS: - regex = pattern.replace('.', r'\.').replace('*', '.*') - if re.match(f'^{regex}$', domain): - return True - return False - - # Main logic - try: - data = json.load(sys.stdin) - tool_name = data.get('tool_name', '') - tool_input = data.get('tool_input', {}) - - if tool_name not in ['WebFetch', 'WebSearch']: - sys.exit(0) # Allow other tools - - target = tool_input.get('url') or tool_input.get('query', '') - domain = extract_domain(target) - - # For WebSearch, apply domain restrictions consistently - # If no domain detected in search query, check if restrictions are in place - if tool_name == 'WebSearch' and not domain: - # Since this hook is only generated when network permissions are configured, - # empty ALLOWED_DOMAINS means deny-all policy - if not ALLOWED_DOMAINS: # Empty list means deny all - print(f"Network access blocked: deny-all policy in effect", file=sys.stderr) - print(f"No domains are allowed for WebSearch", file=sys.stderr) - sys.exit(2) # Block under deny-all policy - else: - print(f"Network access blocked for web-search: no specific domain detected", file=sys.stderr) - print(f"Allowed domains: {', '.join(ALLOWED_DOMAINS)}", file=sys.stderr) - sys.exit(2) # Block general searches when domain allowlist is configured - - if not is_domain_allowed(domain): - print(f"Network access blocked for domain: {domain}", file=sys.stderr) - print(f"Allowed domains: {', '.join(ALLOWED_DOMAINS)}", file=sys.stderr) - sys.exit(2) # Block with feedback to Claude - - sys.exit(0) # Allow - - except Exception as e: - print(f"Network validation error: {e}", file=sys.stderr) - sys.exit(2) # Block on errors - - EOF - chmod +x .claude/hooks/network_permissions.py - name: Setup Safe Outputs Collector MCP run: | mkdir -p /tmp/gh-aw/safeoutputs @@ 
-2567,7 +2458,7 @@ jobs: run: | set -o pipefail # Execute Claude Code CLI with prompt from file - claude --print --mcp-config /tmp/gh-aw/mcp-config/mcp-servers.json --allowed-tools 'Bash(cat),Bash(date),Bash(echo),Bash(git add:*),Bash(git branch:*),Bash(git checkout:*),Bash(git commit:*),Bash(git merge:*),Bash(git rm:*),Bash(git status),Bash(git switch:*),Bash(grep),Bash(head),Bash(ls),Bash(pwd),Bash(sort),Bash(tail),Bash(uniq),Bash(wc),Bash(yq),BashOutput,Edit,Edit(/tmp/gh-aw/cache-memory/*),ExitPlanMode,Glob,Grep,KillBash,LS,MultiEdit,MultiEdit(/tmp/gh-aw/cache-memory/*),NotebookEdit,NotebookRead,Read,Read(/tmp/gh-aw/cache-memory/*),Task,TodoWrite,Write,Write(/tmp/gh-aw/cache-memory/*),mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_job_logs,mcp__github__get_label,mcp__github__get_latest_release,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_review_comments,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_release_by_tag,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__issue_read,mcp__github__list_branches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_dependabot_alerts,mcp__github__list_discussion_categories,mcp__github__list_discussions,mcp__github__list_issue_types,mcp__github__list_issues,mcp__github__list_label,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_releases,mcp__github__list_secret_scanning_alerts,mcp__github__list_starred_repositories,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_runs,mcp__github__list_workflows,mcp__github__pull_request_read,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_repositories,mcp__github__search_users' --debug --verbose --permission-mode bypassPermissions --output-format stream-json --settings /tmp/gh-aw/.claude/settings.json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)" 2>&1 | tee /tmp/gh-aw/agent-stdio.log + claude --print --mcp-config /tmp/gh-aw/mcp-config/mcp-servers.json --allowed-tools 'Bash(cat),Bash(date),Bash(echo),Bash(git add:*),Bash(git branch:*),Bash(git checkout:*),Bash(git commit:*),Bash(git merge:*),Bash(git rm:*),Bash(git status),Bash(git 
switch:*),Bash(grep),Bash(head),Bash(ls),Bash(pwd),Bash(sort),Bash(tail),Bash(uniq),Bash(wc),Bash(yq),BashOutput,Edit,Edit(/tmp/gh-aw/cache-memory/*),ExitPlanMode,Glob,Grep,KillBash,LS,MultiEdit,MultiEdit(/tmp/gh-aw/cache-memory/*),NotebookEdit,NotebookRead,Read,Read(/tmp/gh-aw/cache-memory/*),Task,TodoWrite,Write,Write(/tmp/gh-aw/cache-memory/*),mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_job_logs,mcp__github__get_label,mcp__github__get_latest_release,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_review_comments,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_release_by_tag,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__issue_read,mcp__github__list_branches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_dependabot_alerts,mcp__github__list_discussion_categories,mcp__github__list_discussions,mcp__github__list_issue_types,mcp__github__list_issues,mcp__github__list_label,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_releases,mcp__github__list_secret_scanning_alerts,mcp__github__list_starred_repositories,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_runs,mcp__github__list_workflows,mcp__github__pull_request_read,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_repositories,mcp__github__search_users' --debug --verbose --permission-mode bypassPermissions --output-format stream-json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)" 2>&1 | tee /tmp/gh-aw/agent-stdio.log env: ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }} CLAUDE_CODE_OAUTH_TOKEN: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }} @@ -2581,12 +2472,6 @@ jobs: BASH_DEFAULT_TIMEOUT_MS: "60000" BASH_MAX_TIMEOUT_MS: "60000" GH_AW_SAFE_OUTPUTS: ${{ env.GH_AW_SAFE_OUTPUTS }} - - name: Clean up network proxy hook files - if: always() - run: | - rm -rf .claude/hooks/network_permissions.py || true - rm -rf .claude/hooks || true - rm -rf .claude || true - name: Redact secrets in logs if: always() uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 diff --git a/.github/workflows/go-logger.lock.yml b/.github/workflows/go-logger.lock.yml index c134aa3bff..c70d528a5f 100644 --- a/.github/workflows/go-logger.lock.yml +++ b/.github/workflows/go-logger.lock.yml @@ -565,115 +565,6 @@ jobs: node-version: '24' - name: Install Claude Code CLI run: npm install -g @anthropic-ai/claude-code@2.0.53 - - name: Generate Claude Settings - run: | - mkdir -p /tmp/gh-aw/.claude - cat > /tmp/gh-aw/.claude/settings.json << 'EOF' - { - "hooks": { - "PreToolUse": [ - { - "matcher": "WebFetch|WebSearch", - "hooks": [ - { - "type": "command", - "command": ".claude/hooks/network_permissions.py" - } - ] - } - ] - } - } - EOF - - name: Generate Network Permissions Hook - run: | - mkdir -p .claude/hooks - cat > .claude/hooks/network_permissions.py << 'EOF' - #!/usr/bin/env python3 - """ - 
Network permissions validator for Claude Code engine. - Generated by gh-aw from workflow-level network configuration. - """ - - import json - import sys - import urllib.parse - import re - - # Domain allow-list (populated during generation) - # JSON string is safely parsed using json.loads() to eliminate quoting vulnerabilities - ALLOWED_DOMAINS = json.loads('''["crl3.digicert.com","crl4.digicert.com","ocsp.digicert.com","ts-crl.ws.symantec.com","ts-ocsp.ws.symantec.com","crl.geotrust.com","ocsp.geotrust.com","crl.thawte.com","ocsp.thawte.com","crl.verisign.com","ocsp.verisign.com","crl.globalsign.com","ocsp.globalsign.com","crls.ssl.com","ocsp.ssl.com","crl.identrust.com","ocsp.identrust.com","crl.sectigo.com","ocsp.sectigo.com","crl.usertrust.com","ocsp.usertrust.com","s.symcb.com","s.symcd.com","json-schema.org","json.schemastore.org","archive.ubuntu.com","security.ubuntu.com","ppa.launchpad.net","keyserver.ubuntu.com","azure.archive.ubuntu.com","api.snapcraft.io","packagecloud.io","packages.cloud.google.com","packages.microsoft.com"]''') - - def extract_domain(url_or_query): - """Extract domain from URL or search query.""" - if not url_or_query: - return None - - if url_or_query.startswith(('http://', 'https://')): - return urllib.parse.urlparse(url_or_query).netloc.lower() - - # Check for domain patterns in search queries - match = re.search(r'site:([a-zA-Z0-9.-]+\.[a-zA-Z]{2,})', url_or_query) - if match: - return match.group(1).lower() - - return None - - def is_domain_allowed(domain): - """Check if domain is allowed.""" - if not domain: - # If no domain detected, allow only if not under deny-all policy - return bool(ALLOWED_DOMAINS) # False if empty list (deny-all), True if has domains - - # Empty allowed domains means deny all - if not ALLOWED_DOMAINS: - return False - - for pattern in ALLOWED_DOMAINS: - regex = pattern.replace('.', r'\.').replace('*', '.*') - if re.match(f'^{regex}$', domain): - return True - return False - - # Main logic - try: - data = json.load(sys.stdin) - tool_name = data.get('tool_name', '') - tool_input = data.get('tool_input', {}) - - if tool_name not in ['WebFetch', 'WebSearch']: - sys.exit(0) # Allow other tools - - target = tool_input.get('url') or tool_input.get('query', '') - domain = extract_domain(target) - - # For WebSearch, apply domain restrictions consistently - # If no domain detected in search query, check if restrictions are in place - if tool_name == 'WebSearch' and not domain: - # Since this hook is only generated when network permissions are configured, - # empty ALLOWED_DOMAINS means deny-all policy - if not ALLOWED_DOMAINS: # Empty list means deny all - print(f"Network access blocked: deny-all policy in effect", file=sys.stderr) - print(f"No domains are allowed for WebSearch", file=sys.stderr) - sys.exit(2) # Block under deny-all policy - else: - print(f"Network access blocked for web-search: no specific domain detected", file=sys.stderr) - print(f"Allowed domains: {', '.join(ALLOWED_DOMAINS)}", file=sys.stderr) - sys.exit(2) # Block general searches when domain allowlist is configured - - if not is_domain_allowed(domain): - print(f"Network access blocked for domain: {domain}", file=sys.stderr) - print(f"Allowed domains: {', '.join(ALLOWED_DOMAINS)}", file=sys.stderr) - sys.exit(2) # Block with feedback to Claude - - sys.exit(0) # Allow - - except Exception as e: - print(f"Network validation error: {e}", file=sys.stderr) - sys.exit(2) # Block on errors - - EOF - chmod +x .claude/hooks/network_permissions.py - name: Downloading container 
images run: | set -e @@ -2036,7 +1927,7 @@ jobs: run: | set -o pipefail # Execute Claude Code CLI with prompt from file - claude --print --mcp-config /tmp/gh-aw/mcp-config/mcp-servers.json --allowed-tools 'Bash(./gh-aw compile *),Bash(cat),Bash(date),Bash(echo),Bash(find pkg -name '\''*.go'\'' -type f ! -name '\''*_test.go'\''),Bash(git add:*),Bash(git branch:*),Bash(git checkout:*),Bash(git commit:*),Bash(git merge:*),Bash(git rm:*),Bash(git status),Bash(git switch:*),Bash(grep -n '\''func '\'' pkg/*.go),Bash(grep -r '\''var log = logger.New'\'' pkg --include='\''*.go'\''),Bash(grep),Bash(head -n * pkg/**/*.go),Bash(head),Bash(ls),Bash(make build),Bash(make recompile),Bash(pwd),Bash(sort),Bash(tail),Bash(uniq),Bash(wc -l pkg/**/*.go),Bash(wc),Bash(yq),BashOutput,Edit,Edit(/tmp/gh-aw/cache-memory/*),ExitPlanMode,Glob,Grep,KillBash,LS,MultiEdit,MultiEdit(/tmp/gh-aw/cache-memory/*),NotebookEdit,NotebookRead,Read,Read(/tmp/gh-aw/cache-memory/*),Task,TodoWrite,Write,Write(/tmp/gh-aw/cache-memory/*),mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_job_logs,mcp__github__get_label,mcp__github__get_latest_release,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_review_comments,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_release_by_tag,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__issue_read,mcp__github__list_branches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_dependabot_alerts,mcp__github__list_discussion_categories,mcp__github__list_discussions,mcp__github__list_issue_types,mcp__github__list_issues,mcp__github__list_label,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_releases,mcp__github__list_secret_scanning_alerts,mcp__github__list_starred_repositories,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_runs,mcp__github__list_workflows,mcp__github__pull_request_read,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_repositories,mcp__github__search_users' --debug --verbose --permission-mode bypassPermissions --output-format stream-json --settings /tmp/gh-aw/.claude/settings.json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)" 2>&1 | tee /tmp/gh-aw/agent-stdio.log + claude --print --mcp-config /tmp/gh-aw/mcp-config/mcp-servers.json --allowed-tools 'Bash(./gh-aw compile *),Bash(cat),Bash(date),Bash(echo),Bash(find pkg -name '\''*.go'\'' -type f ! 
-name '\''*_test.go'\''),Bash(git add:*),Bash(git branch:*),Bash(git checkout:*),Bash(git commit:*),Bash(git merge:*),Bash(git rm:*),Bash(git status),Bash(git switch:*),Bash(grep -n '\''func '\'' pkg/*.go),Bash(grep -r '\''var log = logger.New'\'' pkg --include='\''*.go'\''),Bash(grep),Bash(head -n * pkg/**/*.go),Bash(head),Bash(ls),Bash(make build),Bash(make recompile),Bash(pwd),Bash(sort),Bash(tail),Bash(uniq),Bash(wc -l pkg/**/*.go),Bash(wc),Bash(yq),BashOutput,Edit,Edit(/tmp/gh-aw/cache-memory/*),ExitPlanMode,Glob,Grep,KillBash,LS,MultiEdit,MultiEdit(/tmp/gh-aw/cache-memory/*),NotebookEdit,NotebookRead,Read,Read(/tmp/gh-aw/cache-memory/*),Task,TodoWrite,Write,Write(/tmp/gh-aw/cache-memory/*),mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_job_logs,mcp__github__get_label,mcp__github__get_latest_release,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_review_comments,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_release_by_tag,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__issue_read,mcp__github__list_branches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_dependabot_alerts,mcp__github__list_discussion_categories,mcp__github__list_discussions,mcp__github__list_issue_types,mcp__github__list_issues,mcp__github__list_label,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_releases,mcp__github__list_secret_scanning_alerts,mcp__github__list_starred_repositories,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_runs,mcp__github__list_workflows,mcp__github__pull_request_read,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_repositories,mcp__github__search_users' --debug --verbose --permission-mode bypassPermissions --output-format stream-json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)" 2>&1 | tee /tmp/gh-aw/agent-stdio.log env: ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }} CLAUDE_CODE_OAUTH_TOKEN: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }} @@ -2050,12 +1941,6 @@ jobs: BASH_DEFAULT_TIMEOUT_MS: "60000" BASH_MAX_TIMEOUT_MS: "60000" GH_AW_SAFE_OUTPUTS: ${{ env.GH_AW_SAFE_OUTPUTS }} - - name: Clean up network proxy hook files - if: always() - run: | - rm -rf .claude/hooks/network_permissions.py || true - rm -rf .claude/hooks || true - rm -rf .claude || true - name: Redact secrets in logs if: always() uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 diff --git a/.github/workflows/go-pattern-detector.lock.yml b/.github/workflows/go-pattern-detector.lock.yml index ff2085ca15..df3aec0db5 100644 --- a/.github/workflows/go-pattern-detector.lock.yml +++ b/.github/workflows/go-pattern-detector.lock.yml @@ -412,115 +412,6 @@ jobs: node-version: '24' - name: Install Claude Code CLI run: npm install -g @anthropic-ai/claude-code@2.0.53 - - name: Generate Claude Settings - run: | - mkdir -p /tmp/gh-aw/.claude - cat > 
/tmp/gh-aw/.claude/settings.json << 'EOF' - { - "hooks": { - "PreToolUse": [ - { - "matcher": "WebFetch|WebSearch", - "hooks": [ - { - "type": "command", - "command": ".claude/hooks/network_permissions.py" - } - ] - } - ] - } - } - EOF - - name: Generate Network Permissions Hook - run: | - mkdir -p .claude/hooks - cat > .claude/hooks/network_permissions.py << 'EOF' - #!/usr/bin/env python3 - """ - Network permissions validator for Claude Code engine. - Generated by gh-aw from workflow-level network configuration. - """ - - import json - import sys - import urllib.parse - import re - - # Domain allow-list (populated during generation) - # JSON string is safely parsed using json.loads() to eliminate quoting vulnerabilities - ALLOWED_DOMAINS = json.loads('''["crl3.digicert.com","crl4.digicert.com","ocsp.digicert.com","ts-crl.ws.symantec.com","ts-ocsp.ws.symantec.com","crl.geotrust.com","ocsp.geotrust.com","crl.thawte.com","ocsp.thawte.com","crl.verisign.com","ocsp.verisign.com","crl.globalsign.com","ocsp.globalsign.com","crls.ssl.com","ocsp.ssl.com","crl.identrust.com","ocsp.identrust.com","crl.sectigo.com","ocsp.sectigo.com","crl.usertrust.com","ocsp.usertrust.com","s.symcb.com","s.symcd.com","json-schema.org","json.schemastore.org","archive.ubuntu.com","security.ubuntu.com","ppa.launchpad.net","keyserver.ubuntu.com","azure.archive.ubuntu.com","api.snapcraft.io","packagecloud.io","packages.cloud.google.com","packages.microsoft.com"]''') - - def extract_domain(url_or_query): - """Extract domain from URL or search query.""" - if not url_or_query: - return None - - if url_or_query.startswith(('http://', 'https://')): - return urllib.parse.urlparse(url_or_query).netloc.lower() - - # Check for domain patterns in search queries - match = re.search(r'site:([a-zA-Z0-9.-]+\.[a-zA-Z]{2,})', url_or_query) - if match: - return match.group(1).lower() - - return None - - def is_domain_allowed(domain): - """Check if domain is allowed.""" - if not domain: - # If no domain detected, allow only if not under deny-all policy - return bool(ALLOWED_DOMAINS) # False if empty list (deny-all), True if has domains - - # Empty allowed domains means deny all - if not ALLOWED_DOMAINS: - return False - - for pattern in ALLOWED_DOMAINS: - regex = pattern.replace('.', r'\.').replace('*', '.*') - if re.match(f'^{regex}$', domain): - return True - return False - - # Main logic - try: - data = json.load(sys.stdin) - tool_name = data.get('tool_name', '') - tool_input = data.get('tool_input', {}) - - if tool_name not in ['WebFetch', 'WebSearch']: - sys.exit(0) # Allow other tools - - target = tool_input.get('url') or tool_input.get('query', '') - domain = extract_domain(target) - - # For WebSearch, apply domain restrictions consistently - # If no domain detected in search query, check if restrictions are in place - if tool_name == 'WebSearch' and not domain: - # Since this hook is only generated when network permissions are configured, - # empty ALLOWED_DOMAINS means deny-all policy - if not ALLOWED_DOMAINS: # Empty list means deny all - print(f"Network access blocked: deny-all policy in effect", file=sys.stderr) - print(f"No domains are allowed for WebSearch", file=sys.stderr) - sys.exit(2) # Block under deny-all policy - else: - print(f"Network access blocked for web-search: no specific domain detected", file=sys.stderr) - print(f"Allowed domains: {', '.join(ALLOWED_DOMAINS)}", file=sys.stderr) - sys.exit(2) # Block general searches when domain allowlist is configured - - if not is_domain_allowed(domain): - print(f"Network 
access blocked for domain: {domain}", file=sys.stderr) - print(f"Allowed domains: {', '.join(ALLOWED_DOMAINS)}", file=sys.stderr) - sys.exit(2) # Block with feedback to Claude - - sys.exit(0) # Allow - - except Exception as e: - print(f"Network validation error: {e}", file=sys.stderr) - sys.exit(2) # Block on errors - - EOF - chmod +x .claude/hooks/network_permissions.py - name: Downloading container images run: | set -e @@ -1699,7 +1590,7 @@ jobs: run: | set -o pipefail # Execute Claude Code CLI with prompt from file - claude --print --mcp-config /tmp/gh-aw/mcp-config/mcp-servers.json --allowed-tools ExitPlanMode,Glob,Grep,LS,NotebookRead,Read,Task,TodoWrite,Write,mcp__ast-grep,mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_job_logs,mcp__github__get_label,mcp__github__get_latest_release,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_review_comments,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_release_by_tag,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__issue_read,mcp__github__list_branches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_dependabot_alerts,mcp__github__list_discussion_categories,mcp__github__list_discussions,mcp__github__list_issue_types,mcp__github__list_issues,mcp__github__list_label,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_releases,mcp__github__list_secret_scanning_alerts,mcp__github__list_starred_repositories,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_runs,mcp__github__list_workflows,mcp__github__pull_request_read,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_repositories,mcp__github__search_users --debug --verbose --permission-mode bypassPermissions --output-format stream-json --settings /tmp/gh-aw/.claude/settings.json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)" 2>&1 | tee /tmp/gh-aw/agent-stdio.log + claude --print --mcp-config /tmp/gh-aw/mcp-config/mcp-servers.json --allowed-tools 
ExitPlanMode,Glob,Grep,LS,NotebookRead,Read,Task,TodoWrite,Write,mcp__ast-grep,mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_job_logs,mcp__github__get_label,mcp__github__get_latest_release,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_review_comments,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_release_by_tag,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__issue_read,mcp__github__list_branches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_dependabot_alerts,mcp__github__list_discussion_categories,mcp__github__list_discussions,mcp__github__list_issue_types,mcp__github__list_issues,mcp__github__list_label,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_releases,mcp__github__list_secret_scanning_alerts,mcp__github__list_starred_repositories,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_runs,mcp__github__list_workflows,mcp__github__pull_request_read,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_repositories,mcp__github__search_users --debug --verbose --permission-mode bypassPermissions --output-format stream-json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)" 2>&1 | tee /tmp/gh-aw/agent-stdio.log env: ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }} CLAUDE_CODE_OAUTH_TOKEN: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }} @@ -1713,12 +1604,6 @@ jobs: BASH_DEFAULT_TIMEOUT_MS: "60000" BASH_MAX_TIMEOUT_MS: "60000" GH_AW_SAFE_OUTPUTS: ${{ env.GH_AW_SAFE_OUTPUTS }} - - name: Clean up network proxy hook files - if: always() - run: | - rm -rf .claude/hooks/network_permissions.py || true - rm -rf .claude/hooks || true - rm -rf .claude || true - name: Redact secrets in logs if: always() uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 diff --git a/.github/workflows/instructions-janitor.lock.yml b/.github/workflows/instructions-janitor.lock.yml index 67b897fe75..0200a4bdef 100644 --- a/.github/workflows/instructions-janitor.lock.yml +++ b/.github/workflows/instructions-janitor.lock.yml @@ -446,117 +446,16 @@ jobs: uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6 with: node-version: '24' + - name: Install awf binary + run: | + echo "Installing awf from release: v0.3.0" + curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.3.0/awf-linux-x64 -o awf + chmod +x awf + sudo mv awf /usr/local/bin/ + which awf + awf --version - name: Install Claude Code CLI run: npm install -g @anthropic-ai/claude-code@2.0.53 - - name: Generate Claude Settings - run: | - mkdir -p /tmp/gh-aw/.claude - cat > /tmp/gh-aw/.claude/settings.json << 'EOF' - { - "hooks": { - "PreToolUse": [ - { - "matcher": "WebFetch|WebSearch", - "hooks": [ - { - "type": "command", - "command": ".claude/hooks/network_permissions.py" - } - ] - } - ] - } - } - EOF - - name: Generate Network Permissions Hook - run: | - 
mkdir -p .claude/hooks - cat > .claude/hooks/network_permissions.py << 'EOF' - #!/usr/bin/env python3 - """ - Network permissions validator for Claude Code engine. - Generated by gh-aw from workflow-level network configuration. - """ - - import json - import sys - import urllib.parse - import re - - # Domain allow-list (populated during generation) - # JSON string is safely parsed using json.loads() to eliminate quoting vulnerabilities - ALLOWED_DOMAINS = json.loads('''["crl3.digicert.com","crl4.digicert.com","ocsp.digicert.com","ts-crl.ws.symantec.com","ts-ocsp.ws.symantec.com","crl.geotrust.com","ocsp.geotrust.com","crl.thawte.com","ocsp.thawte.com","crl.verisign.com","ocsp.verisign.com","crl.globalsign.com","ocsp.globalsign.com","crls.ssl.com","ocsp.ssl.com","crl.identrust.com","ocsp.identrust.com","crl.sectigo.com","ocsp.sectigo.com","crl.usertrust.com","ocsp.usertrust.com","s.symcb.com","s.symcd.com","json-schema.org","json.schemastore.org","archive.ubuntu.com","security.ubuntu.com","ppa.launchpad.net","keyserver.ubuntu.com","azure.archive.ubuntu.com","api.snapcraft.io","packagecloud.io","packages.cloud.google.com","packages.microsoft.com","*.githubusercontent.com","raw.githubusercontent.com","objects.githubusercontent.com","lfs.github.com","github-cloud.githubusercontent.com","github-cloud.s3.amazonaws.com","codeload.github.com"]''') - - def extract_domain(url_or_query): - """Extract domain from URL or search query.""" - if not url_or_query: - return None - - if url_or_query.startswith(('http://', 'https://')): - return urllib.parse.urlparse(url_or_query).netloc.lower() - - # Check for domain patterns in search queries - match = re.search(r'site:([a-zA-Z0-9.-]+\.[a-zA-Z]{2,})', url_or_query) - if match: - return match.group(1).lower() - - return None - - def is_domain_allowed(domain): - """Check if domain is allowed.""" - if not domain: - # If no domain detected, allow only if not under deny-all policy - return bool(ALLOWED_DOMAINS) # False if empty list (deny-all), True if has domains - - # Empty allowed domains means deny all - if not ALLOWED_DOMAINS: - return False - - for pattern in ALLOWED_DOMAINS: - regex = pattern.replace('.', r'\.').replace('*', '.*') - if re.match(f'^{regex}$', domain): - return True - return False - - # Main logic - try: - data = json.load(sys.stdin) - tool_name = data.get('tool_name', '') - tool_input = data.get('tool_input', {}) - - if tool_name not in ['WebFetch', 'WebSearch']: - sys.exit(0) # Allow other tools - - target = tool_input.get('url') or tool_input.get('query', '') - domain = extract_domain(target) - - # For WebSearch, apply domain restrictions consistently - # If no domain detected in search query, check if restrictions are in place - if tool_name == 'WebSearch' and not domain: - # Since this hook is only generated when network permissions are configured, - # empty ALLOWED_DOMAINS means deny-all policy - if not ALLOWED_DOMAINS: # Empty list means deny all - print(f"Network access blocked: deny-all policy in effect", file=sys.stderr) - print(f"No domains are allowed for WebSearch", file=sys.stderr) - sys.exit(2) # Block under deny-all policy - else: - print(f"Network access blocked for web-search: no specific domain detected", file=sys.stderr) - print(f"Allowed domains: {', '.join(ALLOWED_DOMAINS)}", file=sys.stderr) - sys.exit(2) # Block general searches when domain allowlist is configured - - if not is_domain_allowed(domain): - print(f"Network access blocked for domain: {domain}", file=sys.stderr) - print(f"Allowed domains: {', 
'.join(ALLOWED_DOMAINS)}", file=sys.stderr) - sys.exit(2) # Block with feedback to Claude - - sys.exit(0) # Allow - - except Exception as e: - print(f"Network validation error: {e}", file=sys.stderr) - sys.exit(2) # Block on errors - - EOF - chmod +x .claude/hooks/network_permissions.py - name: Downloading container images run: | set -e @@ -1697,7 +1596,7 @@ jobs: event_name: context.eventName, staged: false, steps: { - firewall: "" + firewall: "squid" }, created_at: new Date().toISOString() }; @@ -1815,7 +1714,9 @@ jobs: run: | set -o pipefail # Execute Claude Code CLI with prompt from file - claude --print --mcp-config /tmp/gh-aw/mcp-config/mcp-servers.json --allowed-tools 'Bash(cat .github/instructions/github-agentic-workflows.instructions.md),Bash(cat),Bash(date),Bash(echo),Bash(git add:*),Bash(git branch:*),Bash(git checkout:*),Bash(git commit:*),Bash(git describe --tags --abbrev=0),Bash(git log --since='\''*'\'' --pretty=format:'\''%h %s'\'' -- docs/),Bash(git merge:*),Bash(git rm:*),Bash(git status),Bash(git switch:*),Bash(grep),Bash(head),Bash(ls),Bash(pwd),Bash(sort),Bash(tail),Bash(uniq),Bash(wc -l .github/instructions/github-agentic-workflows.instructions.md),Bash(wc),Bash(yq),BashOutput,Edit,Edit(/tmp/gh-aw/cache-memory/*),ExitPlanMode,Glob,Grep,KillBash,LS,MultiEdit,MultiEdit(/tmp/gh-aw/cache-memory/*),NotebookEdit,NotebookRead,Read,Read(/tmp/gh-aw/cache-memory/*),Task,TodoWrite,Write,Write(/tmp/gh-aw/cache-memory/*),mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_job_logs,mcp__github__get_label,mcp__github__get_latest_release,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_review_comments,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_release_by_tag,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__issue_read,mcp__github__list_branches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_dependabot_alerts,mcp__github__list_discussion_categories,mcp__github__list_discussions,mcp__github__list_issue_types,mcp__github__list_issues,mcp__github__list_label,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_releases,mcp__github__list_secret_scanning_alerts,mcp__github__list_starred_repositories,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_runs,mcp__github__list_workflows,mcp__github__pull_request_read,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_repositories,mcp__github__search_users' --debug --verbose --permission-mode bypassPermissions --output-format stream-json --settings /tmp/gh-aw/.claude/settings.json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)" 2>&1 | tee /tmp/gh-aw/agent-stdio.log + sudo -E awf --env-all --container-workdir "${GITHUB_WORKSPACE}" --mount /tmp:/tmp:rw --mount "${GITHUB_WORKSPACE}:${GITHUB_WORKSPACE}:rw" --allow-domains 
'*.githubusercontent.com,api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,codeload.github.com,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,github-cloud.githubusercontent.com,github-cloud.s3.amazonaws.com,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,lfs.github.com,objects.githubusercontent.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,ppa.launchpad.net,raw.githubusercontent.com,s.symcb.com,s.symcd.com,security.ubuntu.com,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com' --log-level info \ + -- claude --print --mcp-config /tmp/gh-aw/mcp-config/mcp-servers.json --allowed-tools 'Bash(cat .github/instructions/github-agentic-workflows.instructions.md),Bash(cat),Bash(date),Bash(echo),Bash(git add:*),Bash(git branch:*),Bash(git checkout:*),Bash(git commit:*),Bash(git describe --tags --abbrev=0),Bash(git log --since='\''*'\'' --pretty=format:'\''%h %s'\'' -- docs/),Bash(git merge:*),Bash(git rm:*),Bash(git status),Bash(git switch:*),Bash(grep),Bash(head),Bash(ls),Bash(pwd),Bash(sort),Bash(tail),Bash(uniq),Bash(wc -l .github/instructions/github-agentic-workflows.instructions.md),Bash(wc),Bash(yq),BashOutput,Edit,Edit(/tmp/gh-aw/cache-memory/*),ExitPlanMode,Glob,Grep,KillBash,LS,MultiEdit,MultiEdit(/tmp/gh-aw/cache-memory/*),NotebookEdit,NotebookRead,Read,Read(/tmp/gh-aw/cache-memory/*),Task,TodoWrite,Write,Write(/tmp/gh-aw/cache-memory/*),mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_job_logs,mcp__github__get_label,mcp__github__get_latest_release,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_review_comments,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_release_by_tag,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__issue_read,mcp__github__list_branches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_dependabot_alerts,mcp__github__list_discussion_categories,mcp__github__list_discussions,mcp__github__list_issue_types,mcp__github__list_issues,mcp__github__list_label,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_releases,mcp__github__list_secret_scanning_alerts,mcp__github__list_starred_repositories,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_runs,mcp__github__list_workflows,mcp__github__pull_request_read,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_repositories,mcp__github__search_users' --debug --verbose --permission-mode bypassPermissions --output-format stream-json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)" \ + 2>&1 | tee /tmp/gh-aw/agent-stdio.log env: ANTHROPIC_API_KEY: ${{ 
secrets.ANTHROPIC_API_KEY }} CLAUDE_CODE_OAUTH_TOKEN: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }} @@ -1829,12 +1730,6 @@ jobs: BASH_DEFAULT_TIMEOUT_MS: "60000" BASH_MAX_TIMEOUT_MS: "60000" GH_AW_SAFE_OUTPUTS: ${{ env.GH_AW_SAFE_OUTPUTS }} - - name: Clean up network proxy hook files - if: always() - run: | - rm -rf .claude/hooks/network_permissions.py || true - rm -rf .claude/hooks || true - rm -rf .claude || true - name: Redact secrets in logs if: always() uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 diff --git a/.github/workflows/lockfile-stats.lock.yml b/.github/workflows/lockfile-stats.lock.yml index 4ac465ef58..d253b61611 100644 --- a/.github/workflows/lockfile-stats.lock.yml +++ b/.github/workflows/lockfile-stats.lock.yml @@ -701,115 +701,6 @@ jobs: node-version: '24' - name: Install Claude Code CLI run: npm install -g @anthropic-ai/claude-code@2.0.53 - - name: Generate Claude Settings - run: | - mkdir -p /tmp/gh-aw/.claude - cat > /tmp/gh-aw/.claude/settings.json << 'EOF' - { - "hooks": { - "PreToolUse": [ - { - "matcher": "WebFetch|WebSearch", - "hooks": [ - { - "type": "command", - "command": ".claude/hooks/network_permissions.py" - } - ] - } - ] - } - } - EOF - - name: Generate Network Permissions Hook - run: | - mkdir -p .claude/hooks - cat > .claude/hooks/network_permissions.py << 'EOF' - #!/usr/bin/env python3 - """ - Network permissions validator for Claude Code engine. - Generated by gh-aw from workflow-level network configuration. - """ - - import json - import sys - import urllib.parse - import re - - # Domain allow-list (populated during generation) - # JSON string is safely parsed using json.loads() to eliminate quoting vulnerabilities - ALLOWED_DOMAINS = json.loads('''["crl3.digicert.com","crl4.digicert.com","ocsp.digicert.com","ts-crl.ws.symantec.com","ts-ocsp.ws.symantec.com","crl.geotrust.com","ocsp.geotrust.com","crl.thawte.com","ocsp.thawte.com","crl.verisign.com","ocsp.verisign.com","crl.globalsign.com","ocsp.globalsign.com","crls.ssl.com","ocsp.ssl.com","crl.identrust.com","ocsp.identrust.com","crl.sectigo.com","ocsp.sectigo.com","crl.usertrust.com","ocsp.usertrust.com","s.symcb.com","s.symcd.com","json-schema.org","json.schemastore.org","archive.ubuntu.com","security.ubuntu.com","ppa.launchpad.net","keyserver.ubuntu.com","azure.archive.ubuntu.com","api.snapcraft.io","packagecloud.io","packages.cloud.google.com","packages.microsoft.com"]''') - - def extract_domain(url_or_query): - """Extract domain from URL or search query.""" - if not url_or_query: - return None - - if url_or_query.startswith(('http://', 'https://')): - return urllib.parse.urlparse(url_or_query).netloc.lower() - - # Check for domain patterns in search queries - match = re.search(r'site:([a-zA-Z0-9.-]+\.[a-zA-Z]{2,})', url_or_query) - if match: - return match.group(1).lower() - - return None - - def is_domain_allowed(domain): - """Check if domain is allowed.""" - if not domain: - # If no domain detected, allow only if not under deny-all policy - return bool(ALLOWED_DOMAINS) # False if empty list (deny-all), True if has domains - - # Empty allowed domains means deny all - if not ALLOWED_DOMAINS: - return False - - for pattern in ALLOWED_DOMAINS: - regex = pattern.replace('.', r'\.').replace('*', '.*') - if re.match(f'^{regex}$', domain): - return True - return False - - # Main logic - try: - data = json.load(sys.stdin) - tool_name = data.get('tool_name', '') - tool_input = data.get('tool_input', {}) - - if tool_name not in ['WebFetch', 'WebSearch']: - sys.exit(0) # Allow 
other tools - - target = tool_input.get('url') or tool_input.get('query', '') - domain = extract_domain(target) - - # For WebSearch, apply domain restrictions consistently - # If no domain detected in search query, check if restrictions are in place - if tool_name == 'WebSearch' and not domain: - # Since this hook is only generated when network permissions are configured, - # empty ALLOWED_DOMAINS means deny-all policy - if not ALLOWED_DOMAINS: # Empty list means deny all - print(f"Network access blocked: deny-all policy in effect", file=sys.stderr) - print(f"No domains are allowed for WebSearch", file=sys.stderr) - sys.exit(2) # Block under deny-all policy - else: - print(f"Network access blocked for web-search: no specific domain detected", file=sys.stderr) - print(f"Allowed domains: {', '.join(ALLOWED_DOMAINS)}", file=sys.stderr) - sys.exit(2) # Block general searches when domain allowlist is configured - - if not is_domain_allowed(domain): - print(f"Network access blocked for domain: {domain}", file=sys.stderr) - print(f"Allowed domains: {', '.join(ALLOWED_DOMAINS)}", file=sys.stderr) - sys.exit(2) # Block with feedback to Claude - - sys.exit(0) # Allow - - except Exception as e: - print(f"Network validation error: {e}", file=sys.stderr) - sys.exit(2) # Block on errors - - EOF - chmod +x .claude/hooks/network_permissions.py - name: Downloading container images run: | set -e @@ -2289,7 +2180,7 @@ jobs: run: | set -o pipefail # Execute Claude Code CLI with prompt from file - claude --print --mcp-config /tmp/gh-aw/mcp-config/mcp-servers.json --allowed-tools 'Bash(cat),Bash(date),Bash(echo),Bash(grep),Bash(head),Bash(ls),Bash(pwd),Bash(sort),Bash(tail),Bash(uniq),Bash(wc),Bash(yq),BashOutput,Edit(/tmp/gh-aw/cache-memory/*),ExitPlanMode,Glob,Grep,KillBash,LS,MultiEdit(/tmp/gh-aw/cache-memory/*),NotebookRead,Read,Read(/tmp/gh-aw/cache-memory/*),Task,TodoWrite,Write,Write(/tmp/gh-aw/cache-memory/*),mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_job_logs,mcp__github__get_label,mcp__github__get_latest_release,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_review_comments,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_release_by_tag,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__issue_read,mcp__github__list_branches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_dependabot_alerts,mcp__github__list_discussion_categories,mcp__github__list_discussions,mcp__github__list_issue_types,mcp__github__list_issues,mcp__github__list_label,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_releases,mcp__github__list_secret_scanning_alerts,mcp__github__list_starred_repositories,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_runs,mcp__github__list_workflows,mcp__github__pull_request_read,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_reposito
ries,mcp__github__search_users' --debug --verbose --permission-mode bypassPermissions --output-format stream-json --settings /tmp/gh-aw/.claude/settings.json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)" 2>&1 | tee /tmp/gh-aw/agent-stdio.log + claude --print --mcp-config /tmp/gh-aw/mcp-config/mcp-servers.json --allowed-tools 'Bash(cat),Bash(date),Bash(echo),Bash(grep),Bash(head),Bash(ls),Bash(pwd),Bash(sort),Bash(tail),Bash(uniq),Bash(wc),Bash(yq),BashOutput,Edit(/tmp/gh-aw/cache-memory/*),ExitPlanMode,Glob,Grep,KillBash,LS,MultiEdit(/tmp/gh-aw/cache-memory/*),NotebookRead,Read,Read(/tmp/gh-aw/cache-memory/*),Task,TodoWrite,Write,Write(/tmp/gh-aw/cache-memory/*),mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_job_logs,mcp__github__get_label,mcp__github__get_latest_release,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_review_comments,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_release_by_tag,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__issue_read,mcp__github__list_branches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_dependabot_alerts,mcp__github__list_discussion_categories,mcp__github__list_discussions,mcp__github__list_issue_types,mcp__github__list_issues,mcp__github__list_label,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_releases,mcp__github__list_secret_scanning_alerts,mcp__github__list_starred_repositories,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_runs,mcp__github__list_workflows,mcp__github__pull_request_read,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_repositories,mcp__github__search_users' --debug --verbose --permission-mode bypassPermissions --output-format stream-json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)" 2>&1 | tee /tmp/gh-aw/agent-stdio.log env: ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }} CLAUDE_CODE_OAUTH_TOKEN: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }} @@ -2303,12 +2194,6 @@ jobs: BASH_DEFAULT_TIMEOUT_MS: "60000" BASH_MAX_TIMEOUT_MS: "60000" GH_AW_SAFE_OUTPUTS: ${{ env.GH_AW_SAFE_OUTPUTS }} - - name: Clean up network proxy hook files - if: always() - run: | - rm -rf .claude/hooks/network_permissions.py || true - rm -rf .claude/hooks || true - rm -rf .claude || true - name: Redact secrets in logs if: always() uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 diff --git a/.github/workflows/prompt-clustering-analysis.lock.yml b/.github/workflows/prompt-clustering-analysis.lock.yml index 72c2526513..54b0e7aff6 100644 --- a/.github/workflows/prompt-clustering-analysis.lock.yml +++ b/.github/workflows/prompt-clustering-analysis.lock.yml @@ -1298,117 +1298,16 @@ jobs: uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6 with: node-version: '24' + - name: Install awf binary + run: | + echo "Installing awf from release: v0.3.0" + curl -L 
https://github.com/githubnext/gh-aw-firewall/releases/download/v0.3.0/awf-linux-x64 -o awf + chmod +x awf + sudo mv awf /usr/local/bin/ + which awf + awf --version - name: Install Claude Code CLI run: npm install -g @anthropic-ai/claude-code@2.0.53 - - name: Generate Claude Settings - run: | - mkdir -p /tmp/gh-aw/.claude - cat > /tmp/gh-aw/.claude/settings.json << 'EOF' - { - "hooks": { - "PreToolUse": [ - { - "matcher": "WebFetch|WebSearch", - "hooks": [ - { - "type": "command", - "command": ".claude/hooks/network_permissions.py" - } - ] - } - ] - } - } - EOF - - name: Generate Network Permissions Hook - run: | - mkdir -p .claude/hooks - cat > .claude/hooks/network_permissions.py << 'EOF' - #!/usr/bin/env python3 - """ - Network permissions validator for Claude Code engine. - Generated by gh-aw from workflow-level network configuration. - """ - - import json - import sys - import urllib.parse - import re - - # Domain allow-list (populated during generation) - # JSON string is safely parsed using json.loads() to eliminate quoting vulnerabilities - ALLOWED_DOMAINS = json.loads('''["crl3.digicert.com","crl4.digicert.com","ocsp.digicert.com","ts-crl.ws.symantec.com","ts-ocsp.ws.symantec.com","crl.geotrust.com","ocsp.geotrust.com","crl.thawte.com","ocsp.thawte.com","crl.verisign.com","ocsp.verisign.com","crl.globalsign.com","ocsp.globalsign.com","crls.ssl.com","ocsp.ssl.com","crl.identrust.com","ocsp.identrust.com","crl.sectigo.com","ocsp.sectigo.com","crl.usertrust.com","ocsp.usertrust.com","s.symcb.com","s.symcd.com","json-schema.org","json.schemastore.org","archive.ubuntu.com","security.ubuntu.com","ppa.launchpad.net","keyserver.ubuntu.com","azure.archive.ubuntu.com","api.snapcraft.io","packagecloud.io","packages.cloud.google.com","packages.microsoft.com","*.githubusercontent.com","raw.githubusercontent.com","objects.githubusercontent.com","lfs.github.com","github-cloud.githubusercontent.com","github-cloud.s3.amazonaws.com","codeload.github.com","pypi.python.org","pypi.org","pip.pypa.io","*.pythonhosted.org","files.pythonhosted.org","bootstrap.pypa.io","conda.binstar.org","conda.anaconda.org","binstar.org","anaconda.org","repo.continuum.io","repo.anaconda.com"]''') - - def extract_domain(url_or_query): - """Extract domain from URL or search query.""" - if not url_or_query: - return None - - if url_or_query.startswith(('http://', 'https://')): - return urllib.parse.urlparse(url_or_query).netloc.lower() - - # Check for domain patterns in search queries - match = re.search(r'site:([a-zA-Z0-9.-]+\.[a-zA-Z]{2,})', url_or_query) - if match: - return match.group(1).lower() - - return None - - def is_domain_allowed(domain): - """Check if domain is allowed.""" - if not domain: - # If no domain detected, allow only if not under deny-all policy - return bool(ALLOWED_DOMAINS) # False if empty list (deny-all), True if has domains - - # Empty allowed domains means deny all - if not ALLOWED_DOMAINS: - return False - - for pattern in ALLOWED_DOMAINS: - regex = pattern.replace('.', r'\.').replace('*', '.*') - if re.match(f'^{regex}$', domain): - return True - return False - - # Main logic - try: - data = json.load(sys.stdin) - tool_name = data.get('tool_name', '') - tool_input = data.get('tool_input', {}) - - if tool_name not in ['WebFetch', 'WebSearch']: - sys.exit(0) # Allow other tools - - target = tool_input.get('url') or tool_input.get('query', '') - domain = extract_domain(target) - - # For WebSearch, apply domain restrictions consistently - # If no domain detected in search query, check if 
restrictions are in place - if tool_name == 'WebSearch' and not domain: - # Since this hook is only generated when network permissions are configured, - # empty ALLOWED_DOMAINS means deny-all policy - if not ALLOWED_DOMAINS: # Empty list means deny all - print(f"Network access blocked: deny-all policy in effect", file=sys.stderr) - print(f"No domains are allowed for WebSearch", file=sys.stderr) - sys.exit(2) # Block under deny-all policy - else: - print(f"Network access blocked for web-search: no specific domain detected", file=sys.stderr) - print(f"Allowed domains: {', '.join(ALLOWED_DOMAINS)}", file=sys.stderr) - sys.exit(2) # Block general searches when domain allowlist is configured - - if not is_domain_allowed(domain): - print(f"Network access blocked for domain: {domain}", file=sys.stderr) - print(f"Allowed domains: {', '.join(ALLOWED_DOMAINS)}", file=sys.stderr) - sys.exit(2) # Block with feedback to Claude - - sys.exit(0) # Allow - - except Exception as e: - print(f"Network validation error: {e}", file=sys.stderr) - sys.exit(2) # Block on errors - - EOF - chmod +x .claude/hooks/network_permissions.py - name: Downloading container images run: | set -e @@ -3320,7 +3219,7 @@ jobs: event_name: context.eventName, staged: false, steps: { - firewall: "" + firewall: "squid" }, created_at: new Date().toISOString() }; @@ -3412,7 +3311,9 @@ jobs: run: | set -o pipefail # Execute Claude Code CLI with prompt from file - claude --print --mcp-config /tmp/gh-aw/mcp-config/mcp-servers.json --allowed-tools 'Bash,BashOutput,Edit(/tmp/gh-aw/cache-memory/*),ExitPlanMode,Glob,Grep,KillBash,LS,MultiEdit(/tmp/gh-aw/cache-memory/*),NotebookRead,Read,Read(/tmp/gh-aw/cache-memory/*),Task,TodoWrite,Write,Write(/tmp/gh-aw/cache-memory/*),mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_job_logs,mcp__github__get_label,mcp__github__get_latest_release,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_review_comments,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_release_by_tag,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__issue_read,mcp__github__list_branches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_dependabot_alerts,mcp__github__list_discussion_categories,mcp__github__list_discussions,mcp__github__list_issue_types,mcp__github__list_issues,mcp__github__list_label,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_releases,mcp__github__list_secret_scanning_alerts,mcp__github__list_starred_repositories,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_runs,mcp__github__list_workflows,mcp__github__pull_request_read,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_repositories,mcp__github__search_users' --debug --verbose --permission-mode bypassPermissions --output-format stream-json --settings /tmp/gh-aw/.claude/settings.json "$(cat 
/tmp/gh-aw/aw-prompts/prompt.txt)" 2>&1 | tee /tmp/gh-aw/agent-stdio.log + sudo -E awf --env-all --container-workdir "${GITHUB_WORKSPACE}" --mount /tmp:/tmp:rw --mount "${GITHUB_WORKSPACE}:${GITHUB_WORKSPACE}:rw" --allow-domains '*.githubusercontent.com,*.pythonhosted.org,anaconda.org,api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,binstar.org,bootstrap.pypa.io,codeload.github.com,conda.anaconda.org,conda.binstar.org,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,files.pythonhosted.org,github-cloud.githubusercontent.com,github-cloud.s3.amazonaws.com,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,lfs.github.com,objects.githubusercontent.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,pip.pypa.io,ppa.launchpad.net,pypi.org,pypi.python.org,raw.githubusercontent.com,repo.anaconda.com,repo.continuum.io,s.symcb.com,s.symcd.com,security.ubuntu.com,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com' --log-level info \ + -- claude --print --mcp-config /tmp/gh-aw/mcp-config/mcp-servers.json --allowed-tools 'Bash,BashOutput,Edit(/tmp/gh-aw/cache-memory/*),ExitPlanMode,Glob,Grep,KillBash,LS,MultiEdit(/tmp/gh-aw/cache-memory/*),NotebookRead,Read,Read(/tmp/gh-aw/cache-memory/*),Task,TodoWrite,Write,Write(/tmp/gh-aw/cache-memory/*),mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_job_logs,mcp__github__get_label,mcp__github__get_latest_release,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_review_comments,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_release_by_tag,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__issue_read,mcp__github__list_branches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_dependabot_alerts,mcp__github__list_discussion_categories,mcp__github__list_discussions,mcp__github__list_issue_types,mcp__github__list_issues,mcp__github__list_label,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_releases,mcp__github__list_secret_scanning_alerts,mcp__github__list_starred_repositories,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_runs,mcp__github__list_workflows,mcp__github__pull_request_read,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_repositories,mcp__github__search_users' --debug --verbose --permission-mode bypassPermissions --output-format stream-json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)" \ + 2>&1 | tee /tmp/gh-aw/agent-stdio.log env: ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }} CLAUDE_CODE_OAUTH_TOKEN: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }} @@ -3426,12 +3327,6 @@ jobs: 
BASH_DEFAULT_TIMEOUT_MS: "60000" BASH_MAX_TIMEOUT_MS: "60000" GH_AW_SAFE_OUTPUTS: ${{ env.GH_AW_SAFE_OUTPUTS }} - - name: Clean up network proxy hook files - if: always() - run: | - rm -rf .claude/hooks/network_permissions.py || true - rm -rf .claude/hooks || true - rm -rf .claude || true - name: Redact secrets in logs if: always() uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 diff --git a/.github/workflows/safe-output-health.lock.yml b/.github/workflows/safe-output-health.lock.yml index dcb7fa5ea2..dbd08126b6 100644 --- a/.github/workflows/safe-output-health.lock.yml +++ b/.github/workflows/safe-output-health.lock.yml @@ -818,115 +818,6 @@ jobs: node-version: '24' - name: Install Claude Code CLI run: npm install -g @anthropic-ai/claude-code@2.0.53 - - name: Generate Claude Settings - run: | - mkdir -p /tmp/gh-aw/.claude - cat > /tmp/gh-aw/.claude/settings.json << 'EOF' - { - "hooks": { - "PreToolUse": [ - { - "matcher": "WebFetch|WebSearch", - "hooks": [ - { - "type": "command", - "command": ".claude/hooks/network_permissions.py" - } - ] - } - ] - } - } - EOF - - name: Generate Network Permissions Hook - run: | - mkdir -p .claude/hooks - cat > .claude/hooks/network_permissions.py << 'EOF' - #!/usr/bin/env python3 - """ - Network permissions validator for Claude Code engine. - Generated by gh-aw from workflow-level network configuration. - """ - - import json - import sys - import urllib.parse - import re - - # Domain allow-list (populated during generation) - # JSON string is safely parsed using json.loads() to eliminate quoting vulnerabilities - ALLOWED_DOMAINS = json.loads('''["crl3.digicert.com","crl4.digicert.com","ocsp.digicert.com","ts-crl.ws.symantec.com","ts-ocsp.ws.symantec.com","crl.geotrust.com","ocsp.geotrust.com","crl.thawte.com","ocsp.thawte.com","crl.verisign.com","ocsp.verisign.com","crl.globalsign.com","ocsp.globalsign.com","crls.ssl.com","ocsp.ssl.com","crl.identrust.com","ocsp.identrust.com","crl.sectigo.com","ocsp.sectigo.com","crl.usertrust.com","ocsp.usertrust.com","s.symcb.com","s.symcd.com","json-schema.org","json.schemastore.org","archive.ubuntu.com","security.ubuntu.com","ppa.launchpad.net","keyserver.ubuntu.com","azure.archive.ubuntu.com","api.snapcraft.io","packagecloud.io","packages.cloud.google.com","packages.microsoft.com"]''') - - def extract_domain(url_or_query): - """Extract domain from URL or search query.""" - if not url_or_query: - return None - - if url_or_query.startswith(('http://', 'https://')): - return urllib.parse.urlparse(url_or_query).netloc.lower() - - # Check for domain patterns in search queries - match = re.search(r'site:([a-zA-Z0-9.-]+\.[a-zA-Z]{2,})', url_or_query) - if match: - return match.group(1).lower() - - return None - - def is_domain_allowed(domain): - """Check if domain is allowed.""" - if not domain: - # If no domain detected, allow only if not under deny-all policy - return bool(ALLOWED_DOMAINS) # False if empty list (deny-all), True if has domains - - # Empty allowed domains means deny all - if not ALLOWED_DOMAINS: - return False - - for pattern in ALLOWED_DOMAINS: - regex = pattern.replace('.', r'\.').replace('*', '.*') - if re.match(f'^{regex}$', domain): - return True - return False - - # Main logic - try: - data = json.load(sys.stdin) - tool_name = data.get('tool_name', '') - tool_input = data.get('tool_input', {}) - - if tool_name not in ['WebFetch', 'WebSearch']: - sys.exit(0) # Allow other tools - - target = tool_input.get('url') or tool_input.get('query', '') - domain = 
extract_domain(target) - - # For WebSearch, apply domain restrictions consistently - # If no domain detected in search query, check if restrictions are in place - if tool_name == 'WebSearch' and not domain: - # Since this hook is only generated when network permissions are configured, - # empty ALLOWED_DOMAINS means deny-all policy - if not ALLOWED_DOMAINS: # Empty list means deny all - print(f"Network access blocked: deny-all policy in effect", file=sys.stderr) - print(f"No domains are allowed for WebSearch", file=sys.stderr) - sys.exit(2) # Block under deny-all policy - else: - print(f"Network access blocked for web-search: no specific domain detected", file=sys.stderr) - print(f"Allowed domains: {', '.join(ALLOWED_DOMAINS)}", file=sys.stderr) - sys.exit(2) # Block general searches when domain allowlist is configured - - if not is_domain_allowed(domain): - print(f"Network access blocked for domain: {domain}", file=sys.stderr) - print(f"Allowed domains: {', '.join(ALLOWED_DOMAINS)}", file=sys.stderr) - sys.exit(2) # Block with feedback to Claude - - sys.exit(0) # Allow - - except Exception as e: - print(f"Network validation error: {e}", file=sys.stderr) - sys.exit(2) # Block on errors - - EOF - chmod +x .claude/hooks/network_permissions.py - name: Downloading container images run: | set -e @@ -2511,7 +2402,7 @@ jobs: run: | set -o pipefail # Execute Claude Code CLI with prompt from file - claude --print --mcp-config /tmp/gh-aw/mcp-config/mcp-servers.json --allowed-tools 'Bash(/tmp/gh-aw/jqschema.sh),Bash(cat),Bash(date),Bash(echo),Bash(grep),Bash(head),Bash(jq *),Bash(ls),Bash(pwd),Bash(sort),Bash(tail),Bash(uniq),Bash(wc),Bash(yq),BashOutput,Edit(/tmp/gh-aw/cache-memory/*),ExitPlanMode,Glob,Grep,KillBash,LS,MultiEdit(/tmp/gh-aw/cache-memory/*),NotebookRead,Read,Read(/tmp/gh-aw/cache-memory/*),Task,TodoWrite,Write,Write(/tmp/gh-aw/cache-memory/*),mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_job_logs,mcp__github__get_label,mcp__github__get_latest_release,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_review_comments,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_release_by_tag,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__issue_read,mcp__github__list_branches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_dependabot_alerts,mcp__github__list_discussion_categories,mcp__github__list_discussions,mcp__github__list_issue_types,mcp__github__list_issues,mcp__github__list_label,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_releases,mcp__github__list_secret_scanning_alerts,mcp__github__list_starred_repositories,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_runs,mcp__github__list_workflows,mcp__github__pull_request_read,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_repositories,mcp__github__search_users' --debug 
--verbose --permission-mode bypassPermissions --output-format stream-json --settings /tmp/gh-aw/.claude/settings.json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)" 2>&1 | tee /tmp/gh-aw/agent-stdio.log + claude --print --mcp-config /tmp/gh-aw/mcp-config/mcp-servers.json --allowed-tools 'Bash(/tmp/gh-aw/jqschema.sh),Bash(cat),Bash(date),Bash(echo),Bash(grep),Bash(head),Bash(jq *),Bash(ls),Bash(pwd),Bash(sort),Bash(tail),Bash(uniq),Bash(wc),Bash(yq),BashOutput,Edit(/tmp/gh-aw/cache-memory/*),ExitPlanMode,Glob,Grep,KillBash,LS,MultiEdit(/tmp/gh-aw/cache-memory/*),NotebookRead,Read,Read(/tmp/gh-aw/cache-memory/*),Task,TodoWrite,Write,Write(/tmp/gh-aw/cache-memory/*),mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_job_logs,mcp__github__get_label,mcp__github__get_latest_release,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_review_comments,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_release_by_tag,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__issue_read,mcp__github__list_branches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_dependabot_alerts,mcp__github__list_discussion_categories,mcp__github__list_discussions,mcp__github__list_issue_types,mcp__github__list_issues,mcp__github__list_label,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_releases,mcp__github__list_secret_scanning_alerts,mcp__github__list_starred_repositories,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_runs,mcp__github__list_workflows,mcp__github__pull_request_read,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_repositories,mcp__github__search_users' --debug --verbose --permission-mode bypassPermissions --output-format stream-json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)" 2>&1 | tee /tmp/gh-aw/agent-stdio.log env: ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }} CLAUDE_CODE_OAUTH_TOKEN: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }} @@ -2526,12 +2417,6 @@ jobs: BASH_MAX_TIMEOUT_MS: "300000" GH_AW_SAFE_OUTPUTS: ${{ env.GH_AW_SAFE_OUTPUTS }} GH_AW_TOOL_TIMEOUT: "300" - - name: Clean up network proxy hook files - if: always() - run: | - rm -rf .claude/hooks/network_permissions.py || true - rm -rf .claude/hooks || true - rm -rf .claude || true - name: Redact secrets in logs if: always() uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 diff --git a/.github/workflows/schema-consistency-checker.lock.yml b/.github/workflows/schema-consistency-checker.lock.yml index d3bd90bd42..5cf296cb7c 100644 --- a/.github/workflows/schema-consistency-checker.lock.yml +++ b/.github/workflows/schema-consistency-checker.lock.yml @@ -709,115 +709,6 @@ jobs: node-version: '24' - name: Install Claude Code CLI run: npm install -g @anthropic-ai/claude-code@2.0.53 - - name: Generate Claude Settings - run: | - mkdir -p /tmp/gh-aw/.claude - cat > 
/tmp/gh-aw/.claude/settings.json << 'EOF' - { - "hooks": { - "PreToolUse": [ - { - "matcher": "WebFetch|WebSearch", - "hooks": [ - { - "type": "command", - "command": ".claude/hooks/network_permissions.py" - } - ] - } - ] - } - } - EOF - - name: Generate Network Permissions Hook - run: | - mkdir -p .claude/hooks - cat > .claude/hooks/network_permissions.py << 'EOF' - #!/usr/bin/env python3 - """ - Network permissions validator for Claude Code engine. - Generated by gh-aw from workflow-level network configuration. - """ - - import json - import sys - import urllib.parse - import re - - # Domain allow-list (populated during generation) - # JSON string is safely parsed using json.loads() to eliminate quoting vulnerabilities - ALLOWED_DOMAINS = json.loads('''["crl3.digicert.com","crl4.digicert.com","ocsp.digicert.com","ts-crl.ws.symantec.com","ts-ocsp.ws.symantec.com","crl.geotrust.com","ocsp.geotrust.com","crl.thawte.com","ocsp.thawte.com","crl.verisign.com","ocsp.verisign.com","crl.globalsign.com","ocsp.globalsign.com","crls.ssl.com","ocsp.ssl.com","crl.identrust.com","ocsp.identrust.com","crl.sectigo.com","ocsp.sectigo.com","crl.usertrust.com","ocsp.usertrust.com","s.symcb.com","s.symcd.com","json-schema.org","json.schemastore.org","archive.ubuntu.com","security.ubuntu.com","ppa.launchpad.net","keyserver.ubuntu.com","azure.archive.ubuntu.com","api.snapcraft.io","packagecloud.io","packages.cloud.google.com","packages.microsoft.com"]''') - - def extract_domain(url_or_query): - """Extract domain from URL or search query.""" - if not url_or_query: - return None - - if url_or_query.startswith(('http://', 'https://')): - return urllib.parse.urlparse(url_or_query).netloc.lower() - - # Check for domain patterns in search queries - match = re.search(r'site:([a-zA-Z0-9.-]+\.[a-zA-Z]{2,})', url_or_query) - if match: - return match.group(1).lower() - - return None - - def is_domain_allowed(domain): - """Check if domain is allowed.""" - if not domain: - # If no domain detected, allow only if not under deny-all policy - return bool(ALLOWED_DOMAINS) # False if empty list (deny-all), True if has domains - - # Empty allowed domains means deny all - if not ALLOWED_DOMAINS: - return False - - for pattern in ALLOWED_DOMAINS: - regex = pattern.replace('.', r'\.').replace('*', '.*') - if re.match(f'^{regex}$', domain): - return True - return False - - # Main logic - try: - data = json.load(sys.stdin) - tool_name = data.get('tool_name', '') - tool_input = data.get('tool_input', {}) - - if tool_name not in ['WebFetch', 'WebSearch']: - sys.exit(0) # Allow other tools - - target = tool_input.get('url') or tool_input.get('query', '') - domain = extract_domain(target) - - # For WebSearch, apply domain restrictions consistently - # If no domain detected in search query, check if restrictions are in place - if tool_name == 'WebSearch' and not domain: - # Since this hook is only generated when network permissions are configured, - # empty ALLOWED_DOMAINS means deny-all policy - if not ALLOWED_DOMAINS: # Empty list means deny all - print(f"Network access blocked: deny-all policy in effect", file=sys.stderr) - print(f"No domains are allowed for WebSearch", file=sys.stderr) - sys.exit(2) # Block under deny-all policy - else: - print(f"Network access blocked for web-search: no specific domain detected", file=sys.stderr) - print(f"Allowed domains: {', '.join(ALLOWED_DOMAINS)}", file=sys.stderr) - sys.exit(2) # Block general searches when domain allowlist is configured - - if not is_domain_allowed(domain): - print(f"Network 
access blocked for domain: {domain}", file=sys.stderr) - print(f"Allowed domains: {', '.join(ALLOWED_DOMAINS)}", file=sys.stderr) - sys.exit(2) # Block with feedback to Claude - - sys.exit(0) # Allow - - except Exception as e: - print(f"Network validation error: {e}", file=sys.stderr) - sys.exit(2) # Block on errors - - EOF - chmod +x .claude/hooks/network_permissions.py - name: Setup Safe Outputs Collector MCP run: | mkdir -p /tmp/gh-aw/safeoutputs @@ -2294,7 +2185,7 @@ jobs: run: | set -o pipefail # Execute Claude Code CLI with prompt from file - claude --print --mcp-config /tmp/gh-aw/mcp-config/mcp-servers.json --allowed-tools 'Bash,BashOutput,Edit,Edit(/tmp/gh-aw/cache-memory/*),ExitPlanMode,Glob,Grep,KillBash,LS,MultiEdit,MultiEdit(/tmp/gh-aw/cache-memory/*),NotebookEdit,NotebookRead,Read,Read(/tmp/gh-aw/cache-memory/*),Task,TodoWrite,Write,Write(/tmp/gh-aw/cache-memory/*),mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_job_logs,mcp__github__get_label,mcp__github__get_latest_release,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_review_comments,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_release_by_tag,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__issue_read,mcp__github__list_branches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_dependabot_alerts,mcp__github__list_discussion_categories,mcp__github__list_discussions,mcp__github__list_issue_types,mcp__github__list_issues,mcp__github__list_label,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_releases,mcp__github__list_secret_scanning_alerts,mcp__github__list_starred_repositories,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_runs,mcp__github__list_workflows,mcp__github__pull_request_read,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_repositories,mcp__github__search_users' --debug --verbose --permission-mode bypassPermissions --output-format stream-json --settings /tmp/gh-aw/.claude/settings.json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)" 2>&1 | tee /tmp/gh-aw/agent-stdio.log + claude --print --mcp-config /tmp/gh-aw/mcp-config/mcp-servers.json --allowed-tools 
'Bash,BashOutput,Edit,Edit(/tmp/gh-aw/cache-memory/*),ExitPlanMode,Glob,Grep,KillBash,LS,MultiEdit,MultiEdit(/tmp/gh-aw/cache-memory/*),NotebookEdit,NotebookRead,Read,Read(/tmp/gh-aw/cache-memory/*),Task,TodoWrite,Write,Write(/tmp/gh-aw/cache-memory/*),mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_job_logs,mcp__github__get_label,mcp__github__get_latest_release,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_review_comments,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_release_by_tag,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__issue_read,mcp__github__list_branches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_dependabot_alerts,mcp__github__list_discussion_categories,mcp__github__list_discussions,mcp__github__list_issue_types,mcp__github__list_issues,mcp__github__list_label,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_releases,mcp__github__list_secret_scanning_alerts,mcp__github__list_starred_repositories,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_runs,mcp__github__list_workflows,mcp__github__pull_request_read,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_repositories,mcp__github__search_users' --debug --verbose --permission-mode bypassPermissions --output-format stream-json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)" 2>&1 | tee /tmp/gh-aw/agent-stdio.log env: ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }} CLAUDE_CODE_OAUTH_TOKEN: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }} @@ -2308,12 +2199,6 @@ jobs: BASH_DEFAULT_TIMEOUT_MS: "60000" BASH_MAX_TIMEOUT_MS: "60000" GH_AW_SAFE_OUTPUTS: ${{ env.GH_AW_SAFE_OUTPUTS }} - - name: Clean up network proxy hook files - if: always() - run: | - rm -rf .claude/hooks/network_permissions.py || true - rm -rf .claude/hooks || true - rm -rf .claude || true - name: Redact secrets in logs if: always() uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 diff --git a/.github/workflows/scout.lock.yml b/.github/workflows/scout.lock.yml index 817d438350..3e96bee61b 100644 --- a/.github/workflows/scout.lock.yml +++ b/.github/workflows/scout.lock.yml @@ -1744,115 +1744,6 @@ jobs: node-version: '24' - name: Install Claude Code CLI run: npm install -g @anthropic-ai/claude-code@2.0.53 - - name: Generate Claude Settings - run: | - mkdir -p /tmp/gh-aw/.claude - cat > /tmp/gh-aw/.claude/settings.json << 'EOF' - { - "hooks": { - "PreToolUse": [ - { - "matcher": "WebFetch|WebSearch", - "hooks": [ - { - "type": "command", - "command": ".claude/hooks/network_permissions.py" - } - ] - } - ] - } - } - EOF - - name: Generate Network Permissions Hook - run: | - mkdir -p .claude/hooks - cat > .claude/hooks/network_permissions.py << 'EOF' - #!/usr/bin/env python3 - """ - Network permissions validator for Claude Code engine. 
- Generated by gh-aw from workflow-level network configuration. - """ - - import json - import sys - import urllib.parse - import re - - # Domain allow-list (populated during generation) - # JSON string is safely parsed using json.loads() to eliminate quoting vulnerabilities - ALLOWED_DOMAINS = json.loads('''["crl3.digicert.com","crl4.digicert.com","ocsp.digicert.com","ts-crl.ws.symantec.com","ts-ocsp.ws.symantec.com","crl.geotrust.com","ocsp.geotrust.com","crl.thawte.com","ocsp.thawte.com","crl.verisign.com","ocsp.verisign.com","crl.globalsign.com","ocsp.globalsign.com","crls.ssl.com","ocsp.ssl.com","crl.identrust.com","ocsp.identrust.com","crl.sectigo.com","ocsp.sectigo.com","crl.usertrust.com","ocsp.usertrust.com","s.symcb.com","s.symcd.com","json-schema.org","json.schemastore.org","archive.ubuntu.com","security.ubuntu.com","ppa.launchpad.net","keyserver.ubuntu.com","azure.archive.ubuntu.com","api.snapcraft.io","packagecloud.io","packages.cloud.google.com","packages.microsoft.com"]''') - - def extract_domain(url_or_query): - """Extract domain from URL or search query.""" - if not url_or_query: - return None - - if url_or_query.startswith(('http://', 'https://')): - return urllib.parse.urlparse(url_or_query).netloc.lower() - - # Check for domain patterns in search queries - match = re.search(r'site:([a-zA-Z0-9.-]+\.[a-zA-Z]{2,})', url_or_query) - if match: - return match.group(1).lower() - - return None - - def is_domain_allowed(domain): - """Check if domain is allowed.""" - if not domain: - # If no domain detected, allow only if not under deny-all policy - return bool(ALLOWED_DOMAINS) # False if empty list (deny-all), True if has domains - - # Empty allowed domains means deny all - if not ALLOWED_DOMAINS: - return False - - for pattern in ALLOWED_DOMAINS: - regex = pattern.replace('.', r'\.').replace('*', '.*') - if re.match(f'^{regex}$', domain): - return True - return False - - # Main logic - try: - data = json.load(sys.stdin) - tool_name = data.get('tool_name', '') - tool_input = data.get('tool_input', {}) - - if tool_name not in ['WebFetch', 'WebSearch']: - sys.exit(0) # Allow other tools - - target = tool_input.get('url') or tool_input.get('query', '') - domain = extract_domain(target) - - # For WebSearch, apply domain restrictions consistently - # If no domain detected in search query, check if restrictions are in place - if tool_name == 'WebSearch' and not domain: - # Since this hook is only generated when network permissions are configured, - # empty ALLOWED_DOMAINS means deny-all policy - if not ALLOWED_DOMAINS: # Empty list means deny all - print(f"Network access blocked: deny-all policy in effect", file=sys.stderr) - print(f"No domains are allowed for WebSearch", file=sys.stderr) - sys.exit(2) # Block under deny-all policy - else: - print(f"Network access blocked for web-search: no specific domain detected", file=sys.stderr) - print(f"Allowed domains: {', '.join(ALLOWED_DOMAINS)}", file=sys.stderr) - sys.exit(2) # Block general searches when domain allowlist is configured - - if not is_domain_allowed(domain): - print(f"Network access blocked for domain: {domain}", file=sys.stderr) - print(f"Allowed domains: {', '.join(ALLOWED_DOMAINS)}", file=sys.stderr) - sys.exit(2) # Block with feedback to Claude - - sys.exit(0) # Allow - - except Exception as e: - print(f"Network validation error: {e}", file=sys.stderr) - sys.exit(2) # Block on errors - - EOF - chmod +x .claude/hooks/network_permissions.py - name: Downloading container images run: | set -e @@ -3343,7 +3234,7 @@ jobs: run: 
| set -o pipefail # Execute Claude Code CLI with prompt from file - claude --print --mcp-config /tmp/gh-aw/mcp-config/mcp-servers.json --allowed-tools 'Bash(/tmp/gh-aw/jqschema.sh),Bash(cat),Bash(date),Bash(echo),Bash(grep),Bash(head),Bash(jq *),Bash(ls),Bash(pwd),Bash(sort),Bash(tail),Bash(uniq),Bash(wc),Bash(yq),BashOutput,Edit,Edit(/tmp/gh-aw/cache-memory/*),ExitPlanMode,Glob,Grep,KillBash,LS,MultiEdit,MultiEdit(/tmp/gh-aw/cache-memory/*),NotebookEdit,NotebookRead,Read,Read(/tmp/gh-aw/cache-memory/*),Task,TodoWrite,Write,Write(/tmp/gh-aw/cache-memory/*),mcp__arxiv__get_paper_details,mcp__arxiv__get_paper_pdf,mcp__arxiv__search_arxiv,mcp__context7__get-library-docs,mcp__context7__resolve-library-id,mcp__deepwiki__ask_question,mcp__deepwiki__read_wiki_contents,mcp__deepwiki__read_wiki_structure,mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_job_logs,mcp__github__get_label,mcp__github__get_latest_release,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_review_comments,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_release_by_tag,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__issue_read,mcp__github__list_branches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_dependabot_alerts,mcp__github__list_discussion_categories,mcp__github__list_discussions,mcp__github__list_issue_types,mcp__github__list_issues,mcp__github__list_label,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_releases,mcp__github__list_secret_scanning_alerts,mcp__github__list_starred_repositories,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_runs,mcp__github__list_workflows,mcp__github__pull_request_read,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_repositories,mcp__github__search_users,mcp__markitdown,mcp__microsoftdocs,mcp__tavily' --debug --verbose --permission-mode bypassPermissions --output-format stream-json --settings /tmp/gh-aw/.claude/settings.json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)" 2>&1 | tee /tmp/gh-aw/agent-stdio.log + claude --print --mcp-config /tmp/gh-aw/mcp-config/mcp-servers.json --allowed-tools 'Bash(/tmp/gh-aw/jqschema.sh),Bash(cat),Bash(date),Bash(echo),Bash(grep),Bash(head),Bash(jq 
*),Bash(ls),Bash(pwd),Bash(sort),Bash(tail),Bash(uniq),Bash(wc),Bash(yq),BashOutput,Edit,Edit(/tmp/gh-aw/cache-memory/*),ExitPlanMode,Glob,Grep,KillBash,LS,MultiEdit,MultiEdit(/tmp/gh-aw/cache-memory/*),NotebookEdit,NotebookRead,Read,Read(/tmp/gh-aw/cache-memory/*),Task,TodoWrite,Write,Write(/tmp/gh-aw/cache-memory/*),mcp__arxiv__get_paper_details,mcp__arxiv__get_paper_pdf,mcp__arxiv__search_arxiv,mcp__context7__get-library-docs,mcp__context7__resolve-library-id,mcp__deepwiki__ask_question,mcp__deepwiki__read_wiki_contents,mcp__deepwiki__read_wiki_structure,mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_job_logs,mcp__github__get_label,mcp__github__get_latest_release,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_review_comments,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_release_by_tag,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__issue_read,mcp__github__list_branches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_dependabot_alerts,mcp__github__list_discussion_categories,mcp__github__list_discussions,mcp__github__list_issue_types,mcp__github__list_issues,mcp__github__list_label,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_releases,mcp__github__list_secret_scanning_alerts,mcp__github__list_starred_repositories,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_runs,mcp__github__list_workflows,mcp__github__pull_request_read,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_repositories,mcp__github__search_users,mcp__markitdown,mcp__microsoftdocs,mcp__tavily' --debug --verbose --permission-mode bypassPermissions --output-format stream-json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)" 2>&1 | tee /tmp/gh-aw/agent-stdio.log env: ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }} CLAUDE_CODE_OAUTH_TOKEN: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }} @@ -3357,12 +3248,6 @@ jobs: BASH_DEFAULT_TIMEOUT_MS: "60000" BASH_MAX_TIMEOUT_MS: "60000" GH_AW_SAFE_OUTPUTS: ${{ env.GH_AW_SAFE_OUTPUTS }} - - name: Clean up network proxy hook files - if: always() - run: | - rm -rf .claude/hooks/network_permissions.py || true - rm -rf .claude/hooks || true - rm -rf .claude || true - name: Redact secrets in logs if: always() uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 diff --git a/.github/workflows/security-fix-pr.lock.yml b/.github/workflows/security-fix-pr.lock.yml index 9f2f830e3e..3004abce2c 100644 --- a/.github/workflows/security-fix-pr.lock.yml +++ b/.github/workflows/security-fix-pr.lock.yml @@ -436,115 +436,6 @@ jobs: node-version: '24' - name: Install Claude Code CLI run: npm install -g @anthropic-ai/claude-code@2.0.53 - - name: Generate Claude Settings - run: | - mkdir -p /tmp/gh-aw/.claude - cat > /tmp/gh-aw/.claude/settings.json << 'EOF' - { - "hooks": { - "PreToolUse": [ - { - "matcher": "WebFetch|WebSearch", 
- "hooks": [ - { - "type": "command", - "command": ".claude/hooks/network_permissions.py" - } - ] - } - ] - } - } - EOF - - name: Generate Network Permissions Hook - run: | - mkdir -p .claude/hooks - cat > .claude/hooks/network_permissions.py << 'EOF' - #!/usr/bin/env python3 - """ - Network permissions validator for Claude Code engine. - Generated by gh-aw from workflow-level network configuration. - """ - - import json - import sys - import urllib.parse - import re - - # Domain allow-list (populated during generation) - # JSON string is safely parsed using json.loads() to eliminate quoting vulnerabilities - ALLOWED_DOMAINS = json.loads('''["crl3.digicert.com","crl4.digicert.com","ocsp.digicert.com","ts-crl.ws.symantec.com","ts-ocsp.ws.symantec.com","crl.geotrust.com","ocsp.geotrust.com","crl.thawte.com","ocsp.thawte.com","crl.verisign.com","ocsp.verisign.com","crl.globalsign.com","ocsp.globalsign.com","crls.ssl.com","ocsp.ssl.com","crl.identrust.com","ocsp.identrust.com","crl.sectigo.com","ocsp.sectigo.com","crl.usertrust.com","ocsp.usertrust.com","s.symcb.com","s.symcd.com","json-schema.org","json.schemastore.org","archive.ubuntu.com","security.ubuntu.com","ppa.launchpad.net","keyserver.ubuntu.com","azure.archive.ubuntu.com","api.snapcraft.io","packagecloud.io","packages.cloud.google.com","packages.microsoft.com"]''') - - def extract_domain(url_or_query): - """Extract domain from URL or search query.""" - if not url_or_query: - return None - - if url_or_query.startswith(('http://', 'https://')): - return urllib.parse.urlparse(url_or_query).netloc.lower() - - # Check for domain patterns in search queries - match = re.search(r'site:([a-zA-Z0-9.-]+\.[a-zA-Z]{2,})', url_or_query) - if match: - return match.group(1).lower() - - return None - - def is_domain_allowed(domain): - """Check if domain is allowed.""" - if not domain: - # If no domain detected, allow only if not under deny-all policy - return bool(ALLOWED_DOMAINS) # False if empty list (deny-all), True if has domains - - # Empty allowed domains means deny all - if not ALLOWED_DOMAINS: - return False - - for pattern in ALLOWED_DOMAINS: - regex = pattern.replace('.', r'\.').replace('*', '.*') - if re.match(f'^{regex}$', domain): - return True - return False - - # Main logic - try: - data = json.load(sys.stdin) - tool_name = data.get('tool_name', '') - tool_input = data.get('tool_input', {}) - - if tool_name not in ['WebFetch', 'WebSearch']: - sys.exit(0) # Allow other tools - - target = tool_input.get('url') or tool_input.get('query', '') - domain = extract_domain(target) - - # For WebSearch, apply domain restrictions consistently - # If no domain detected in search query, check if restrictions are in place - if tool_name == 'WebSearch' and not domain: - # Since this hook is only generated when network permissions are configured, - # empty ALLOWED_DOMAINS means deny-all policy - if not ALLOWED_DOMAINS: # Empty list means deny all - print(f"Network access blocked: deny-all policy in effect", file=sys.stderr) - print(f"No domains are allowed for WebSearch", file=sys.stderr) - sys.exit(2) # Block under deny-all policy - else: - print(f"Network access blocked for web-search: no specific domain detected", file=sys.stderr) - print(f"Allowed domains: {', '.join(ALLOWED_DOMAINS)}", file=sys.stderr) - sys.exit(2) # Block general searches when domain allowlist is configured - - if not is_domain_allowed(domain): - print(f"Network access blocked for domain: {domain}", file=sys.stderr) - print(f"Allowed domains: {', '.join(ALLOWED_DOMAINS)}", 
file=sys.stderr) - sys.exit(2) # Block with feedback to Claude - - sys.exit(0) # Allow - - except Exception as e: - print(f"Network validation error: {e}", file=sys.stderr) - sys.exit(2) # Block on errors - - EOF - chmod +x .claude/hooks/network_permissions.py - name: Downloading container images run: | set -e @@ -1761,7 +1652,7 @@ jobs: run: | set -o pipefail # Execute Claude Code CLI with prompt from file - claude --print --mcp-config /tmp/gh-aw/mcp-config/mcp-servers.json --allowed-tools 'Bash,BashOutput,Edit,Edit(/tmp/gh-aw/cache-memory/*),ExitPlanMode,Glob,Grep,KillBash,LS,MultiEdit,MultiEdit(/tmp/gh-aw/cache-memory/*),NotebookEdit,NotebookRead,Read,Read(/tmp/gh-aw/cache-memory/*),Task,TodoWrite,Write,Write(/tmp/gh-aw/cache-memory/*),mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_job_logs,mcp__github__get_label,mcp__github__get_latest_release,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_review_comments,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_release_by_tag,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__issue_read,mcp__github__list_branches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_dependabot_alerts,mcp__github__list_discussion_categories,mcp__github__list_discussions,mcp__github__list_issue_types,mcp__github__list_issues,mcp__github__list_label,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_releases,mcp__github__list_secret_scanning_alerts,mcp__github__list_starred_repositories,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_runs,mcp__github__list_workflows,mcp__github__pull_request_read,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_repositories,mcp__github__search_users' --debug --verbose --permission-mode bypassPermissions --output-format stream-json --settings /tmp/gh-aw/.claude/settings.json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)" 2>&1 | tee /tmp/gh-aw/agent-stdio.log + claude --print --mcp-config /tmp/gh-aw/mcp-config/mcp-servers.json --allowed-tools 
'Bash,BashOutput,Edit,Edit(/tmp/gh-aw/cache-memory/*),ExitPlanMode,Glob,Grep,KillBash,LS,MultiEdit,MultiEdit(/tmp/gh-aw/cache-memory/*),NotebookEdit,NotebookRead,Read,Read(/tmp/gh-aw/cache-memory/*),Task,TodoWrite,Write,Write(/tmp/gh-aw/cache-memory/*),mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_job_logs,mcp__github__get_label,mcp__github__get_latest_release,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_review_comments,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_release_by_tag,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__issue_read,mcp__github__list_branches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_dependabot_alerts,mcp__github__list_discussion_categories,mcp__github__list_discussions,mcp__github__list_issue_types,mcp__github__list_issues,mcp__github__list_label,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_releases,mcp__github__list_secret_scanning_alerts,mcp__github__list_starred_repositories,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_runs,mcp__github__list_workflows,mcp__github__pull_request_read,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_repositories,mcp__github__search_users' --debug --verbose --permission-mode bypassPermissions --output-format stream-json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)" 2>&1 | tee /tmp/gh-aw/agent-stdio.log env: ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }} CLAUDE_CODE_OAUTH_TOKEN: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }} @@ -1775,12 +1666,6 @@ jobs: BASH_DEFAULT_TIMEOUT_MS: "60000" BASH_MAX_TIMEOUT_MS: "60000" GH_AW_SAFE_OUTPUTS: ${{ env.GH_AW_SAFE_OUTPUTS }} - - name: Clean up network proxy hook files - if: always() - run: | - rm -rf .claude/hooks/network_permissions.py || true - rm -rf .claude/hooks || true - rm -rf .claude || true - name: Redact secrets in logs if: always() uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 diff --git a/.github/workflows/semantic-function-refactor.lock.yml b/.github/workflows/semantic-function-refactor.lock.yml index 3b5e3c917e..172a9b0978 100644 --- a/.github/workflows/semantic-function-refactor.lock.yml +++ b/.github/workflows/semantic-function-refactor.lock.yml @@ -781,115 +781,6 @@ jobs: node-version: '24' - name: Install Claude Code CLI run: npm install -g @anthropic-ai/claude-code@2.0.53 - - name: Generate Claude Settings - run: | - mkdir -p /tmp/gh-aw/.claude - cat > /tmp/gh-aw/.claude/settings.json << 'EOF' - { - "hooks": { - "PreToolUse": [ - { - "matcher": "WebFetch|WebSearch", - "hooks": [ - { - "type": "command", - "command": ".claude/hooks/network_permissions.py" - } - ] - } - ] - } - } - EOF - - name: Generate Network Permissions Hook - run: | - mkdir -p .claude/hooks - cat > .claude/hooks/network_permissions.py << 'EOF' - #!/usr/bin/env python3 - """ - Network permissions validator 
for Claude Code engine. - Generated by gh-aw from workflow-level network configuration. - """ - - import json - import sys - import urllib.parse - import re - - # Domain allow-list (populated during generation) - # JSON string is safely parsed using json.loads() to eliminate quoting vulnerabilities - ALLOWED_DOMAINS = json.loads('''["crl3.digicert.com","crl4.digicert.com","ocsp.digicert.com","ts-crl.ws.symantec.com","ts-ocsp.ws.symantec.com","crl.geotrust.com","ocsp.geotrust.com","crl.thawte.com","ocsp.thawte.com","crl.verisign.com","ocsp.verisign.com","crl.globalsign.com","ocsp.globalsign.com","crls.ssl.com","ocsp.ssl.com","crl.identrust.com","ocsp.identrust.com","crl.sectigo.com","ocsp.sectigo.com","crl.usertrust.com","ocsp.usertrust.com","s.symcb.com","s.symcd.com","json-schema.org","json.schemastore.org","archive.ubuntu.com","security.ubuntu.com","ppa.launchpad.net","keyserver.ubuntu.com","azure.archive.ubuntu.com","api.snapcraft.io","packagecloud.io","packages.cloud.google.com","packages.microsoft.com"]''') - - def extract_domain(url_or_query): - """Extract domain from URL or search query.""" - if not url_or_query: - return None - - if url_or_query.startswith(('http://', 'https://')): - return urllib.parse.urlparse(url_or_query).netloc.lower() - - # Check for domain patterns in search queries - match = re.search(r'site:([a-zA-Z0-9.-]+\.[a-zA-Z]{2,})', url_or_query) - if match: - return match.group(1).lower() - - return None - - def is_domain_allowed(domain): - """Check if domain is allowed.""" - if not domain: - # If no domain detected, allow only if not under deny-all policy - return bool(ALLOWED_DOMAINS) # False if empty list (deny-all), True if has domains - - # Empty allowed domains means deny all - if not ALLOWED_DOMAINS: - return False - - for pattern in ALLOWED_DOMAINS: - regex = pattern.replace('.', r'\.').replace('*', '.*') - if re.match(f'^{regex}$', domain): - return True - return False - - # Main logic - try: - data = json.load(sys.stdin) - tool_name = data.get('tool_name', '') - tool_input = data.get('tool_input', {}) - - if tool_name not in ['WebFetch', 'WebSearch']: - sys.exit(0) # Allow other tools - - target = tool_input.get('url') or tool_input.get('query', '') - domain = extract_domain(target) - - # For WebSearch, apply domain restrictions consistently - # If no domain detected in search query, check if restrictions are in place - if tool_name == 'WebSearch' and not domain: - # Since this hook is only generated when network permissions are configured, - # empty ALLOWED_DOMAINS means deny-all policy - if not ALLOWED_DOMAINS: # Empty list means deny all - print(f"Network access blocked: deny-all policy in effect", file=sys.stderr) - print(f"No domains are allowed for WebSearch", file=sys.stderr) - sys.exit(2) # Block under deny-all policy - else: - print(f"Network access blocked for web-search: no specific domain detected", file=sys.stderr) - print(f"Allowed domains: {', '.join(ALLOWED_DOMAINS)}", file=sys.stderr) - sys.exit(2) # Block general searches when domain allowlist is configured - - if not is_domain_allowed(domain): - print(f"Network access blocked for domain: {domain}", file=sys.stderr) - print(f"Allowed domains: {', '.join(ALLOWED_DOMAINS)}", file=sys.stderr) - sys.exit(2) # Block with feedback to Claude - - sys.exit(0) # Allow - - except Exception as e: - print(f"Network validation error: {e}", file=sys.stderr) - sys.exit(2) # Block on errors - - EOF - chmod +x .claude/hooks/network_permissions.py - name: Downloading container images run: | set -e @@ 
-2471,7 +2362,7 @@ jobs: run: | set -o pipefail # Execute Claude Code CLI with prompt from file - claude --print --mcp-config /tmp/gh-aw/mcp-config/mcp-servers.json --allowed-tools 'Bash(cat pkg/**/*.go),Bash(cat),Bash(date),Bash(echo),Bash(find pkg -name '\''*.go'\'' ! -name '\''*_test.go'\'' -type f),Bash(find pkg -type f -name '\''*.go'\'' ! -name '\''*_test.go'\''),Bash(grep -r '\''func '\'' pkg --include='\''*.go'\''),Bash(grep),Bash(head -n * pkg/**/*.go),Bash(head),Bash(ls -la pkg/),Bash(ls -la pkg/workflow/),Bash(ls),Bash(pwd),Bash(sort),Bash(tail),Bash(uniq),Bash(wc -l pkg/**/*.go),Bash(wc),Bash(yq),BashOutput,Edit,ExitPlanMode,Glob,Grep,KillBash,LS,MultiEdit,NotebookEdit,NotebookRead,Read,Task,TodoWrite,Write,mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_job_logs,mcp__github__get_label,mcp__github__get_latest_release,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_review_comments,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_release_by_tag,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__issue_read,mcp__github__list_branches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_dependabot_alerts,mcp__github__list_discussion_categories,mcp__github__list_discussions,mcp__github__list_issue_types,mcp__github__list_issues,mcp__github__list_label,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_releases,mcp__github__list_secret_scanning_alerts,mcp__github__list_starred_repositories,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_runs,mcp__github__list_workflows,mcp__github__pull_request_read,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_repositories,mcp__github__search_users' --debug --verbose --permission-mode bypassPermissions --output-format stream-json --settings /tmp/gh-aw/.claude/settings.json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)" 2>&1 | tee /tmp/gh-aw/agent-stdio.log + claude --print --mcp-config /tmp/gh-aw/mcp-config/mcp-servers.json --allowed-tools 'Bash(cat pkg/**/*.go),Bash(cat),Bash(date),Bash(echo),Bash(find pkg -name '\''*.go'\'' ! -name '\''*_test.go'\'' -type f),Bash(find pkg -type f -name '\''*.go'\'' ! 
-name '\''*_test.go'\''),Bash(grep -r '\''func '\'' pkg --include='\''*.go'\''),Bash(grep),Bash(head -n * pkg/**/*.go),Bash(head),Bash(ls -la pkg/),Bash(ls -la pkg/workflow/),Bash(ls),Bash(pwd),Bash(sort),Bash(tail),Bash(uniq),Bash(wc -l pkg/**/*.go),Bash(wc),Bash(yq),BashOutput,Edit,ExitPlanMode,Glob,Grep,KillBash,LS,MultiEdit,NotebookEdit,NotebookRead,Read,Task,TodoWrite,Write,mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_job_logs,mcp__github__get_label,mcp__github__get_latest_release,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_review_comments,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_release_by_tag,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__issue_read,mcp__github__list_branches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_dependabot_alerts,mcp__github__list_discussion_categories,mcp__github__list_discussions,mcp__github__list_issue_types,mcp__github__list_issues,mcp__github__list_label,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_releases,mcp__github__list_secret_scanning_alerts,mcp__github__list_starred_repositories,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_runs,mcp__github__list_workflows,mcp__github__pull_request_read,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_repositories,mcp__github__search_users' --debug --verbose --permission-mode bypassPermissions --output-format stream-json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)" 2>&1 | tee /tmp/gh-aw/agent-stdio.log env: ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }} CLAUDE_CODE_OAUTH_TOKEN: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }} @@ -2485,12 +2376,6 @@ jobs: BASH_DEFAULT_TIMEOUT_MS: "60000" BASH_MAX_TIMEOUT_MS: "60000" GH_AW_SAFE_OUTPUTS: ${{ env.GH_AW_SAFE_OUTPUTS }} - - name: Clean up network proxy hook files - if: always() - run: | - rm -rf .claude/hooks/network_permissions.py || true - rm -rf .claude/hooks || true - rm -rf .claude || true - name: Redact secrets in logs if: always() uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 diff --git a/.github/workflows/smoke-claude.lock.yml b/.github/workflows/smoke-claude.lock.yml index f0620c4bae..c0d93a748e 100644 --- a/.github/workflows/smoke-claude.lock.yml +++ b/.github/workflows/smoke-claude.lock.yml @@ -888,117 +888,16 @@ jobs: uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6 with: node-version: '24' + - name: Install awf binary + run: | + echo "Installing awf from release: v0.3.0" + curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.3.0/awf-linux-x64 -o awf + chmod +x awf + sudo mv awf /usr/local/bin/ + which awf + awf --version - name: Install Claude Code CLI run: npm install -g @anthropic-ai/claude-code@2.0.53 - - name: Generate Claude Settings - run: | - mkdir -p /tmp/gh-aw/.claude - cat > 
/tmp/gh-aw/.claude/settings.json << 'EOF' - { - "hooks": { - "PreToolUse": [ - { - "matcher": "WebFetch|WebSearch", - "hooks": [ - { - "type": "command", - "command": ".claude/hooks/network_permissions.py" - } - ] - } - ] - } - } - EOF - - name: Generate Network Permissions Hook - run: | - mkdir -p .claude/hooks - cat > .claude/hooks/network_permissions.py << 'EOF' - #!/usr/bin/env python3 - """ - Network permissions validator for Claude Code engine. - Generated by gh-aw from workflow-level network configuration. - """ - - import json - import sys - import urllib.parse - import re - - # Domain allow-list (populated during generation) - # JSON string is safely parsed using json.loads() to eliminate quoting vulnerabilities - ALLOWED_DOMAINS = json.loads('''["crl3.digicert.com","crl4.digicert.com","ocsp.digicert.com","ts-crl.ws.symantec.com","ts-ocsp.ws.symantec.com","crl.geotrust.com","ocsp.geotrust.com","crl.thawte.com","ocsp.thawte.com","crl.verisign.com","ocsp.verisign.com","crl.globalsign.com","ocsp.globalsign.com","crls.ssl.com","ocsp.ssl.com","crl.identrust.com","ocsp.identrust.com","crl.sectigo.com","ocsp.sectigo.com","crl.usertrust.com","ocsp.usertrust.com","s.symcb.com","s.symcd.com","json-schema.org","json.schemastore.org","archive.ubuntu.com","security.ubuntu.com","ppa.launchpad.net","keyserver.ubuntu.com","azure.archive.ubuntu.com","api.snapcraft.io","packagecloud.io","packages.cloud.google.com","packages.microsoft.com","*.githubusercontent.com","raw.githubusercontent.com","objects.githubusercontent.com","lfs.github.com","github-cloud.githubusercontent.com","github-cloud.s3.amazonaws.com","codeload.github.com","playwright.download.prss.microsoft.com","cdn.playwright.dev"]''') - - def extract_domain(url_or_query): - """Extract domain from URL or search query.""" - if not url_or_query: - return None - - if url_or_query.startswith(('http://', 'https://')): - return urllib.parse.urlparse(url_or_query).netloc.lower() - - # Check for domain patterns in search queries - match = re.search(r'site:([a-zA-Z0-9.-]+\.[a-zA-Z]{2,})', url_or_query) - if match: - return match.group(1).lower() - - return None - - def is_domain_allowed(domain): - """Check if domain is allowed.""" - if not domain: - # If no domain detected, allow only if not under deny-all policy - return bool(ALLOWED_DOMAINS) # False if empty list (deny-all), True if has domains - - # Empty allowed domains means deny all - if not ALLOWED_DOMAINS: - return False - - for pattern in ALLOWED_DOMAINS: - regex = pattern.replace('.', r'\.').replace('*', '.*') - if re.match(f'^{regex}$', domain): - return True - return False - - # Main logic - try: - data = json.load(sys.stdin) - tool_name = data.get('tool_name', '') - tool_input = data.get('tool_input', {}) - - if tool_name not in ['WebFetch', 'WebSearch']: - sys.exit(0) # Allow other tools - - target = tool_input.get('url') or tool_input.get('query', '') - domain = extract_domain(target) - - # For WebSearch, apply domain restrictions consistently - # If no domain detected in search query, check if restrictions are in place - if tool_name == 'WebSearch' and not domain: - # Since this hook is only generated when network permissions are configured, - # empty ALLOWED_DOMAINS means deny-all policy - if not ALLOWED_DOMAINS: # Empty list means deny all - print(f"Network access blocked: deny-all policy in effect", file=sys.stderr) - print(f"No domains are allowed for WebSearch", file=sys.stderr) - sys.exit(2) # Block under deny-all policy - else: - print(f"Network access blocked for web-search: 
no specific domain detected", file=sys.stderr) - print(f"Allowed domains: {', '.join(ALLOWED_DOMAINS)}", file=sys.stderr) - sys.exit(2) # Block general searches when domain allowlist is configured - - if not is_domain_allowed(domain): - print(f"Network access blocked for domain: {domain}", file=sys.stderr) - print(f"Allowed domains: {', '.join(ALLOWED_DOMAINS)}", file=sys.stderr) - sys.exit(2) # Block with feedback to Claude - - sys.exit(0) # Allow - - except Exception as e: - print(f"Network validation error: {e}", file=sys.stderr) - sys.exit(2) # Block on errors - - EOF - chmod +x .claude/hooks/network_permissions.py - name: Downloading container images run: | set -e @@ -2121,7 +2020,7 @@ jobs: event_name: context.eventName, staged: true, steps: { - firewall: "" + firewall: "squid" }, created_at: new Date().toISOString() }; @@ -2233,7 +2132,9 @@ jobs: run: | set -o pipefail # Execute Claude Code CLI with prompt from file - claude --print --max-turns 15 --mcp-config /tmp/gh-aw/mcp-config/mcp-servers.json --allowed-tools Bash,BashOutput,Edit,ExitPlanMode,Glob,Grep,KillBash,LS,MultiEdit,NotebookEdit,NotebookRead,Read,Task,TodoWrite,Write,mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_job_logs,mcp__github__get_label,mcp__github__get_latest_release,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_review_comments,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_release_by_tag,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__issue_read,mcp__github__list_branches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_dependabot_alerts,mcp__github__list_discussion_categories,mcp__github__list_discussions,mcp__github__list_issue_types,mcp__github__list_issues,mcp__github__list_label,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_releases,mcp__github__list_secret_scanning_alerts,mcp__github__list_starred_repositories,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_runs,mcp__github__list_workflows,mcp__github__pull_request_read,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_repositories,mcp__github__search_users,mcp__playwright__browser_click,mcp__playwright__browser_close,mcp__playwright__browser_console_messages,mcp__playwright__browser_drag,mcp__playwright__browser_evaluate,mcp__playwright__browser_file_upload,mcp__playwright__browser_fill_form,mcp__playwright__browser_handle_dialog,mcp__playwright__browser_hover,mcp__playwright__browser_install,mcp__playwright__browser_navigate,mcp__playwright__browser_navigate_back,mcp__playwright__browser_network_requests,mcp__playwright__browser_press_key,mcp__playwright__browser_resize,mcp__playwright__browser_select_option,mcp__playwright__browser_snapshot,mcp__playwright__browser_tabs,mcp__playwright__browser_take_screenshot,mcp__playwright__browser_type,mcp__playwright__browser_wait_for --debug 
--verbose --permission-mode bypassPermissions --output-format stream-json --settings /tmp/gh-aw/.claude/settings.json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)" 2>&1 | tee /tmp/gh-aw/agent-stdio.log + sudo -E awf --env-all --container-workdir "${GITHUB_WORKSPACE}" --mount /tmp:/tmp:rw --mount "${GITHUB_WORKSPACE}:${GITHUB_WORKSPACE}:rw" --allow-domains '*.githubusercontent.com,api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,cdn.playwright.dev,codeload.github.com,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,github-cloud.githubusercontent.com,github-cloud.s3.amazonaws.com,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,lfs.github.com,objects.githubusercontent.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,playwright.download.prss.microsoft.com,ppa.launchpad.net,raw.githubusercontent.com,s.symcb.com,s.symcd.com,security.ubuntu.com,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com' --log-level info \ + -- claude --print --max-turns 15 --mcp-config /tmp/gh-aw/mcp-config/mcp-servers.json --allowed-tools Bash,BashOutput,Edit,ExitPlanMode,Glob,Grep,KillBash,LS,MultiEdit,NotebookEdit,NotebookRead,Read,Task,TodoWrite,Write,mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_job_logs,mcp__github__get_label,mcp__github__get_latest_release,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_review_comments,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_release_by_tag,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__issue_read,mcp__github__list_branches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_dependabot_alerts,mcp__github__list_discussion_categories,mcp__github__list_discussions,mcp__github__list_issue_types,mcp__github__list_issues,mcp__github__list_label,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_releases,mcp__github__list_secret_scanning_alerts,mcp__github__list_starred_repositories,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_runs,mcp__github__list_workflows,mcp__github__pull_request_read,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_repositories,mcp__github__search_users,mcp__playwright__browser_click,mcp__playwright__browser_close,mcp__playwright__browser_console_messages,mcp__playwright__browser_drag,mcp__playwright__browser_evaluate,mcp__playwright__browser_file_upload,mcp__playwright__browser_fill_form,mcp__playwright__browser_handle_dialog,mcp__playwright__browser_hover,mcp__playwright__browser_install,mcp__playwright__browser_navigate,mcp__playwright__browser_navigate_back,mcp__playwright__browser_net
work_requests,mcp__playwright__browser_press_key,mcp__playwright__browser_resize,mcp__playwright__browser_select_option,mcp__playwright__browser_snapshot,mcp__playwright__browser_tabs,mcp__playwright__browser_take_screenshot,mcp__playwright__browser_type,mcp__playwright__browser_wait_for --debug --verbose --permission-mode bypassPermissions --output-format stream-json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)" \ + 2>&1 | tee /tmp/gh-aw/agent-stdio.log env: ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }} CLAUDE_CODE_OAUTH_TOKEN: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }} @@ -2249,12 +2150,6 @@ jobs: GH_AW_SAFE_OUTPUTS: ${{ env.GH_AW_SAFE_OUTPUTS }} GH_AW_SAFE_OUTPUTS_STAGED: "true" GH_AW_MAX_TURNS: 15 - - name: Clean up network proxy hook files - if: always() - run: | - rm -rf .claude/hooks/network_permissions.py || true - rm -rf .claude/hooks || true - rm -rf .claude || true - name: Redact secrets in logs if: always() uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 diff --git a/.github/workflows/smoke-detector.lock.yml b/.github/workflows/smoke-detector.lock.yml index f8f2e12983..38673fada8 100644 --- a/.github/workflows/smoke-detector.lock.yml +++ b/.github/workflows/smoke-detector.lock.yml @@ -1454,115 +1454,6 @@ jobs: node-version: '24' - name: Install Claude Code CLI run: npm install -g @anthropic-ai/claude-code@2.0.53 - - name: Generate Claude Settings - run: | - mkdir -p /tmp/gh-aw/.claude - cat > /tmp/gh-aw/.claude/settings.json << 'EOF' - { - "hooks": { - "PreToolUse": [ - { - "matcher": "WebFetch|WebSearch", - "hooks": [ - { - "type": "command", - "command": ".claude/hooks/network_permissions.py" - } - ] - } - ] - } - } - EOF - - name: Generate Network Permissions Hook - run: | - mkdir -p .claude/hooks - cat > .claude/hooks/network_permissions.py << 'EOF' - #!/usr/bin/env python3 - """ - Network permissions validator for Claude Code engine. - Generated by gh-aw from workflow-level network configuration. 
- """ - - import json - import sys - import urllib.parse - import re - - # Domain allow-list (populated during generation) - # JSON string is safely parsed using json.loads() to eliminate quoting vulnerabilities - ALLOWED_DOMAINS = json.loads('''["crl3.digicert.com","crl4.digicert.com","ocsp.digicert.com","ts-crl.ws.symantec.com","ts-ocsp.ws.symantec.com","crl.geotrust.com","ocsp.geotrust.com","crl.thawte.com","ocsp.thawte.com","crl.verisign.com","ocsp.verisign.com","crl.globalsign.com","ocsp.globalsign.com","crls.ssl.com","ocsp.ssl.com","crl.identrust.com","ocsp.identrust.com","crl.sectigo.com","ocsp.sectigo.com","crl.usertrust.com","ocsp.usertrust.com","s.symcb.com","s.symcd.com","json-schema.org","json.schemastore.org","archive.ubuntu.com","security.ubuntu.com","ppa.launchpad.net","keyserver.ubuntu.com","azure.archive.ubuntu.com","api.snapcraft.io","packagecloud.io","packages.cloud.google.com","packages.microsoft.com"]''') - - def extract_domain(url_or_query): - """Extract domain from URL or search query.""" - if not url_or_query: - return None - - if url_or_query.startswith(('http://', 'https://')): - return urllib.parse.urlparse(url_or_query).netloc.lower() - - # Check for domain patterns in search queries - match = re.search(r'site:([a-zA-Z0-9.-]+\.[a-zA-Z]{2,})', url_or_query) - if match: - return match.group(1).lower() - - return None - - def is_domain_allowed(domain): - """Check if domain is allowed.""" - if not domain: - # If no domain detected, allow only if not under deny-all policy - return bool(ALLOWED_DOMAINS) # False if empty list (deny-all), True if has domains - - # Empty allowed domains means deny all - if not ALLOWED_DOMAINS: - return False - - for pattern in ALLOWED_DOMAINS: - regex = pattern.replace('.', r'\.').replace('*', '.*') - if re.match(f'^{regex}$', domain): - return True - return False - - # Main logic - try: - data = json.load(sys.stdin) - tool_name = data.get('tool_name', '') - tool_input = data.get('tool_input', {}) - - if tool_name not in ['WebFetch', 'WebSearch']: - sys.exit(0) # Allow other tools - - target = tool_input.get('url') or tool_input.get('query', '') - domain = extract_domain(target) - - # For WebSearch, apply domain restrictions consistently - # If no domain detected in search query, check if restrictions are in place - if tool_name == 'WebSearch' and not domain: - # Since this hook is only generated when network permissions are configured, - # empty ALLOWED_DOMAINS means deny-all policy - if not ALLOWED_DOMAINS: # Empty list means deny all - print(f"Network access blocked: deny-all policy in effect", file=sys.stderr) - print(f"No domains are allowed for WebSearch", file=sys.stderr) - sys.exit(2) # Block under deny-all policy - else: - print(f"Network access blocked for web-search: no specific domain detected", file=sys.stderr) - print(f"Allowed domains: {', '.join(ALLOWED_DOMAINS)}", file=sys.stderr) - sys.exit(2) # Block general searches when domain allowlist is configured - - if not is_domain_allowed(domain): - print(f"Network access blocked for domain: {domain}", file=sys.stderr) - print(f"Allowed domains: {', '.join(ALLOWED_DOMAINS)}", file=sys.stderr) - sys.exit(2) # Block with feedback to Claude - - sys.exit(0) # Allow - - except Exception as e: - print(f"Network validation error: {e}", file=sys.stderr) - sys.exit(2) # Block on errors - - EOF - chmod +x .claude/hooks/network_permissions.py - name: Downloading container images run: | set -e @@ -2952,7 +2843,7 @@ jobs: run: | set -o pipefail # Execute Claude Code CLI with prompt from 
file - claude --print --mcp-config /tmp/gh-aw/mcp-config/mcp-servers.json --allowed-tools 'Edit(/tmp/gh-aw/cache-memory/*),ExitPlanMode,Glob,Grep,LS,MultiEdit(/tmp/gh-aw/cache-memory/*),NotebookRead,Read,Read(/tmp/gh-aw/cache-memory/*),Task,TodoWrite,Write,Write(/tmp/gh-aw/cache-memory/*),mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_job_logs,mcp__github__get_label,mcp__github__get_latest_release,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_review_comments,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_release_by_tag,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__issue_read,mcp__github__list_branches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_dependabot_alerts,mcp__github__list_discussion_categories,mcp__github__list_discussions,mcp__github__list_issue_types,mcp__github__list_issues,mcp__github__list_label,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_releases,mcp__github__list_secret_scanning_alerts,mcp__github__list_starred_repositories,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_runs,mcp__github__list_workflows,mcp__github__pull_request_read,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_repositories,mcp__github__search_users' --debug --verbose --permission-mode bypassPermissions --output-format stream-json --settings /tmp/gh-aw/.claude/settings.json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)" 2>&1 | tee /tmp/gh-aw/agent-stdio.log + claude --print --mcp-config /tmp/gh-aw/mcp-config/mcp-servers.json --allowed-tools 
'Edit(/tmp/gh-aw/cache-memory/*),ExitPlanMode,Glob,Grep,LS,MultiEdit(/tmp/gh-aw/cache-memory/*),NotebookRead,Read,Read(/tmp/gh-aw/cache-memory/*),Task,TodoWrite,Write,Write(/tmp/gh-aw/cache-memory/*),mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_job_logs,mcp__github__get_label,mcp__github__get_latest_release,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_review_comments,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_release_by_tag,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__issue_read,mcp__github__list_branches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_dependabot_alerts,mcp__github__list_discussion_categories,mcp__github__list_discussions,mcp__github__list_issue_types,mcp__github__list_issues,mcp__github__list_label,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_releases,mcp__github__list_secret_scanning_alerts,mcp__github__list_starred_repositories,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_runs,mcp__github__list_workflows,mcp__github__pull_request_read,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_repositories,mcp__github__search_users' --debug --verbose --permission-mode bypassPermissions --output-format stream-json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)" 2>&1 | tee /tmp/gh-aw/agent-stdio.log env: ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }} CLAUDE_CODE_OAUTH_TOKEN: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }} @@ -2966,12 +2857,6 @@ jobs: BASH_DEFAULT_TIMEOUT_MS: "60000" BASH_MAX_TIMEOUT_MS: "60000" GH_AW_SAFE_OUTPUTS: ${{ env.GH_AW_SAFE_OUTPUTS }} - - name: Clean up network proxy hook files - if: always() - run: | - rm -rf .claude/hooks/network_permissions.py || true - rm -rf .claude/hooks || true - rm -rf .claude || true - name: Redact secrets in logs if: always() uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 diff --git a/.github/workflows/static-analysis-report.lock.yml b/.github/workflows/static-analysis-report.lock.yml index 5a8f0b86f6..ff5f42a239 100644 --- a/.github/workflows/static-analysis-report.lock.yml +++ b/.github/workflows/static-analysis-report.lock.yml @@ -729,115 +729,6 @@ jobs: node-version: '24' - name: Install Claude Code CLI run: npm install -g @anthropic-ai/claude-code@2.0.53 - - name: Generate Claude Settings - run: | - mkdir -p /tmp/gh-aw/.claude - cat > /tmp/gh-aw/.claude/settings.json << 'EOF' - { - "hooks": { - "PreToolUse": [ - { - "matcher": "WebFetch|WebSearch", - "hooks": [ - { - "type": "command", - "command": ".claude/hooks/network_permissions.py" - } - ] - } - ] - } - } - EOF - - name: Generate Network Permissions Hook - run: | - mkdir -p .claude/hooks - cat > .claude/hooks/network_permissions.py << 'EOF' - #!/usr/bin/env python3 - """ - Network permissions validator for Claude Code engine. 
- Generated by gh-aw from workflow-level network configuration. - """ - - import json - import sys - import urllib.parse - import re - - # Domain allow-list (populated during generation) - # JSON string is safely parsed using json.loads() to eliminate quoting vulnerabilities - ALLOWED_DOMAINS = json.loads('''["crl3.digicert.com","crl4.digicert.com","ocsp.digicert.com","ts-crl.ws.symantec.com","ts-ocsp.ws.symantec.com","crl.geotrust.com","ocsp.geotrust.com","crl.thawte.com","ocsp.thawte.com","crl.verisign.com","ocsp.verisign.com","crl.globalsign.com","ocsp.globalsign.com","crls.ssl.com","ocsp.ssl.com","crl.identrust.com","ocsp.identrust.com","crl.sectigo.com","ocsp.sectigo.com","crl.usertrust.com","ocsp.usertrust.com","s.symcb.com","s.symcd.com","json-schema.org","json.schemastore.org","archive.ubuntu.com","security.ubuntu.com","ppa.launchpad.net","keyserver.ubuntu.com","azure.archive.ubuntu.com","api.snapcraft.io","packagecloud.io","packages.cloud.google.com","packages.microsoft.com"]''') - - def extract_domain(url_or_query): - """Extract domain from URL or search query.""" - if not url_or_query: - return None - - if url_or_query.startswith(('http://', 'https://')): - return urllib.parse.urlparse(url_or_query).netloc.lower() - - # Check for domain patterns in search queries - match = re.search(r'site:([a-zA-Z0-9.-]+\.[a-zA-Z]{2,})', url_or_query) - if match: - return match.group(1).lower() - - return None - - def is_domain_allowed(domain): - """Check if domain is allowed.""" - if not domain: - # If no domain detected, allow only if not under deny-all policy - return bool(ALLOWED_DOMAINS) # False if empty list (deny-all), True if has domains - - # Empty allowed domains means deny all - if not ALLOWED_DOMAINS: - return False - - for pattern in ALLOWED_DOMAINS: - regex = pattern.replace('.', r'\.').replace('*', '.*') - if re.match(f'^{regex}$', domain): - return True - return False - - # Main logic - try: - data = json.load(sys.stdin) - tool_name = data.get('tool_name', '') - tool_input = data.get('tool_input', {}) - - if tool_name not in ['WebFetch', 'WebSearch']: - sys.exit(0) # Allow other tools - - target = tool_input.get('url') or tool_input.get('query', '') - domain = extract_domain(target) - - # For WebSearch, apply domain restrictions consistently - # If no domain detected in search query, check if restrictions are in place - if tool_name == 'WebSearch' and not domain: - # Since this hook is only generated when network permissions are configured, - # empty ALLOWED_DOMAINS means deny-all policy - if not ALLOWED_DOMAINS: # Empty list means deny all - print(f"Network access blocked: deny-all policy in effect", file=sys.stderr) - print(f"No domains are allowed for WebSearch", file=sys.stderr) - sys.exit(2) # Block under deny-all policy - else: - print(f"Network access blocked for web-search: no specific domain detected", file=sys.stderr) - print(f"Allowed domains: {', '.join(ALLOWED_DOMAINS)}", file=sys.stderr) - sys.exit(2) # Block general searches when domain allowlist is configured - - if not is_domain_allowed(domain): - print(f"Network access blocked for domain: {domain}", file=sys.stderr) - print(f"Allowed domains: {', '.join(ALLOWED_DOMAINS)}", file=sys.stderr) - sys.exit(2) # Block with feedback to Claude - - sys.exit(0) # Allow - - except Exception as e: - print(f"Network validation error: {e}", file=sys.stderr) - sys.exit(2) # Block on errors - - EOF - chmod +x .claude/hooks/network_permissions.py - name: Downloading container images run: | set -e @@ -2318,7 +2209,7 @@ jobs: run: 
| set -o pipefail # Execute Claude Code CLI with prompt from file - claude --print --mcp-config /tmp/gh-aw/mcp-config/mcp-servers.json --allowed-tools 'Edit(/tmp/gh-aw/cache-memory/*),ExitPlanMode,Glob,Grep,LS,MultiEdit(/tmp/gh-aw/cache-memory/*),NotebookRead,Read,Read(/tmp/gh-aw/cache-memory/*),Task,TodoWrite,Write,Write(/tmp/gh-aw/cache-memory/*),mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_job_logs,mcp__github__get_label,mcp__github__get_latest_release,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_review_comments,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_release_by_tag,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__issue_read,mcp__github__list_branches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_dependabot_alerts,mcp__github__list_discussion_categories,mcp__github__list_discussions,mcp__github__list_issue_types,mcp__github__list_issues,mcp__github__list_label,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_releases,mcp__github__list_secret_scanning_alerts,mcp__github__list_starred_repositories,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_runs,mcp__github__list_workflows,mcp__github__pull_request_read,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_repositories,mcp__github__search_users' --debug --verbose --permission-mode bypassPermissions --output-format stream-json --settings /tmp/gh-aw/.claude/settings.json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)" 2>&1 | tee /tmp/gh-aw/agent-stdio.log + claude --print --mcp-config /tmp/gh-aw/mcp-config/mcp-servers.json --allowed-tools 
'Edit(/tmp/gh-aw/cache-memory/*),ExitPlanMode,Glob,Grep,LS,MultiEdit(/tmp/gh-aw/cache-memory/*),NotebookRead,Read,Read(/tmp/gh-aw/cache-memory/*),Task,TodoWrite,Write,Write(/tmp/gh-aw/cache-memory/*),mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_job_logs,mcp__github__get_label,mcp__github__get_latest_release,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_review_comments,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_release_by_tag,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__issue_read,mcp__github__list_branches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_dependabot_alerts,mcp__github__list_discussion_categories,mcp__github__list_discussions,mcp__github__list_issue_types,mcp__github__list_issues,mcp__github__list_label,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_releases,mcp__github__list_secret_scanning_alerts,mcp__github__list_starred_repositories,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_runs,mcp__github__list_workflows,mcp__github__pull_request_read,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_repositories,mcp__github__search_users' --debug --verbose --permission-mode bypassPermissions --output-format stream-json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)" 2>&1 | tee /tmp/gh-aw/agent-stdio.log env: ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }} CLAUDE_CODE_OAUTH_TOKEN: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }} @@ -2333,12 +2224,6 @@ jobs: BASH_MAX_TIMEOUT_MS: "300000" GH_AW_SAFE_OUTPUTS: ${{ env.GH_AW_SAFE_OUTPUTS }} GH_AW_TOOL_TIMEOUT: "300" - - name: Clean up network proxy hook files - if: always() - run: | - rm -rf .claude/hooks/network_permissions.py || true - rm -rf .claude/hooks || true - rm -rf .claude || true - name: Redact secrets in logs if: always() uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 diff --git a/.github/workflows/test-firewall-escape.lock.yml b/.github/workflows/test-firewall-escape.lock.yml index 7fa196d30d..828413befc 100644 --- a/.github/workflows/test-firewall-escape.lock.yml +++ b/.github/workflows/test-firewall-escape.lock.yml @@ -2583,7 +2583,7 @@ jobs: } - if: failure() name: Check test results and create issue on failure - uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + uses: actions/github-script@f28e40c7f34bde8b3046d885e986cb6290c5673b # v7 with: script: "await github.rest.issues.create({\n owner: context.repo.owner,\n repo: context.repo.repo,\n title: '[Firewall Test] Escape test failed',\n body: `## Firewall Escape Test Failure\n\n**Workflow Run:** ${context.serverUrl}/${context.repo.owner}/${context.repo.repo}/actions/runs/${context.runId}\n\nThe firewall escape test detected an issue. 
Please review the workflow logs for details.\n\n### Possible Issues\n- A forbidden domain was accessible (firewall escape vulnerability)\n- An allowed domain was blocked unexpectedly\n- File operations failed\n" diff --git a/.github/workflows/typist.lock.yml b/.github/workflows/typist.lock.yml index 816f229718..869ad88689 100644 --- a/.github/workflows/typist.lock.yml +++ b/.github/workflows/typist.lock.yml @@ -829,115 +829,6 @@ jobs: node-version: '24' - name: Install Claude Code CLI run: npm install -g @anthropic-ai/claude-code@2.0.53 - - name: Generate Claude Settings - run: | - mkdir -p /tmp/gh-aw/.claude - cat > /tmp/gh-aw/.claude/settings.json << 'EOF' - { - "hooks": { - "PreToolUse": [ - { - "matcher": "WebFetch|WebSearch", - "hooks": [ - { - "type": "command", - "command": ".claude/hooks/network_permissions.py" - } - ] - } - ] - } - } - EOF - - name: Generate Network Permissions Hook - run: | - mkdir -p .claude/hooks - cat > .claude/hooks/network_permissions.py << 'EOF' - #!/usr/bin/env python3 - """ - Network permissions validator for Claude Code engine. - Generated by gh-aw from workflow-level network configuration. - """ - - import json - import sys - import urllib.parse - import re - - # Domain allow-list (populated during generation) - # JSON string is safely parsed using json.loads() to eliminate quoting vulnerabilities - ALLOWED_DOMAINS = json.loads('''["crl3.digicert.com","crl4.digicert.com","ocsp.digicert.com","ts-crl.ws.symantec.com","ts-ocsp.ws.symantec.com","crl.geotrust.com","ocsp.geotrust.com","crl.thawte.com","ocsp.thawte.com","crl.verisign.com","ocsp.verisign.com","crl.globalsign.com","ocsp.globalsign.com","crls.ssl.com","ocsp.ssl.com","crl.identrust.com","ocsp.identrust.com","crl.sectigo.com","ocsp.sectigo.com","crl.usertrust.com","ocsp.usertrust.com","s.symcb.com","s.symcd.com","json-schema.org","json.schemastore.org","archive.ubuntu.com","security.ubuntu.com","ppa.launchpad.net","keyserver.ubuntu.com","azure.archive.ubuntu.com","api.snapcraft.io","packagecloud.io","packages.cloud.google.com","packages.microsoft.com"]''') - - def extract_domain(url_or_query): - """Extract domain from URL or search query.""" - if not url_or_query: - return None - - if url_or_query.startswith(('http://', 'https://')): - return urllib.parse.urlparse(url_or_query).netloc.lower() - - # Check for domain patterns in search queries - match = re.search(r'site:([a-zA-Z0-9.-]+\.[a-zA-Z]{2,})', url_or_query) - if match: - return match.group(1).lower() - - return None - - def is_domain_allowed(domain): - """Check if domain is allowed.""" - if not domain: - # If no domain detected, allow only if not under deny-all policy - return bool(ALLOWED_DOMAINS) # False if empty list (deny-all), True if has domains - - # Empty allowed domains means deny all - if not ALLOWED_DOMAINS: - return False - - for pattern in ALLOWED_DOMAINS: - regex = pattern.replace('.', r'\.').replace('*', '.*') - if re.match(f'^{regex}$', domain): - return True - return False - - # Main logic - try: - data = json.load(sys.stdin) - tool_name = data.get('tool_name', '') - tool_input = data.get('tool_input', {}) - - if tool_name not in ['WebFetch', 'WebSearch']: - sys.exit(0) # Allow other tools - - target = tool_input.get('url') or tool_input.get('query', '') - domain = extract_domain(target) - - # For WebSearch, apply domain restrictions consistently - # If no domain detected in search query, check if restrictions are in place - if tool_name == 'WebSearch' and not domain: - # Since this hook is only generated when network 
permissions are configured, - # empty ALLOWED_DOMAINS means deny-all policy - if not ALLOWED_DOMAINS: # Empty list means deny all - print(f"Network access blocked: deny-all policy in effect", file=sys.stderr) - print(f"No domains are allowed for WebSearch", file=sys.stderr) - sys.exit(2) # Block under deny-all policy - else: - print(f"Network access blocked for web-search: no specific domain detected", file=sys.stderr) - print(f"Allowed domains: {', '.join(ALLOWED_DOMAINS)}", file=sys.stderr) - sys.exit(2) # Block general searches when domain allowlist is configured - - if not is_domain_allowed(domain): - print(f"Network access blocked for domain: {domain}", file=sys.stderr) - print(f"Allowed domains: {', '.join(ALLOWED_DOMAINS)}", file=sys.stderr) - sys.exit(2) # Block with feedback to Claude - - sys.exit(0) # Allow - - except Exception as e: - print(f"Network validation error: {e}", file=sys.stderr) - sys.exit(2) # Block on errors - - EOF - chmod +x .claude/hooks/network_permissions.py - name: Downloading container images run: | set -e @@ -2565,7 +2456,7 @@ jobs: run: | set -o pipefail # Execute Claude Code CLI with prompt from file - claude --print --mcp-config /tmp/gh-aw/mcp-config/mcp-servers.json --allowed-tools 'Bash(cat pkg/**/*.go),Bash(cat),Bash(date),Bash(echo),Bash(find pkg -name '\''*.go'\'' ! -name '\''*_test.go'\'' -type f),Bash(find pkg -type f -name '\''*.go'\'' ! -name '\''*_test.go'\''),Bash(grep -r '\''\bany\b'\'' pkg --include='\''*.go'\''),Bash(grep -r '\''interface{}'\'' pkg --include='\''*.go'\''),Bash(grep -r '\''type '\'' pkg --include='\''*.go'\''),Bash(grep),Bash(head),Bash(ls -la pkg/),Bash(ls),Bash(pwd),Bash(sort),Bash(tail),Bash(uniq),Bash(wc -l pkg/**/*.go),Bash(wc),Bash(yq),BashOutput,Edit,ExitPlanMode,Glob,Grep,KillBash,LS,MultiEdit,NotebookEdit,NotebookRead,Read,Task,TodoWrite,Write,mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_job_logs,mcp__github__get_label,mcp__github__get_latest_release,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_review_comments,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_release_by_tag,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__issue_read,mcp__github__list_branches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_dependabot_alerts,mcp__github__list_discussion_categories,mcp__github__list_discussions,mcp__github__list_issue_types,mcp__github__list_issues,mcp__github__list_label,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_releases,mcp__github__list_secret_scanning_alerts,mcp__github__list_starred_repositories,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_runs,mcp__github__list_workflows,mcp__github__pull_request_read,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_repositories,mcp__github__search_users' --debug --verbose --permission-mode 
bypassPermissions --output-format stream-json --settings /tmp/gh-aw/.claude/settings.json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)" 2>&1 | tee /tmp/gh-aw/agent-stdio.log + claude --print --mcp-config /tmp/gh-aw/mcp-config/mcp-servers.json --allowed-tools 'Bash(cat pkg/**/*.go),Bash(cat),Bash(date),Bash(echo),Bash(find pkg -name '\''*.go'\'' ! -name '\''*_test.go'\'' -type f),Bash(find pkg -type f -name '\''*.go'\'' ! -name '\''*_test.go'\''),Bash(grep -r '\''\bany\b'\'' pkg --include='\''*.go'\''),Bash(grep -r '\''interface{}'\'' pkg --include='\''*.go'\''),Bash(grep -r '\''type '\'' pkg --include='\''*.go'\''),Bash(grep),Bash(head),Bash(ls -la pkg/),Bash(ls),Bash(pwd),Bash(sort),Bash(tail),Bash(uniq),Bash(wc -l pkg/**/*.go),Bash(wc),Bash(yq),BashOutput,Edit,ExitPlanMode,Glob,Grep,KillBash,LS,MultiEdit,NotebookEdit,NotebookRead,Read,Task,TodoWrite,Write,mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_job_logs,mcp__github__get_label,mcp__github__get_latest_release,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_review_comments,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_release_by_tag,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__issue_read,mcp__github__list_branches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_dependabot_alerts,mcp__github__list_discussion_categories,mcp__github__list_discussions,mcp__github__list_issue_types,mcp__github__list_issues,mcp__github__list_label,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_releases,mcp__github__list_secret_scanning_alerts,mcp__github__list_starred_repositories,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_runs,mcp__github__list_workflows,mcp__github__pull_request_read,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_repositories,mcp__github__search_users' --debug --verbose --permission-mode bypassPermissions --output-format stream-json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)" 2>&1 | tee /tmp/gh-aw/agent-stdio.log env: ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }} CLAUDE_CODE_OAUTH_TOKEN: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }} @@ -2579,12 +2470,6 @@ jobs: BASH_DEFAULT_TIMEOUT_MS: "60000" BASH_MAX_TIMEOUT_MS: "60000" GH_AW_SAFE_OUTPUTS: ${{ env.GH_AW_SAFE_OUTPUTS }} - - name: Clean up network proxy hook files - if: always() - run: | - rm -rf .claude/hooks/network_permissions.py || true - rm -rf .claude/hooks || true - rm -rf .claude || true - name: Redact secrets in logs if: always() uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 diff --git a/.github/workflows/unbloat-docs.lock.yml b/.github/workflows/unbloat-docs.lock.yml index b42b338929..64bfc6e321 100644 --- a/.github/workflows/unbloat-docs.lock.yml +++ b/.github/workflows/unbloat-docs.lock.yml @@ -1429,117 +1429,16 @@ jobs: uses: 
actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6 with: node-version: '24' + - name: Install awf binary + run: | + echo "Installing awf from release: v0.3.0" + curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.3.0/awf-linux-x64 -o awf + chmod +x awf + sudo mv awf /usr/local/bin/ + which awf + awf --version - name: Install Claude Code CLI run: npm install -g @anthropic-ai/claude-code@2.0.53 - - name: Generate Claude Settings - run: | - mkdir -p /tmp/gh-aw/.claude - cat > /tmp/gh-aw/.claude/settings.json << 'EOF' - { - "hooks": { - "PreToolUse": [ - { - "matcher": "WebFetch|WebSearch", - "hooks": [ - { - "type": "command", - "command": ".claude/hooks/network_permissions.py" - } - ] - } - ] - } - } - EOF - - name: Generate Network Permissions Hook - run: | - mkdir -p .claude/hooks - cat > .claude/hooks/network_permissions.py << 'EOF' - #!/usr/bin/env python3 - """ - Network permissions validator for Claude Code engine. - Generated by gh-aw from workflow-level network configuration. - """ - - import json - import sys - import urllib.parse - import re - - # Domain allow-list (populated during generation) - # JSON string is safely parsed using json.loads() to eliminate quoting vulnerabilities - ALLOWED_DOMAINS = json.loads('''["crl3.digicert.com","crl4.digicert.com","ocsp.digicert.com","ts-crl.ws.symantec.com","ts-ocsp.ws.symantec.com","crl.geotrust.com","ocsp.geotrust.com","crl.thawte.com","ocsp.thawte.com","crl.verisign.com","ocsp.verisign.com","crl.globalsign.com","ocsp.globalsign.com","crls.ssl.com","ocsp.ssl.com","crl.identrust.com","ocsp.identrust.com","crl.sectigo.com","ocsp.sectigo.com","crl.usertrust.com","ocsp.usertrust.com","s.symcb.com","s.symcd.com","json-schema.org","json.schemastore.org","archive.ubuntu.com","security.ubuntu.com","ppa.launchpad.net","keyserver.ubuntu.com","azure.archive.ubuntu.com","api.snapcraft.io","packagecloud.io","packages.cloud.google.com","packages.microsoft.com","*.githubusercontent.com","raw.githubusercontent.com","objects.githubusercontent.com","lfs.github.com","github-cloud.githubusercontent.com","github-cloud.s3.amazonaws.com","codeload.github.com"]''') - - def extract_domain(url_or_query): - """Extract domain from URL or search query.""" - if not url_or_query: - return None - - if url_or_query.startswith(('http://', 'https://')): - return urllib.parse.urlparse(url_or_query).netloc.lower() - - # Check for domain patterns in search queries - match = re.search(r'site:([a-zA-Z0-9.-]+\.[a-zA-Z]{2,})', url_or_query) - if match: - return match.group(1).lower() - - return None - - def is_domain_allowed(domain): - """Check if domain is allowed.""" - if not domain: - # If no domain detected, allow only if not under deny-all policy - return bool(ALLOWED_DOMAINS) # False if empty list (deny-all), True if has domains - - # Empty allowed domains means deny all - if not ALLOWED_DOMAINS: - return False - - for pattern in ALLOWED_DOMAINS: - regex = pattern.replace('.', r'\.').replace('*', '.*') - if re.match(f'^{regex}$', domain): - return True - return False - - # Main logic - try: - data = json.load(sys.stdin) - tool_name = data.get('tool_name', '') - tool_input = data.get('tool_input', {}) - - if tool_name not in ['WebFetch', 'WebSearch']: - sys.exit(0) # Allow other tools - - target = tool_input.get('url') or tool_input.get('query', '') - domain = extract_domain(target) - - # For WebSearch, apply domain restrictions consistently - # If no domain detected in search query, check if restrictions are in place - if tool_name == 
'WebSearch' and not domain: - # Since this hook is only generated when network permissions are configured, - # empty ALLOWED_DOMAINS means deny-all policy - if not ALLOWED_DOMAINS: # Empty list means deny all - print(f"Network access blocked: deny-all policy in effect", file=sys.stderr) - print(f"No domains are allowed for WebSearch", file=sys.stderr) - sys.exit(2) # Block under deny-all policy - else: - print(f"Network access blocked for web-search: no specific domain detected", file=sys.stderr) - print(f"Allowed domains: {', '.join(ALLOWED_DOMAINS)}", file=sys.stderr) - sys.exit(2) # Block general searches when domain allowlist is configured - - if not is_domain_allowed(domain): - print(f"Network access blocked for domain: {domain}", file=sys.stderr) - print(f"Allowed domains: {', '.join(ALLOWED_DOMAINS)}", file=sys.stderr) - sys.exit(2) # Block with feedback to Claude - - sys.exit(0) # Allow - - except Exception as e: - print(f"Network validation error: {e}", file=sys.stderr) - sys.exit(2) # Block on errors - - EOF - chmod +x .claude/hooks/network_permissions.py - name: Downloading container images run: | set -e @@ -2862,7 +2761,7 @@ jobs: event_name: context.eventName, staged: false, steps: { - firewall: "" + firewall: "squid" }, created_at: new Date().toISOString() }; @@ -3012,7 +2911,9 @@ jobs: run: | set -o pipefail # Execute Claude Code CLI with prompt from file - claude --print --max-turns 90 --mcp-config /tmp/gh-aw/mcp-config/mcp-servers.json --allowed-tools 'Bash(cat *),Bash(cat),Bash(cd *),Bash(cp *),Bash(curl *),Bash(date),Bash(echo),Bash(find docs/src/content/docs -name '\''*.md'\''),Bash(git add:*),Bash(git branch:*),Bash(git checkout:*),Bash(git commit:*),Bash(git merge:*),Bash(git rm:*),Bash(git status),Bash(git switch:*),Bash(grep -n *),Bash(grep),Bash(head *),Bash(head),Bash(kill *),Bash(ls),Bash(mkdir *),Bash(mv *),Bash(node *),Bash(ps *),Bash(pwd),Bash(sleep *),Bash(sort),Bash(tail *),Bash(tail),Bash(uniq),Bash(wc -l 
*),Bash(wc),Bash(yq),BashOutput,Edit,Edit(/tmp/gh-aw/cache-memory/*),ExitPlanMode,Glob,Grep,KillBash,LS,MultiEdit,MultiEdit(/tmp/gh-aw/cache-memory/*),NotebookEdit,NotebookRead,Read,Read(/tmp/gh-aw/cache-memory/*),Task,TodoWrite,Write,Write(/tmp/gh-aw/cache-memory/*),mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_job_logs,mcp__github__get_label,mcp__github__get_latest_release,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_review_comments,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_release_by_tag,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__issue_read,mcp__github__list_branches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_dependabot_alerts,mcp__github__list_discussion_categories,mcp__github__list_discussions,mcp__github__list_issue_types,mcp__github__list_issues,mcp__github__list_label,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_releases,mcp__github__list_secret_scanning_alerts,mcp__github__list_starred_repositories,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_runs,mcp__github__list_workflows,mcp__github__pull_request_read,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_repositories,mcp__github__search_users,mcp__playwright__browser_click,mcp__playwright__browser_close,mcp__playwright__browser_console_messages,mcp__playwright__browser_drag,mcp__playwright__browser_evaluate,mcp__playwright__browser_file_upload,mcp__playwright__browser_fill_form,mcp__playwright__browser_handle_dialog,mcp__playwright__browser_hover,mcp__playwright__browser_install,mcp__playwright__browser_navigate,mcp__playwright__browser_navigate_back,mcp__playwright__browser_network_requests,mcp__playwright__browser_press_key,mcp__playwright__browser_resize,mcp__playwright__browser_select_option,mcp__playwright__browser_snapshot,mcp__playwright__browser_tabs,mcp__playwright__browser_take_screenshot,mcp__playwright__browser_type,mcp__playwright__browser_wait_for' --debug --verbose --permission-mode bypassPermissions --output-format stream-json --settings /tmp/gh-aw/.claude/settings.json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)" 2>&1 | tee /tmp/gh-aw/agent-stdio.log + sudo -E awf --env-all --container-workdir "${GITHUB_WORKSPACE}" --mount /tmp:/tmp:rw --mount "${GITHUB_WORKSPACE}:${GITHUB_WORKSPACE}:rw" --allow-domains 
'*.githubusercontent.com,api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,codeload.github.com,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,github-cloud.githubusercontent.com,github-cloud.s3.amazonaws.com,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,lfs.github.com,objects.githubusercontent.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,ppa.launchpad.net,raw.githubusercontent.com,s.symcb.com,s.symcd.com,security.ubuntu.com,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com' --log-level info \ + -- claude --print --max-turns 90 --mcp-config /tmp/gh-aw/mcp-config/mcp-servers.json --allowed-tools 'Bash(cat *),Bash(cat),Bash(cd *),Bash(cp *),Bash(curl *),Bash(date),Bash(echo),Bash(find docs/src/content/docs -name '\''*.md'\''),Bash(git add:*),Bash(git branch:*),Bash(git checkout:*),Bash(git commit:*),Bash(git merge:*),Bash(git rm:*),Bash(git status),Bash(git switch:*),Bash(grep -n *),Bash(grep),Bash(head *),Bash(head),Bash(kill *),Bash(ls),Bash(mkdir *),Bash(mv *),Bash(node *),Bash(ps *),Bash(pwd),Bash(sleep *),Bash(sort),Bash(tail *),Bash(tail),Bash(uniq),Bash(wc -l *),Bash(wc),Bash(yq),BashOutput,Edit,Edit(/tmp/gh-aw/cache-memory/*),ExitPlanMode,Glob,Grep,KillBash,LS,MultiEdit,MultiEdit(/tmp/gh-aw/cache-memory/*),NotebookEdit,NotebookRead,Read,Read(/tmp/gh-aw/cache-memory/*),Task,TodoWrite,Write,Write(/tmp/gh-aw/cache-memory/*),mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_job_logs,mcp__github__get_label,mcp__github__get_latest_release,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_review_comments,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_release_by_tag,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__issue_read,mcp__github__list_branches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_dependabot_alerts,mcp__github__list_discussion_categories,mcp__github__list_discussions,mcp__github__list_issue_types,mcp__github__list_issues,mcp__github__list_label,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_releases,mcp__github__list_secret_scanning_alerts,mcp__github__list_starred_repositories,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_runs,mcp__github__list_workflows,mcp__github__pull_request_read,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_repositories,mcp__github__search_users,mcp__playwright__browser_click,mcp__playwright__browser_close,mcp__playwright__browser_console_messages,mcp__playwright__browser_drag,mcp__playwright__browser_evaluate,mcp__playwright__browser_file_upload,mcp__playwright__brows
er_fill_form,mcp__playwright__browser_handle_dialog,mcp__playwright__browser_hover,mcp__playwright__browser_install,mcp__playwright__browser_navigate,mcp__playwright__browser_navigate_back,mcp__playwright__browser_network_requests,mcp__playwright__browser_press_key,mcp__playwright__browser_resize,mcp__playwright__browser_select_option,mcp__playwright__browser_snapshot,mcp__playwright__browser_tabs,mcp__playwright__browser_take_screenshot,mcp__playwright__browser_type,mcp__playwright__browser_wait_for' --debug --verbose --permission-mode bypassPermissions --output-format stream-json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)" \ + 2>&1 | tee /tmp/gh-aw/agent-stdio.log env: ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }} CLAUDE_CODE_OAUTH_TOKEN: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }} @@ -3030,12 +2931,6 @@ jobs: GH_AW_ASSETS_MAX_SIZE_KB: 10240 GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg" GH_AW_MAX_TURNS: 90 - - name: Clean up network proxy hook files - if: always() - run: | - rm -rf .claude/hooks/network_permissions.py || true - rm -rf .claude/hooks || true - rm -rf .claude || true - name: Redact secrets in logs if: always() uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 diff --git a/pkg/workflow/agentic_output_test.go b/pkg/workflow/agentic_output_test.go index aba1428204..4fe17eb586 100644 --- a/pkg/workflow/agentic_output_test.go +++ b/pkg/workflow/agentic_output_test.go @@ -295,144 +295,6 @@ This workflow tests that /tmp/gh-aw/ h-aw/ files are excluded from cleanup. t.Log("Successfully verified that /tmp/gh-aw/ files are excluded from cleanup step while still being uploaded as artifacts") } -func TestClaudeEngineNetworkHookCleanup(t *testing.T) { - engine := NewClaudeEngine() - - t.Run("Network hook cleanup with Claude engine and network permissions", func(t *testing.T) { - // Test data with Claude engine and network permissions - data := &WorkflowData{ - Name: "test-workflow", - EngineConfig: &EngineConfig{ - ID: "claude", - Model: "claude-3-5-sonnet-20241022", - }, - NetworkPermissions: &NetworkPermissions{ - Allowed: []string{"example.com", "*.trusted.com"}, - }, - } - - steps := engine.GetExecutionSteps(data, "/tmp/gh-aw/th-aw/test.log") - - // Convert all steps to string for analysis - var allStepsStr strings.Builder - for _, step := range steps { - allStepsStr.WriteString(strings.Join(step, "\n")) - allStepsStr.WriteString("\n") - } - result := allStepsStr.String() - - // Verify cleanup step is generated - if !strings.Contains(result, "- name: Clean up network proxy hook files") { - t.Error("Expected cleanup step to be generated with Claude engine and network permissions") - } - - // Verify if: always() condition - if !strings.Contains(result, "if: always()") { - t.Error("Expected cleanup step to have 'if: always()' condition") - } - - // Verify cleanup commands - if !strings.Contains(result, "rm -rf .claude/hooks/network_permissions.py || true") { - t.Error("Expected cleanup step to remove network_permissions.py") - } - - if !strings.Contains(result, "rm -rf .claude/hooks || true") { - t.Error("Expected cleanup step to remove hooks directory") - } - - if !strings.Contains(result, "rm -rf .claude || true") { - t.Error("Expected cleanup step to remove .claude directory") - } - }) - - t.Run("Cleanup with Claude engine and defaults network permissions", func(t *testing.T) { - // Test data with Claude engine and defaults network permissions - // (This simulates what happens when no network section is specified - defaults to "defaults" mode) - data := 
&WorkflowData{ - Name: "test-workflow", - EngineConfig: &EngineConfig{ - ID: "claude", - Model: "claude-3-5-sonnet-20241022", - }, - NetworkPermissions: &NetworkPermissions{ - Mode: "defaults", // Default network mode - }, - } - - steps := engine.GetExecutionSteps(data, "/tmp/gh-aw/th-aw/test.log") - - // Convert all steps to string for analysis - var allStepsStr strings.Builder - for _, step := range steps { - allStepsStr.WriteString(strings.Join(step, "\n")) - allStepsStr.WriteString("\n") - } - result := allStepsStr.String() - - // Verify cleanup step is generated for defaults mode - if !strings.Contains(result, "- name: Clean up network proxy hook files") { - t.Error("Expected cleanup step to be generated with defaults network permissions") - } - }) - - t.Run("No cleanup with Claude engine but no network permissions", func(t *testing.T) { - // Test data with Claude engine but no network permissions - data := &WorkflowData{ - Name: "test-workflow", - EngineConfig: &EngineConfig{ - ID: "claude", - Model: "claude-3-5-sonnet-20241022", - }, - NetworkPermissions: nil, // No network permissions - } - - steps := engine.GetExecutionSteps(data, "/tmp/gh-aw/th-aw/test.log") - - // Convert all steps to string for analysis - var allStepsStr strings.Builder - for _, step := range steps { - allStepsStr.WriteString(strings.Join(step, "\n")) - allStepsStr.WriteString("\n") - } - result := allStepsStr.String() - - // Verify no cleanup step is generated - if strings.Contains(result, "- name: Clean up network proxy hook files") { - t.Error("Expected no cleanup step to be generated without network permissions") - } - }) - - t.Run("Cleanup with empty network permissions (deny-all)", func(t *testing.T) { - // Test data with Claude engine and empty network permissions (deny-all) - data := &WorkflowData{ - Name: "test-workflow", - EngineConfig: &EngineConfig{ - ID: "claude", - Model: "claude-3-5-sonnet-20241022", - }, - NetworkPermissions: &NetworkPermissions{ - Allowed: []string{}, // Empty allowed list (deny-all, but still uses hooks) - }, - } - - steps := engine.GetExecutionSteps(data, "/tmp/gh-aw/th-aw/test.log") - - // Convert all steps to string for analysis - var allStepsStr strings.Builder - for _, step := range steps { - allStepsStr.WriteString(strings.Join(step, "\n")) - allStepsStr.WriteString("\n") - } - result := allStepsStr.String() - - // Verify cleanup step is generated even for deny-all policy - // because hooks are still created for deny-all enforcement - if !strings.Contains(result, "- name: Clean up network proxy hook files") { - t.Error("Expected cleanup step to be generated even with deny-all network permissions") - } - }) -} - func TestEngineOutputCleanupWithMixedPaths(t *testing.T) { // Test the cleanup logic directly with mixed paths to ensure proper filtering var yaml strings.Builder diff --git a/pkg/workflow/claude_engine.go b/pkg/workflow/claude_engine.go index 69e2d940de..b81c71a41d 100644 --- a/pkg/workflow/claude_engine.go +++ b/pkg/workflow/claude_engine.go @@ -27,6 +27,7 @@ func NewClaudeEngine() *ClaudeEngine { supportsMaxTurns: true, // Claude supports max-turns feature supportsWebFetch: true, // Claude has built-in WebFetch support supportsWebSearch: true, // Claude has built-in WebSearch support + supportsFirewall: true, // Claude supports AWF firewall }, } } @@ -52,25 +53,27 @@ func (e *ClaudeEngine) GetInstallationSteps(workflowData *WorkflowData) []GitHub "claude", workflowData, ) - steps = append(steps, npmSteps...) 
-	// Check if network permissions are configured (only for Claude engine)
-	if workflowData.EngineConfig != nil && ShouldEnforceNetworkPermissions(workflowData.NetworkPermissions) {
-		// Generate network hook generator and settings generator
-		hookGenerator := &NetworkHookGenerator{}
-		settingsGenerator := &ClaudeSettingsGenerator{}
+	// Install Node.js setup (first npm step)
+	steps = append(steps, npmSteps[0])
-		allowedDomains := GetAllowedDomains(workflowData.NetworkPermissions)
-
-		// Add settings generation step
-		settingsStep := settingsGenerator.GenerateSettingsWorkflowStep()
-		steps = append(steps, settingsStep)
+	// Add AWF installation steps only if firewall is enabled
+	if isFirewallEnabled(workflowData) {
+		// Install AWF after Node.js setup but before Claude CLI installation
+		firewallConfig := getFirewallConfig(workflowData)
+		var awfVersion string
+		if firewallConfig != nil {
+			awfVersion = firewallConfig.Version
+		}
-		// Add hook generation step
-		hookStep := hookGenerator.GenerateNetworkHookWorkflowStep(allowedDomains)
-		steps = append(steps, hookStep)
+		// Install AWF binary
+		awfInstall := generateAWFInstallationStep(awfVersion)
+		steps = append(steps, awfInstall)
 	}
+	// Install Claude CLI (remaining npm steps)
+	steps = append(steps, npmSteps[1:]...)
+
 	return steps
 }
@@ -134,11 +137,6 @@ func (e *ClaudeEngine) GetExecutionSteps(workflowData *WorkflowData, logFile str
 	// Add output format for structured output
 	claudeArgs = append(claudeArgs, "--output-format", "stream-json")
-	// Add network settings if configured
-	if workflowData.EngineConfig != nil && ShouldEnforceNetworkPermissions(workflowData.NetworkPermissions) {
-		claudeArgs = append(claudeArgs, "--settings", "/tmp/gh-aw/.claude/settings.json")
-	}
-
 	// Add custom args from engine configuration before the prompt
 	if workflowData.EngineConfig != nil && len(workflowData.EngineConfig.Args) > 0 {
 		claudeArgs = append(claudeArgs, workflowData.EngineConfig.Args...)
@@ -197,10 +195,45 @@ func (e *ClaudeEngine) GetExecutionSteps(workflowData *WorkflowData, logFile str
 	// Join command parts with proper escaping using shellJoinArgs helper
 	// This handles already-quoted arguments correctly and prevents double-escaping
-	command := shellJoinArgs(commandParts)
+	claudeCommand := shellJoinArgs(commandParts)
+
+	// Conditionally wrap with AWF if firewall is enabled
+	if isFirewallEnabled(workflowData) {
+		// Build the AWF-wrapped command
+		firewallConfig := getFirewallConfig(workflowData)
+		var awfLogLevel = "info"
+		if firewallConfig != nil && firewallConfig.LogLevel != "" {
+			awfLogLevel = firewallConfig.LogLevel
+		}
-	// Add the command with proper indentation and tee output (preserves exit code with pipefail)
-	stepLines = append(stepLines, fmt.Sprintf(" %s 2>&1 | tee %s", command, logFile))
+		// Get allowed domains (claude defaults + network permissions)
+		allowedDomains := GetClaudeAllowedDomains(workflowData.NetworkPermissions)
+
+		// Build AWF arguments
+		var awfArgs []string
+		awfArgs = append(awfArgs, "--env-all")
+		awfArgs = append(awfArgs, "--container-workdir", "\"${GITHUB_WORKSPACE}\"")
+
+		// Add mount arguments
+		awfArgs = append(awfArgs, "--mount", "/tmp:/tmp:rw")
+		awfArgs = append(awfArgs, "--mount", "\"${GITHUB_WORKSPACE}:${GITHUB_WORKSPACE}:rw\"")
+
+		awfArgs = append(awfArgs, "--allow-domains", allowedDomains)
+		awfArgs = append(awfArgs, "--log-level", awfLogLevel)
+
+		// Add custom args if specified
+		if firewallConfig != nil && len(firewallConfig.Args) > 0 {
+			awfArgs = append(awfArgs, firewallConfig.Args...)
+		}
+
+		// Build the full AWF command with multiline formatting
+		stepLines = append(stepLines, fmt.Sprintf(" sudo -E awf %s \\", shellJoinArgs(awfArgs)))
+		stepLines = append(stepLines, fmt.Sprintf(" -- %s \\", claudeCommand))
+		stepLines = append(stepLines, fmt.Sprintf(" 2>&1 | tee %s", shellEscapeArg(logFile)))
+	} else {
+		// Run claude command without AWF wrapper
+		stepLines = append(stepLines, fmt.Sprintf(" %s 2>&1 | tee %s", claudeCommand, logFile))
+	}
 	// Add environment section - always include environment section for GH_AW_PROMPT
 	stepLines = append(stepLines, " env:")
@@ -271,17 +304,27 @@ func (e *ClaudeEngine) GetExecutionSteps(workflowData *WorkflowData, logFile str
 	steps = append(steps, GitHubActionStep(stepLines))
-	// Add cleanup step for network proxy hook files (if proxy was enabled)
-	if workflowData.EngineConfig != nil && ShouldEnforceNetworkPermissions(workflowData.NetworkPermissions) {
-		cleanupStep := GitHubActionStep{
-			" - name: Clean up network proxy hook files",
-			" if: always()",
-			" run: |",
-			" rm -rf .claude/hooks/network_permissions.py || true",
-			" rm -rf .claude/hooks || true",
-			" rm -rf .claude || true",
-		}
-		steps = append(steps, cleanupStep)
+	return steps
+}
+
+// GetSquidLogsSteps returns the steps for collecting and uploading Squid logs
+func (e *ClaudeEngine) GetSquidLogsSteps(workflowData *WorkflowData) []GitHubActionStep {
+	var steps []GitHubActionStep
+
+	// Only add Squid logs collection and upload steps if firewall is enabled
+	if isFirewallEnabled(workflowData) {
+		claudeLog.Printf("Adding Squid logs collection steps for workflow: %s", workflowData.Name)
+		squidLogsCollection := generateSquidLogsCollectionStep(workflowData.Name)
+		steps = append(steps, squidLogsCollection)
+
+		squidLogsUpload := generateSquidLogsUploadStep(workflowData.Name)
+		steps = append(steps, squidLogsUpload)
+
+		// Add firewall log parsing step to create step summary
+		firewallLogParsing := 
generateFirewallLogParsingStep(workflowData.Name) + steps = append(steps, firewallLogParsing) + } else { + claudeLog.Print("Firewall disabled, skipping Squid logs collection") } return steps diff --git a/pkg/workflow/claude_engine_network_test.go b/pkg/workflow/claude_engine_network_test.go index 6830ef24d3..8b46f2ef56 100644 --- a/pkg/workflow/claude_engine_network_test.go +++ b/pkg/workflow/claude_engine_network_test.go @@ -30,38 +30,25 @@ func TestClaudeEngineNetworkPermissions(t *testing.T) { }, NetworkPermissions: &NetworkPermissions{ Allowed: []string{"example.com", "*.trusted.com"}, + Firewall: &FirewallConfig{ + Enabled: true, + }, }, } steps := engine.GetInstallationSteps(workflowData) - if len(steps) != 5 { - t.Errorf("Expected 5 installation steps with network permissions (secret validation + Node.js setup + install + settings + hook), got %d", len(steps)) + if len(steps) != 4 { + t.Errorf("Expected 4 installation steps with firewall (secret + Node.js + AWF + install), got %d", len(steps)) } - // Check settings step (4th step, index 3) - settingsStepStr := strings.Join(steps[3], "\n") - if !strings.Contains(settingsStepStr, "Generate Claude Settings") { - t.Error("Fourth step should generate Claude settings") + // Check AWF installation step (3rd step, index 2) + awfStepStr := strings.Join(steps[2], "\n") + if !strings.Contains(awfStepStr, "Install awf binary") { + t.Error("Third step should install AWF binary") } - if !strings.Contains(settingsStepStr, "/tmp/gh-aw/.claude/settings.json") { - t.Error("Fourth step should create settings file") + if !strings.Contains(awfStepStr, "awf --version") { + t.Error("Third step should verify AWF installation") } - - // Check hook step (5th step, index 4) - hookStepStr := strings.Join(steps[4], "\n") - if !strings.Contains(hookStepStr, "Generate Network Permissions Hook") { - t.Error("Fifth step should generate network permissions hook") - } - if !strings.Contains(hookStepStr, ".claude/hooks/network_permissions.py") { - t.Error("Fifth step should create hook file") - } - if !strings.Contains(hookStepStr, "example.com") { - t.Error("Hook should contain allowed domain example.com") - } - if !strings.Contains(hookStepStr, "*.trusted.com") { - t.Error("Hook should contain allowed domain *.trusted.com") - } - }) t.Run("ExecutionSteps without network permissions", func(t *testing.T) { @@ -101,6 +88,9 @@ func TestClaudeEngineNetworkPermissions(t *testing.T) { }, NetworkPermissions: &NetworkPermissions{ Allowed: []string{"example.com"}, + Firewall: &FirewallConfig{ + Enabled: true, + }, }, } @@ -112,9 +102,14 @@ func TestClaudeEngineNetworkPermissions(t *testing.T) { // Convert steps to string for analysis stepYAML := strings.Join(steps[0], "\n") - // Verify settings parameter is present - if !strings.Contains(stepYAML, "--settings /tmp/gh-aw/.claude/settings.json") { - t.Error("Settings parameter should be present with network permissions") + // Verify AWF wrapper is present + if !strings.Contains(stepYAML, "sudo -E awf") { + t.Error("Claude CLI should be wrapped with AWF when firewall is enabled") + } + + // Verify settings parameter is NOT present + if strings.Contains(stepYAML, "--settings") { + t.Error("Settings parameter should not be present with AWF firewall") } // Verify model parameter is present in claude_args @@ -131,6 +126,9 @@ func TestClaudeEngineNetworkPermissions(t *testing.T) { networkPermissions := &NetworkPermissions{ Allowed: []string{}, // Empty list means deny all + Firewall: &FirewallConfig{ + Enabled: true, + }, } steps := 
engine.GetExecutionSteps(&WorkflowData{Name: "test-workflow", EngineConfig: config, NetworkPermissions: networkPermissions}, "test-log") @@ -141,9 +139,9 @@ func TestClaudeEngineNetworkPermissions(t *testing.T) { // Convert steps to string for analysis stepYAML := strings.Join(steps[0], "\n") - // Verify settings parameter is present even with deny-all policy - if !strings.Contains(stepYAML, "--settings /tmp/gh-aw/.claude/settings.json") { - t.Error("Settings parameter should be present with deny-all network permissions") + // Verify AWF wrapper is present even with deny-all policy + if !strings.Contains(stepYAML, "sudo -E awf") { + t.Error("AWF wrapper should be present with deny-all network permissions") } }) @@ -182,21 +180,21 @@ func TestNetworkPermissionsIntegration(t *testing.T) { networkPermissions := &NetworkPermissions{ Allowed: []string{"api.github.com", "*.example.com", "trusted.org"}, + Firewall: &FirewallConfig{ + Enabled: true, + }, } // Get installation steps steps := engine.GetInstallationSteps(&WorkflowData{EngineConfig: config, NetworkPermissions: networkPermissions}) - if len(steps) != 5 { - t.Fatalf("Expected 5 installation steps (secret validation + Node.js setup + install + settings + hook), got %d", len(steps)) + if len(steps) != 4 { + t.Fatalf("Expected 4 installation steps (secret validation + Node.js setup + AWF + install), got %d", len(steps)) } - // Verify hook generation step (fifth step, index 4) - hookStep := strings.Join(steps[4], "\n") - expectedDomains := []string{"api.github.com", "*.example.com", "trusted.org"} - for _, domain := range expectedDomains { - if !strings.Contains(hookStep, domain) { - t.Errorf("Hook step should contain domain '%s'", domain) - } + // Verify AWF installation step (third step, index 2) + awfStep := strings.Join(steps[2], "\n") + if !strings.Contains(awfStep, "Install awf binary") { + t.Error("Third step should install AWF binary") } // Get execution steps @@ -208,9 +206,14 @@ func TestNetworkPermissionsIntegration(t *testing.T) { // Convert steps to string for analysis stepYAML := strings.Join(execSteps[0], "\n") - // Verify settings is configured - if !strings.Contains(stepYAML, "--settings /tmp/gh-aw/.claude/settings.json") { - t.Error("Settings parameter should be present") + // Verify AWF wrapper is present + if !strings.Contains(stepYAML, "sudo -E awf") { + t.Error("AWF wrapper should be present") + } + + // Verify settings is NOT present + if strings.Contains(stepYAML, "--settings") { + t.Error("Settings parameter should not be present with AWF firewall") } // Test the GetAllowedDomains function @@ -238,6 +241,9 @@ func TestNetworkPermissionsIntegration(t *testing.T) { networkPermissions := &NetworkPermissions{ Allowed: []string{"example.com"}, + Firewall: &FirewallConfig{ + Enabled: true, + }, } steps1 := engine1.GetInstallationSteps(&WorkflowData{EngineConfig: config, NetworkPermissions: networkPermissions}) diff --git a/pkg/workflow/claude_settings.go b/pkg/workflow/claude_settings.go deleted file mode 100644 index df0c593ccd..0000000000 --- a/pkg/workflow/claude_settings.go +++ /dev/null @@ -1,76 +0,0 @@ -package workflow - -import ( - "encoding/json" - "fmt" - "strings" -) - -// ClaudeSettingsGenerator generates Claude Code settings configurations -type ClaudeSettingsGenerator struct{} - -// ClaudeSettings represents the structure of Claude Code settings.json -type ClaudeSettings struct { - Hooks *HookConfiguration `json:"hooks,omitempty"` -} - -// HookConfiguration represents the hooks section of settings -type 
HookConfiguration struct { - PreToolUse []PreToolUseHook `json:"PreToolUse,omitempty"` -} - -// PreToolUseHook represents a pre-tool-use hook configuration -type PreToolUseHook struct { - Matcher string `json:"matcher"` - Hooks []HookEntry `json:"hooks"` -} - -// HookEntry represents a single hook entry -type HookEntry struct { - Type string `json:"type"` - Command string `json:"command"` -} - -// GenerateSettingsJSON generates Claude Code settings JSON for network permissions -func (g *ClaudeSettingsGenerator) GenerateSettingsJSON() string { - settings := ClaudeSettings{ - Hooks: &HookConfiguration{ - PreToolUse: []PreToolUseHook{ - { - Matcher: "WebFetch|WebSearch", - Hooks: []HookEntry{ - { - Type: "command", - Command: ".claude/hooks/network_permissions.py", - }, - }, - }, - }, - }, - } - - settingsJSON, _ := json.MarshalIndent(settings, "", " ") - return string(settingsJSON) -} - -// GenerateSettingsWorkflowStep generates a GitHub Actions workflow step that creates the settings file -func (g *ClaudeSettingsGenerator) GenerateSettingsWorkflowStep() GitHubActionStep { - settingsJSON := g.GenerateSettingsJSON() - - runContent := fmt.Sprintf(`mkdir -p /tmp/gh-aw/.claude -cat > /tmp/gh-aw/.claude/settings.json << 'EOF' -%s -EOF`, settingsJSON) - - var lines []string - lines = append(lines, " - name: Generate Claude Settings") - lines = append(lines, " run: |") - - // Split the run content into lines and properly indent - runLines := strings.Split(runContent, "\n") - for _, line := range runLines { - lines = append(lines, fmt.Sprintf(" %s", line)) - } - - return GitHubActionStep(lines) -} diff --git a/pkg/workflow/claude_settings_test.go b/pkg/workflow/claude_settings_test.go deleted file mode 100644 index 597ed24702..0000000000 --- a/pkg/workflow/claude_settings_test.go +++ /dev/null @@ -1,199 +0,0 @@ -package workflow - -import ( - "encoding/json" - "strings" - "testing" -) - -func TestClaudeSettingsStructures(t *testing.T) { - t.Run("ClaudeSettings JSON marshaling", func(t *testing.T) { - settings := ClaudeSettings{ - Hooks: &HookConfiguration{ - PreToolUse: []PreToolUseHook{ - { - Matcher: "WebFetch|WebSearch", - Hooks: []HookEntry{ - { - Type: "command", - Command: ".claude/hooks/network_permissions.py", - }, - }, - }, - }, - }, - } - - jsonData, err := json.Marshal(settings) - if err != nil { - t.Fatalf("Failed to marshal settings: %v", err) - } - - jsonStr := string(jsonData) - if !strings.Contains(jsonStr, `"hooks"`) { - t.Error("JSON should contain hooks field") - } - if !strings.Contains(jsonStr, `"PreToolUse"`) { - t.Error("JSON should contain PreToolUse field") - } - if !strings.Contains(jsonStr, `"WebFetch|WebSearch"`) { - t.Error("JSON should contain matcher pattern") - } - if !strings.Contains(jsonStr, `"command"`) { - t.Error("JSON should contain hook type") - } - if !strings.Contains(jsonStr, `.claude/hooks/network_permissions.py`) { - t.Error("JSON should contain hook command path") - } - }) - - t.Run("Empty settings", func(t *testing.T) { - settings := ClaudeSettings{} - jsonData, err := json.Marshal(settings) - if err != nil { - t.Fatalf("Failed to marshal empty settings: %v", err) - } - - jsonStr := string(jsonData) - if strings.Contains(jsonStr, `"hooks"`) { - t.Error("Empty settings should not contain hooks field due to omitempty") - } - }) - - t.Run("JSON unmarshal round-trip", func(t *testing.T) { - generator := &ClaudeSettingsGenerator{} - originalJSON := generator.GenerateSettingsJSON() - - var settings ClaudeSettings - err := json.Unmarshal([]byte(originalJSON), 
&settings) - if err != nil { - t.Fatalf("Failed to unmarshal settings: %v", err) - } - - // Verify structure is preserved - if settings.Hooks == nil { - t.Error("Unmarshaled settings should have hooks") - } - if len(settings.Hooks.PreToolUse) != 1 { - t.Errorf("Expected 1 PreToolUse hook, got %d", len(settings.Hooks.PreToolUse)) - } - - hook := settings.Hooks.PreToolUse[0] - if hook.Matcher != "WebFetch|WebSearch" { - t.Errorf("Expected matcher 'WebFetch|WebSearch', got '%s'", hook.Matcher) - } - if len(hook.Hooks) != 1 { - t.Errorf("Expected 1 hook entry, got %d", len(hook.Hooks)) - } - - entry := hook.Hooks[0] - if entry.Type != "command" { - t.Errorf("Expected hook type 'command', got '%s'", entry.Type) - } - if entry.Command != ".claude/hooks/network_permissions.py" { - t.Errorf("Expected command '.claude/hooks/network_permissions.py', got '%s'", entry.Command) - } - }) -} - -func TestClaudeSettingsWorkflowGeneration(t *testing.T) { - generator := &ClaudeSettingsGenerator{} - - t.Run("Workflow step format", func(t *testing.T) { - step := generator.GenerateSettingsWorkflowStep() - - if len(step) == 0 { - t.Fatal("Generated step should not be empty") - } - - stepStr := strings.Join(step, "\n") - - // Check step name - if !strings.Contains(stepStr, "- name: Generate Claude Settings") { - t.Error("Step should have correct name") - } - - // Check run command structure - if !strings.Contains(stepStr, "run: |") { - t.Error("Step should use multi-line run format") - } - - // Check directory creation command - if !strings.Contains(stepStr, "mkdir -p /tmp/gh-aw/.claude") { - t.Error("Step should create /tmp/gh-aw/.claude directory before creating settings file") - } - - // Check file creation - if !strings.Contains(stepStr, "cat > /tmp/gh-aw/.claude/settings.json") { - t.Error("Step should create /tmp/gh-aw/.claude/settings.json file") - } - - // Verify the order - mkdir should come before cat - mkdirIndex := strings.Index(stepStr, "mkdir -p /tmp/gh-aw/.claude") - catIndex := strings.Index(stepStr, "cat > /tmp/gh-aw/.claude/settings.json") - if mkdirIndex == -1 || catIndex == -1 || mkdirIndex > catIndex { - t.Error("Directory creation (mkdir) should come before file creation (cat)") - } - - // Check heredoc usage - if !strings.Contains(stepStr, "EOF") { - t.Error("Step should use heredoc for JSON content") - } - - // Check indentation - lines := strings.Split(stepStr, "\n") - foundRunLine := false - for _, line := range lines { - if strings.Contains(line, "run: |") { - foundRunLine = true - continue - } - if foundRunLine && strings.TrimSpace(line) != "" { - if !strings.HasPrefix(line, " ") { - t.Errorf("Run command lines should be indented with 10 spaces, got line: '%s'", line) - } - break // Only check the first non-empty line after run: - } - } - - // Verify the JSON content is embedded - if !strings.Contains(stepStr, `"hooks"`) { - t.Error("Step should contain embedded JSON settings") - } - }) - - t.Run("Generated JSON validity", func(t *testing.T) { - jsonStr := generator.GenerateSettingsJSON() - - var settings map[string]any - err := json.Unmarshal([]byte(jsonStr), &settings) - if err != nil { - t.Fatalf("Generated JSON should be valid: %v", err) - } - - // Check structure - hooks, exists := settings["hooks"] - if !exists { - t.Error("Settings should contain hooks section") - } - - hooksMap, ok := hooks.(map[string]any) - if !ok { - t.Error("Hooks should be an object") - } - - preToolUse, exists := hooksMap["PreToolUse"] - if !exists { - t.Error("Hooks should contain PreToolUse section") - 
} - - preToolUseArray, ok := preToolUse.([]any) - if !ok { - t.Error("PreToolUse should be an array") - } - - if len(preToolUseArray) != 1 { - t.Errorf("PreToolUse should contain 1 hook, got %d", len(preToolUseArray)) - } - }) -} diff --git a/pkg/workflow/claude_settings_tmp_test.go b/pkg/workflow/claude_settings_tmp_test.go deleted file mode 100644 index 0de84eb02c..0000000000 --- a/pkg/workflow/claude_settings_tmp_test.go +++ /dev/null @@ -1,86 +0,0 @@ -package workflow - -import ( - "os" - "path/filepath" - "strings" - "testing" - - "github.com/githubnext/gh-aw/pkg/testutil" -) - -func TestClaudeSettingsTmpPath(t *testing.T) { - // Create temporary directory for test files - tmpDir := testutil.TempDir(t, "claude-settings-tmp-test") - - // Create a test markdown file with network permissions to trigger settings generation - testContent := `--- -on: push -permissions: - contents: read - issues: write - pull-requests: read -tools: - github: - allowed: [list_issues] -engine: claude -strict: false -network: - allowed: - - example.com ---- - -# Test Claude settings tmp path - -This workflow tests that .claude/settings.json is generated in /tmp directory. -` - - testFile := filepath.Join(tmpDir, "test-claude-settings-tmp.md") - if err := os.WriteFile(testFile, []byte(testContent), 0644); err != nil { - t.Fatal(err) - } - - compiler := NewCompiler(false, "", "test") - if err := compiler.CompileWorkflow(testFile); err != nil { - t.Fatalf("Failed to compile workflow: %v", err) - } - - // Read the generated lock file - lockFile := strings.Replace(testFile, ".md", ".lock.yml", 1) - lockContent, err := os.ReadFile(lockFile) - if err != nil { - t.Fatalf("Failed to read generated lock file: %v", err) - } - - lockStr := string(lockContent) - - // Test 1: Verify .claude directory is created in /tmp/gh-aw - if !strings.Contains(lockStr, "mkdir -p /tmp/gh-aw/.claude") { - t.Error("Expected directory creation 'mkdir -p /tmp/gh-aw/.claude' in generated workflow") - } - - // Test 2: Verify settings.json is written to /tmp/gh-aw/.claude/settings.json - if !strings.Contains(lockStr, "cat > /tmp/gh-aw/.claude/settings.json") { - t.Error("Expected settings file creation 'cat > /tmp/gh-aw/.claude/settings.json' in generated workflow") - } - - // Test 3: Verify settings parameter points to /tmp/gh-aw/.claude/settings.json - if !strings.Contains(lockStr, "--settings /tmp/gh-aw/.claude/settings.json") { - t.Error("Expected settings parameter to be '/tmp/gh-aw/.claude/settings.json' in generated workflow") - } - - // Test 4: Verify the old paths are not present - if strings.Contains(lockStr, "mkdir -p .claude") && !strings.Contains(lockStr, "mkdir -p /tmp/gh-aw/.claude") { - t.Error("Found old directory path '.claude' without /tmp/gh-aw prefix in generated workflow") - } - - if strings.Contains(lockStr, "cat > .claude/settings.json") { - t.Error("Found old settings file path '.claude/settings.json' in generated workflow, should use /tmp/gh-aw/.claude/settings.json") - } - - if strings.Contains(lockStr, "settings: .claude/settings.json") && !strings.Contains(lockStr, "settings: /tmp/gh-aw/.claude/settings.json") { - t.Error("Found old settings parameter '.claude/settings.json' without /tmp/gh-aw prefix in generated workflow") - } - - t.Logf("Successfully verified .claude/settings.json is generated in /tmp/gh-aw directory") -} diff --git a/pkg/workflow/compiler.go b/pkg/workflow/compiler.go index 72e5a1457f..a03ca12576 100644 --- a/pkg/workflow/compiler.go +++ b/pkg/workflow/compiler.go @@ -805,8 +805,8 @@ func (c 
*Compiler) ParseWorkflowFile(markdownPath string) (*WorkflowData, error) c.IncrementWarningCount() } - // Enable firewall by default for copilot engine when network restrictions are present - enableFirewallByDefaultForCopilot(engineSetting, networkPermissions) + // Enable firewall by default for engines that support it when network restrictions are present + enableFirewallByDefault(agenticEngine, networkPermissions) // Save the initial strict mode state again for network support check // (it was restored after validateStrictMode but we need it again) diff --git a/pkg/workflow/domains.go b/pkg/workflow/domains.go index 846a8f2756..b28537c04d 100644 --- a/pkg/workflow/domains.go +++ b/pkg/workflow/domains.go @@ -169,3 +169,29 @@ func GetCopilotAllowedDomains(network *NetworkPermissions) string { // Join with commas for AWF --allow-domains flag return strings.Join(domains, ",") } + +// GetClaudeAllowedDomains returns allowed domains for Claude engine with AWF +// Returns a deduplicated, sorted, comma-separated string suitable for AWF's --allow-domains flag +// Note: Claude uses API key authentication, so no default domains are needed (unlike Copilot) +func GetClaudeAllowedDomains(network *NetworkPermissions) string { + domainMap := make(map[string]bool) + + // Add NetworkPermissions domains (if specified) + if network != nil && len(network.Allowed) > 0 { + // Expand ecosystem identifiers and add individual domains + expandedDomains := GetAllowedDomains(network) + for _, domain := range expandedDomains { + domainMap[domain] = true + } + } + + // Convert to sorted slice for consistent output + domains := make([]string, 0, len(domainMap)) + for domain := range domainMap { + domains = append(domains, domain) + } + SortStrings(domains) + + // Join with commas for AWF --allow-domains flag + return strings.Join(domains, ",") +} diff --git a/pkg/workflow/engine_firewall_support_test.go b/pkg/workflow/engine_firewall_support_test.go index 5782c1e311..3037d01cd3 100644 --- a/pkg/workflow/engine_firewall_support_test.go +++ b/pkg/workflow/engine_firewall_support_test.go @@ -13,10 +13,10 @@ func TestSupportsFirewall(t *testing.T) { } }) - t.Run("claude engine does not support firewall", func(t *testing.T) { + t.Run("claude engine supports firewall", func(t *testing.T) { engine := NewClaudeEngine() - if engine.SupportsFirewall() { - t.Error("Claude engine should not support firewall") + if !engine.SupportsFirewall() { + t.Error("Claude engine should support firewall") } }) @@ -119,7 +119,7 @@ func TestCheckNetworkSupport_WithRestrictions(t *testing.T) { } }) - t.Run("claude engine with restrictions - warning emitted", func(t *testing.T) { + t.Run("claude engine with restrictions - no warning", func(t *testing.T) { compiler := NewCompiler(false, "", "test") engine := NewClaudeEngine() perms := &NetworkPermissions{ @@ -131,8 +131,8 @@ func TestCheckNetworkSupport_WithRestrictions(t *testing.T) { if err != nil { t.Errorf("Expected no error, got: %v", err) } - if compiler.warningCount != initialWarnings+1 { - t.Error("Should emit warning for claude engine with network restrictions") + if compiler.warningCount != initialWarnings { + t.Error("Should not emit warning for claude engine with network restrictions (it supports firewall now)") } }) @@ -186,7 +186,7 @@ func TestCheckNetworkSupport_StrictMode(t *testing.T) { } }) - t.Run("strict mode: claude engine with restrictions - error", func(t *testing.T) { + t.Run("strict mode: claude engine with restrictions - no error", func(t *testing.T) { compiler := 
NewCompiler(false, "", "test") compiler.strictMode = true engine := NewClaudeEngine() @@ -195,14 +195,8 @@ func TestCheckNetworkSupport_StrictMode(t *testing.T) { } err := compiler.checkNetworkSupport(engine, perms) - if err == nil { - t.Error("Expected error in strict mode for claude engine with restrictions") - } - if !strings.Contains(err.Error(), "strict mode") { - t.Errorf("Error should mention strict mode, got: %v", err) - } - if !strings.Contains(err.Error(), "firewall") { - t.Errorf("Error should mention firewall, got: %v", err) + if err != nil { + t.Errorf("Expected no error for claude in strict mode (it supports firewall now), got: %v", err) } }) @@ -326,10 +320,10 @@ func TestCheckFirewallDisable(t *testing.T) { } }) - t.Run("strict mode: firewall disabled with unsupported engine - error", func(t *testing.T) { + t.Run("strict mode: firewall disabled with codex engine - error", func(t *testing.T) { compiler := NewCompiler(false, "", "test") compiler.strictMode = true - engine := NewClaudeEngine() + engine := NewCodexEngine() perms := &NetworkPermissions{ Firewall: &FirewallConfig{ Enabled: false, diff --git a/pkg/workflow/engine_network_hooks.go b/pkg/workflow/engine_network_hooks.go deleted file mode 100644 index 502720a5ae..0000000000 --- a/pkg/workflow/engine_network_hooks.go +++ /dev/null @@ -1,172 +0,0 @@ -package workflow - -import ( - "encoding/json" - "fmt" - "strings" - - "github.com/githubnext/gh-aw/pkg/logger" -) - -var networkHooksLog = logger.New("workflow:engine_network_hooks") - -// NetworkHookGenerator generates network permission hooks for AI engines -// Network permissions are configured at the workflow level using the top-level "network" field -type NetworkHookGenerator struct{} - -// GenerateNetworkHookScript generates a Python hook script for network permissions -func (g *NetworkHookGenerator) GenerateNetworkHookScript(allowedDomains []string) string { - networkHooksLog.Printf("Generating network hook script with %d allowed domains", len(allowedDomains)) - - // Convert domain list to JSON for embedding in Python - // Ensure empty slice becomes [] not null in JSON - var domainsJSON string - if allowedDomains == nil { - domainsJSON = "[]" - networkHooksLog.Print("No allowed domains configured (deny-all policy)") - } else { - jsonBytes, _ := json.Marshal(allowedDomains) - domainsJSON = string(jsonBytes) - if networkHooksLog.Enabled() { - networkHooksLog.Printf("Allowed domains JSON: %s", domainsJSON) - } - } - - // Embed domain list JSON using json.loads() to eliminate any quoting vulnerabilities - // This approach prevents quote-related injection vulnerabilities (CWE-78, CWE-89, CWE-94) - // by using Python's json.loads() to safely parse the JSON string - - // Build the Python script using a safe template approach - // The JSON is parsed at runtime using json.loads() to avoid any quoting issues - return fmt.Sprintf(`#!/usr/bin/env python3 -""" -Network permissions validator for Claude Code engine. -Generated by gh-aw from workflow-level network configuration. 
-""" - -import json -import sys -import urllib.parse -import re - -# Domain allow-list (populated during generation) -# JSON string is safely parsed using json.loads() to eliminate quoting vulnerabilities -ALLOWED_DOMAINS = json.loads('''%s''') - -def extract_domain(url_or_query): - """Extract domain from URL or search query.""" - if not url_or_query: - return None - - if url_or_query.startswith(('http://', 'https://')): - return urllib.parse.urlparse(url_or_query).netloc.lower() - - # Check for domain patterns in search queries - match = re.search(r'site:([a-zA-Z0-9.-]+\.[a-zA-Z]{2,})', url_or_query) - if match: - return match.group(1).lower() - - return None - -def is_domain_allowed(domain): - """Check if domain is allowed.""" - if not domain: - # If no domain detected, allow only if not under deny-all policy - return bool(ALLOWED_DOMAINS) # False if empty list (deny-all), True if has domains - - # Empty allowed domains means deny all - if not ALLOWED_DOMAINS: - return False - - for pattern in ALLOWED_DOMAINS: - regex = pattern.replace('.', r'\.').replace('*', '.*') - if re.match(f'^{regex}$', domain): - return True - return False - -# Main logic -try: - data = json.load(sys.stdin) - tool_name = data.get('tool_name', '') - tool_input = data.get('tool_input', {}) - - if tool_name not in ['WebFetch', 'WebSearch']: - sys.exit(0) # Allow other tools - - target = tool_input.get('url') or tool_input.get('query', '') - domain = extract_domain(target) - - # For WebSearch, apply domain restrictions consistently - # If no domain detected in search query, check if restrictions are in place - if tool_name == 'WebSearch' and not domain: - # Since this hook is only generated when network permissions are configured, - # empty ALLOWED_DOMAINS means deny-all policy - if not ALLOWED_DOMAINS: # Empty list means deny all - print(f"Network access blocked: deny-all policy in effect", file=sys.stderr) - print(f"No domains are allowed for WebSearch", file=sys.stderr) - sys.exit(2) # Block under deny-all policy - else: - print(f"Network access blocked for web-search: no specific domain detected", file=sys.stderr) - print(f"Allowed domains: {', '.join(ALLOWED_DOMAINS)}", file=sys.stderr) - sys.exit(2) # Block general searches when domain allowlist is configured - - if not is_domain_allowed(domain): - print(f"Network access blocked for domain: {domain}", file=sys.stderr) - print(f"Allowed domains: {', '.join(ALLOWED_DOMAINS)}", file=sys.stderr) - sys.exit(2) # Block with feedback to Claude - - sys.exit(0) # Allow - -except Exception as e: - print(f"Network validation error: {e}", file=sys.stderr) - sys.exit(2) # Block on errors -`, domainsJSON) -} - -// GenerateNetworkHookWorkflowStep generates a GitHub Actions workflow step that creates the network permissions hook -func (g *NetworkHookGenerator) GenerateNetworkHookWorkflowStep(allowedDomains []string) GitHubActionStep { - networkHooksLog.Print("Generating network hook workflow step") - hookScript := g.GenerateNetworkHookScript(allowedDomains) - - // No escaping needed for heredoc with 'EOF' - it's literal - runContent := fmt.Sprintf(`mkdir -p .claude/hooks -cat > .claude/hooks/network_permissions.py << 'EOF' -%s -EOF -chmod +x .claude/hooks/network_permissions.py`, hookScript) - - var lines []string - lines = append(lines, " - name: Generate Network Permissions Hook") - lines = append(lines, " run: |") - - // Split the run content into lines and properly indent - runLines := strings.Split(runContent, "\n") - for _, line := range runLines { - lines = 
append(lines, fmt.Sprintf(" %s", line)) - } - - return GitHubActionStep(lines) -} - -// ShouldEnforceNetworkPermissions checks if network permissions should be enforced -// Returns true if network permissions are configured and not in "defaults" mode -func ShouldEnforceNetworkPermissions(network *NetworkPermissions) bool { - if network == nil { - networkHooksLog.Print("No network permissions configured, full access allowed") - return false // No network config, defaults to full access - } - if network.Mode == "defaults" { - networkHooksLog.Print("Network permissions in 'defaults' mode, enforcement enabled") - return true // "defaults" mode uses restricted allow-list (enforcement needed) - } - networkHooksLog.Print("Network permissions configured with restrictions, enforcement enabled") - return true // Object format means some restriction is configured -} - -// HasNetworkPermissions is deprecated - use ShouldEnforceNetworkPermissions instead -// Kept for backwards compatibility but will be removed in future versions -func HasNetworkPermissions(engineConfig *EngineConfig) bool { - // This function is now deprecated since network permissions are top-level - // Return false for backwards compatibility - return false -} diff --git a/pkg/workflow/engine_network_test.go b/pkg/workflow/engine_network_test.go deleted file mode 100644 index 833ca8c414..0000000000 --- a/pkg/workflow/engine_network_test.go +++ /dev/null @@ -1,282 +0,0 @@ -package workflow - -import ( - "strings" - "testing" -) - -func TestNetworkHookGenerator(t *testing.T) { - generator := &NetworkHookGenerator{} - - t.Run("GenerateNetworkHookScript", func(t *testing.T) { - allowedDomains := []string{"example.com", "*.trusted.com", "api.service.org"} - script := generator.GenerateNetworkHookScript(allowedDomains) - - // Check that script contains the expected domains - if !strings.Contains(script, `"example.com"`) { - t.Error("Script should contain example.com") - } - if !strings.Contains(script, `"*.trusted.com"`) { - t.Error("Script should contain *.trusted.com") - } - if !strings.Contains(script, `"api.service.org"`) { - t.Error("Script should contain api.service.org") - } - - // Check for required Python imports and functions - if !strings.Contains(script, "import json") { - t.Error("Script should import json") - } - if !strings.Contains(script, "import urllib.parse") { - t.Error("Script should import urllib.parse") - } - if !strings.Contains(script, "def extract_domain") { - t.Error("Script should define extract_domain function") - } - if !strings.Contains(script, "def is_domain_allowed") { - t.Error("Script should define is_domain_allowed function") - } - }) - - t.Run("GenerateNetworkHookWorkflowStep", func(t *testing.T) { - allowedDomains := []string{"api.github.com", "*.trusted.com"} - step := generator.GenerateNetworkHookWorkflowStep(allowedDomains) - - stepStr := strings.Join(step, "\n") - - // Check that the step contains proper YAML structure - if !strings.Contains(stepStr, "name: Generate Network Permissions Hook") { - t.Error("Step should have correct name") - } - if !strings.Contains(stepStr, ".claude/hooks/network_permissions.py") { - t.Error("Step should create hook file in correct location") - } - if !strings.Contains(stepStr, "chmod +x") { - t.Error("Step should make hook executable") - } - - // Check that domains are included in the hook - if !strings.Contains(stepStr, "api.github.com") { - t.Error("Step should contain api.github.com domain") - } - if !strings.Contains(stepStr, "*.trusted.com") { - t.Error("Step 
should contain *.trusted.com domain") - } - }) - - t.Run("EmptyDomainsGeneration", func(t *testing.T) { - allowedDomains := []string{} // Empty list means deny-all - script := generator.GenerateNetworkHookScript(allowedDomains) - - // Should still generate a valid script - if !strings.Contains(script, "json.loads('''[]''')") { - t.Error("Script should handle empty domains list (deny-all policy)") - } - if !strings.Contains(script, "def is_domain_allowed") { - t.Error("Script should still define required functions") - } - }) -} - -func TestShouldEnforceNetworkPermissions(t *testing.T) { - t.Run("nil permissions", func(t *testing.T) { - if ShouldEnforceNetworkPermissions(nil) { - t.Error("Should not enforce permissions when nil") - } - }) - - t.Run("valid permissions with domains", func(t *testing.T) { - permissions := &NetworkPermissions{ - Allowed: []string{"example.com", "*.trusted.com"}, - } - if !ShouldEnforceNetworkPermissions(permissions) { - t.Error("Should enforce permissions when provided") - } - }) - - t.Run("empty permissions (deny-all)", func(t *testing.T) { - permissions := &NetworkPermissions{ - Allowed: []string{}, // Empty list means deny-all - } - if !ShouldEnforceNetworkPermissions(permissions) { - t.Error("Should enforce permissions even with empty allowed list (deny-all policy)") - } - }) -} - -func TestGetAllowedDomains(t *testing.T) { - t.Run("nil permissions", func(t *testing.T) { - domains := GetAllowedDomains(nil) - if domains == nil { - t.Error("Should return default allow-list when permissions are nil") - } - if len(domains) == 0 { - t.Error("Expected default allow-list domains for nil permissions, got empty list") - } - }) - - t.Run("empty permissions (deny-all)", func(t *testing.T) { - permissions := &NetworkPermissions{ - Allowed: []string{}, // Empty list means deny-all - } - domains := GetAllowedDomains(permissions) - if domains == nil { - t.Error("Should return empty slice, not nil, for deny-all policy") - } - if len(domains) != 0 { - t.Errorf("Expected 0 domains for deny-all policy, got %d", len(domains)) - } - }) - - t.Run("valid permissions with domains", func(t *testing.T) { - permissions := &NetworkPermissions{ - Allowed: []string{"example.com", "*.trusted.com", "api.service.org"}, - } - domains := GetAllowedDomains(permissions) - expectedDomains := []string{"example.com", "*.trusted.com", "api.service.org"} - if len(domains) != len(expectedDomains) { - t.Fatalf("Expected %d domains, got %d", len(expectedDomains), len(domains)) - } - - for i, expected := range expectedDomains { - if domains[i] != expected { - t.Errorf("Expected domain %d to be '%s', got '%s'", i, expected, domains[i]) - } - } - }) - - t.Run("permissions with 'defaults' in allowed list", func(t *testing.T) { - permissions := &NetworkPermissions{ - Allowed: []string{"defaults", "good.com"}, - } - domains := GetAllowedDomains(permissions) - - // Should have all default domains plus "good.com" - defaultDomains := getEcosystemDomains("defaults") - expectedTotal := len(defaultDomains) + 1 - - if len(domains) != expectedTotal { - t.Fatalf("Expected %d domains (defaults + good.com), got %d", expectedTotal, len(domains)) - } - - // Check that all default domains are included - defaultsFound := 0 - goodComFound := false - - for _, domain := range domains { - if domain == "good.com" { - goodComFound = true - } - // Check if this domain is in the defaults list - for _, defaultDomain := range defaultDomains { - if domain == defaultDomain { - defaultsFound++ - break - } - } - } - - if defaultsFound 
!= len(defaultDomains) { - t.Errorf("Expected all %d default domains to be included, found %d", len(defaultDomains), defaultsFound) - } - - if !goodComFound { - t.Error("Expected 'good.com' to be included in the allowed domains") - } - }) - - t.Run("permissions with only 'defaults' in allowed list", func(t *testing.T) { - permissions := &NetworkPermissions{ - Allowed: []string{"defaults"}, - } - domains := GetAllowedDomains(permissions) - defaultDomains := getEcosystemDomains("defaults") - - if len(domains) != len(defaultDomains) { - t.Fatalf("Expected %d domains (just defaults), got %d", len(defaultDomains), len(domains)) - } - - // Check that all default domains are included - for i, defaultDomain := range defaultDomains { - if domains[i] != defaultDomain { - t.Errorf("Expected domain %d to be '%s', got '%s'", i, defaultDomain, domains[i]) - } - } - }) -} - -func TestDeprecatedHasNetworkPermissions(t *testing.T) { - t.Run("deprecated function always returns false", func(t *testing.T) { - // Test that the deprecated function always returns false - if HasNetworkPermissions(nil) { - t.Error("Deprecated HasNetworkPermissions should always return false") - } - - config := &EngineConfig{ID: "claude"} - if HasNetworkPermissions(config) { - t.Error("Deprecated HasNetworkPermissions should always return false") - } - }) -} - -func TestEngineConfigParsing(t *testing.T) { - compiler := &Compiler{} - - t.Run("ParseNetworkPermissions", func(t *testing.T) { - frontmatter := map[string]any{ - "network": map[string]any{ - "allowed": []any{"example.com", "*.trusted.com", "api.service.org"}, - }, - } - - networkPermissions := compiler.extractNetworkPermissions(frontmatter) - - if networkPermissions == nil { - t.Fatal("Network permissions should not be nil") - } - - expectedDomains := []string{"example.com", "*.trusted.com", "api.service.org"} - if len(networkPermissions.Allowed) != len(expectedDomains) { - t.Fatalf("Expected %d domains, got %d", len(expectedDomains), len(networkPermissions.Allowed)) - } - - for i, expected := range expectedDomains { - if networkPermissions.Allowed[i] != expected { - t.Errorf("Expected domain %d to be '%s', got '%s'", i, expected, networkPermissions.Allowed[i]) - } - } - }) - - t.Run("ParseWithoutNetworkPermissions", func(t *testing.T) { - frontmatter := map[string]any{ - "engine": map[string]any{ - "id": "claude", - "model": "claude-3-5-sonnet-20241022", - }, - } - - networkPermissions := compiler.extractNetworkPermissions(frontmatter) - - if networkPermissions != nil { - t.Error("Network permissions should be nil when not specified") - } - }) - - t.Run("ParseEmptyNetworkPermissions", func(t *testing.T) { - frontmatter := map[string]any{ - "network": map[string]any{ - "allowed": []any{}, // Empty list means deny-all - }, - } - - networkPermissions := compiler.extractNetworkPermissions(frontmatter) - - if networkPermissions == nil { - t.Fatal("Network permissions should not be nil") - } - - if len(networkPermissions.Allowed) != 0 { - t.Errorf("Expected 0 domains for deny-all policy, got %d", len(networkPermissions.Allowed)) - } - }) -} diff --git a/pkg/workflow/firewall.go b/pkg/workflow/firewall.go index 5ddc6194fd..d8b22d8dba 100644 --- a/pkg/workflow/firewall.go +++ b/pkg/workflow/firewall.go @@ -48,11 +48,11 @@ func getFirewallConfig(workflowData *WorkflowData) *FirewallConfig { return nil } -// enableFirewallByDefaultForCopilot enables firewall by default for copilot engine +// enableFirewallByDefault enables firewall by default for engines that support it // when 
network restrictions are present but no explicit firewall configuration exists -func enableFirewallByDefaultForCopilot(engineID string, networkPermissions *NetworkPermissions) { - // Only apply to copilot engine - if engineID != "copilot" { +func enableFirewallByDefault(engine CodingAgentEngine, networkPermissions *NetworkPermissions) { + // Only apply to engines that support firewall + if engine == nil || !engine.SupportsFirewall() { return } @@ -69,10 +69,10 @@ func enableFirewallByDefaultForCopilot(engineID string, networkPermissions *Netw // Check if network restrictions are present (allowed domains specified) if len(networkPermissions.Allowed) > 0 { - // Enable firewall by default for copilot engine with network restrictions + // Enable firewall by default for engines with network restrictions networkPermissions.Firewall = &FirewallConfig{ Enabled: true, } - firewallLog.Print("Enabled firewall by default for copilot engine with network restrictions") + firewallLog.Printf("Enabled firewall by default for %s engine with network restrictions", engine.GetID()) } } diff --git a/pkg/workflow/firewall_default_enablement_test.go b/pkg/workflow/firewall_default_enablement_test.go index fd13e5d3ec..e9b160ac65 100644 --- a/pkg/workflow/firewall_default_enablement_test.go +++ b/pkg/workflow/firewall_default_enablement_test.go @@ -5,14 +5,15 @@ import ( "testing" ) -// TestEnableFirewallByDefaultForCopilot tests the automatic firewall enablement for copilot engine -func TestEnableFirewallByDefaultForCopilot(t *testing.T) { +// TestEnableFirewallByDefault tests the automatic firewall enablement for engines that support it +func TestEnableFirewallByDefault(t *testing.T) { t.Run("copilot engine with network restrictions enables firewall by default", func(t *testing.T) { + engine := NewCopilotEngine() networkPerms := &NetworkPermissions{ Allowed: []string{"example.com", "api.github.com"}, } - enableFirewallByDefaultForCopilot("copilot", networkPerms) + enableFirewallByDefault(engine, networkPerms) if networkPerms.Firewall == nil { t.Error("Expected firewall to be enabled by default for copilot engine with network restrictions") @@ -23,12 +24,30 @@ func TestEnableFirewallByDefaultForCopilot(t *testing.T) { } }) + t.Run("claude engine with network restrictions enables firewall by default", func(t *testing.T) { + engine := NewClaudeEngine() + networkPerms := &NetworkPermissions{ + Allowed: []string{"example.com", "api.anthropic.com"}, + } + + enableFirewallByDefault(engine, networkPerms) + + if networkPerms.Firewall == nil { + t.Error("Expected firewall to be enabled by default for claude engine with network restrictions") + } + + if !networkPerms.Firewall.Enabled { + t.Error("Expected firewall.Enabled to be true") + } + }) + t.Run("copilot engine without network restrictions does not enable firewall", func(t *testing.T) { + engine := NewCopilotEngine() networkPerms := &NetworkPermissions{ Mode: "defaults", } - enableFirewallByDefaultForCopilot("copilot", networkPerms) + enableFirewallByDefault(engine, networkPerms) if networkPerms.Firewall != nil { t.Error("Expected firewall to remain nil when no network restrictions are present") @@ -36,6 +55,7 @@ func TestEnableFirewallByDefaultForCopilot(t *testing.T) { }) t.Run("copilot engine with explicit firewall config is not overridden", func(t *testing.T) { + engine := NewCopilotEngine() networkPerms := &NetworkPermissions{ Allowed: []string{"example.com"}, Firewall: &FirewallConfig{ @@ -43,28 +63,30 @@ func TestEnableFirewallByDefaultForCopilot(t *testing.T) 
{ }, } - enableFirewallByDefaultForCopilot("copilot", networkPerms) + enableFirewallByDefault(engine, networkPerms) if networkPerms.Firewall.Enabled { t.Error("Expected explicit firewall.Enabled=false to be preserved") } }) - t.Run("non-copilot engine does not enable firewall", func(t *testing.T) { + t.Run("engine without firewall support does not enable firewall", func(t *testing.T) { + engine := NewCodexEngine() networkPerms := &NetworkPermissions{ Allowed: []string{"example.com"}, } - enableFirewallByDefaultForCopilot("claude", networkPerms) + enableFirewallByDefault(engine, networkPerms) if networkPerms.Firewall != nil { - t.Error("Expected firewall to remain nil for non-copilot engine") + t.Error("Expected firewall to remain nil for engine without firewall support") } }) t.Run("nil network permissions does not cause error", func(t *testing.T) { + engine := NewCopilotEngine() // Should not panic - enableFirewallByDefaultForCopilot("copilot", nil) + enableFirewallByDefault(engine, nil) }) } @@ -99,7 +121,8 @@ func TestCopilotFirewallDefaultIntegration(t *testing.T) { } // Enable firewall by default - enableFirewallByDefaultForCopilot(engineConfig.ID, networkPerms) + engine := NewCopilotEngine() + enableFirewallByDefault(engine, networkPerms) // Verify firewall is enabled if networkPerms.Firewall == nil { @@ -118,7 +141,6 @@ func TestCopilotFirewallDefaultIntegration(t *testing.T) { } // Get installation steps - engine := NewCopilotEngine() steps := engine.GetInstallationSteps(workflowData) // Verify AWF installation step is present @@ -175,7 +197,8 @@ func TestCopilotFirewallDefaultIntegration(t *testing.T) { } // Enable firewall by default (should not override explicit config) - enableFirewallByDefaultForCopilot(engineConfig.ID, networkPerms) + engine := NewCopilotEngine() + enableFirewallByDefault(engine, networkPerms) // Verify firewall is still disabled if networkPerms.Firewall.Enabled { @@ -190,8 +213,8 @@ func TestCopilotFirewallDefaultIntegration(t *testing.T) { } // Get installation steps - engine := NewCopilotEngine() - steps := engine.GetInstallationSteps(workflowData) + copilotEngine := NewCopilotEngine() + steps := copilotEngine.GetInstallationSteps(workflowData) // Verify AWF installation step is NOT present for _, step := range steps { @@ -202,7 +225,7 @@ func TestCopilotFirewallDefaultIntegration(t *testing.T) { } }) - t.Run("claude engine with network restrictions does not enable firewall", func(t *testing.T) { + t.Run("claude engine with network restrictions enables firewall", func(t *testing.T) { frontmatter := map[string]any{ "on": "workflow_dispatch", "permissions": map[string]any{ @@ -230,12 +253,41 @@ func TestCopilotFirewallDefaultIntegration(t *testing.T) { t.Fatal("Expected network permissions to be extracted") } - // Enable firewall by default (should not affect non-copilot engines) - enableFirewallByDefaultForCopilot(engineConfig.ID, networkPerms) + // Enable firewall by default (should now work for claude engine too) + engine := NewClaudeEngine() + enableFirewallByDefault(engine, networkPerms) - // Verify firewall is NOT enabled for claude - if networkPerms.Firewall != nil { - t.Error("Expected firewall to remain nil for claude engine") + // Verify firewall IS enabled for claude now + if networkPerms.Firewall == nil { + t.Error("Expected firewall to be enabled for claude engine with network restrictions") + } + + if !networkPerms.Firewall.Enabled { + t.Error("Expected firewall.Enabled to be true") + } + + // Create workflow data + workflowData := 
&WorkflowData{ + Name: "test-workflow", + EngineConfig: engineConfig, + NetworkPermissions: networkPerms, + } + + // Get installation steps + steps := engine.GetInstallationSteps(workflowData) + + // Verify AWF installation step is present + found := false + for _, step := range steps { + stepStr := strings.Join(step, "\n") + if strings.Contains(stepStr, "Install awf binary") || strings.Contains(stepStr, "awf --version") { + found = true + break + } + } + + if !found { + t.Error("Expected AWF installation steps to be included for claude engine with firewall") } }) } diff --git a/pkg/workflow/firewall_workflow_test.go b/pkg/workflow/firewall_workflow_test.go index 8649df6cfa..0369bdd6fc 100644 --- a/pkg/workflow/firewall_workflow_test.go +++ b/pkg/workflow/firewall_workflow_test.go @@ -32,36 +32,26 @@ func TestFirewallWorkflowNetworkConfiguration(t *testing.T) { } }) - t.Run("network hook is generated with default domains", func(t *testing.T) { + t.Run("no AWF installation with defaults mode (no firewall needed)", func(t *testing.T) { engine := NewClaudeEngine() steps := engine.GetInstallationSteps(workflowData) - // Should have 5 steps: secret validation, Node.js setup, install, settings, hook - if len(steps) != 5 { - t.Errorf("Expected 5 installation steps with network permissions, got %d", len(steps)) + // Should have 3 steps: secret validation, Node.js setup, install + // No AWF installation since "defaults" mode doesn't require firewall + if len(steps) != 3 { + t.Errorf("Expected 3 installation steps with defaults mode (no firewall), got %d", len(steps)) } - // Check the network permissions hook step (5th step, index 4) - hookStepStr := strings.Join(steps[4], "\n") - if !strings.Contains(hookStepStr, "Generate Network Permissions Hook") { - t.Error("Fifth step should generate network permissions hook") - } - - // Verify example.com is NOT in the allowed domains - if strings.Contains(hookStepStr, "\"example.com\"") { - t.Error("example.com should not be in the allowed domains for firewall workflow") - } - - // Verify some default domains ARE present - defaultDomains := []string{"json-schema.org", "archive.ubuntu.com"} - for _, domain := range defaultDomains { - if !strings.Contains(hookStepStr, domain) { - t.Errorf("Expected default domain '%s' to be in allowed domains", domain) + // Verify no AWF installation + for _, step := range steps { + stepStr := strings.Join(step, "\n") + if strings.Contains(stepStr, "Install awf binary") { + t.Error("AWF should not be installed with defaults network mode") } } }) - t.Run("execution step includes settings parameter", func(t *testing.T) { + t.Run("execution step does not include AWF wrapper with defaults mode", func(t *testing.T) { engine := NewClaudeEngine() steps := engine.GetExecutionSteps(workflowData, "test-log") @@ -71,9 +61,14 @@ func TestFirewallWorkflowNetworkConfiguration(t *testing.T) { stepYAML := strings.Join(steps[0], "\n") - // Verify settings parameter is present (required for network permissions) - if !strings.Contains(stepYAML, "--settings /tmp/gh-aw/.claude/settings.json") { - t.Error("Settings parameter should be present with network permissions") + // Verify AWF wrapper is NOT present (defaults mode doesn't require firewall) + if strings.Contains(stepYAML, "sudo -E awf") { + t.Error("AWF wrapper should not be present with defaults network mode") + } + + // Verify settings parameter is NOT present (we use AWF now, not settings) + if strings.Contains(stepYAML, "--settings") { + t.Error("Settings parameter should not be present 
(Claude now uses AWF, not settings)") } }) } diff --git a/pkg/workflow/network_test.go b/pkg/workflow/network_test.go index e578ceb8c2..bb2fb67c72 100644 --- a/pkg/workflow/network_test.go +++ b/pkg/workflow/network_test.go @@ -336,20 +336,6 @@ func TestNetworkPermissionsUtilities(t *testing.T) { t.Error("Expected 'api.example.com' to be included in the expanded domains") } }) - - t.Run("Deprecated HasNetworkPermissions still works", func(t *testing.T) { - // Test the deprecated function that takes EngineConfig - config := &EngineConfig{ - ID: "claude", - Model: "claude-3-5-sonnet-20241022", - } - - // This should return false since the deprecated function - // doesn't have the nested permissions anymore - if HasNetworkPermissions(config) { - t.Error("Expected false for engine config without nested permissions") - } - }) } // Test helper functions for network permissions From b82494c248b7344737bef029aec4f9254625965d Mon Sep 17 00:00:00 2001 From: "Jiaxiao (mossaka) Zhou" Date: Wed, 26 Nov 2025 00:28:28 +0000 Subject: [PATCH 2/4] update the awf to v0.4.0 Signed-off-by: Jiaxiao (mossaka) Zhou --- .github/aw/actions-lock.json | 5 +++++ .github/workflows/artifacts-summary.lock.yml | 4 ++-- .github/workflows/blog-auditor.lock.yml | 4 ++-- .github/workflows/changeset.lock.yml | 4 ++-- .github/workflows/cli-consistency-checker.lock.yml | 4 ++-- .github/workflows/cli-version-checker.lock.yml | 4 ++-- .github/workflows/copilot-agent-analysis.lock.yml | 4 ++-- .github/workflows/copilot-pr-nlp-analysis.lock.yml | 4 ++-- .github/workflows/copilot-pr-prompt-analysis.lock.yml | 4 ++-- .github/workflows/copilot-session-insights.lock.yml | 4 ++-- .github/workflows/daily-doc-updater.lock.yml | 4 ++-- .github/workflows/daily-multi-device-docs-tester.lock.yml | 4 ++-- .github/workflows/daily-news.lock.yml | 4 ++-- .github/workflows/daily-repo-chronicle.lock.yml | 4 ++-- .github/workflows/developer-docs-consolidator.lock.yml | 4 ++-- .github/workflows/docs-noob-tester.lock.yml | 4 ++-- .github/workflows/firewall.lock.yml | 4 ++-- .github/workflows/github-mcp-structural-analysis.lock.yml | 4 ++-- .github/workflows/glossary-maintainer.lock.yml | 4 ++-- .github/workflows/instructions-janitor.lock.yml | 4 ++-- .github/workflows/mcp-inspector.lock.yml | 4 ++-- .github/workflows/prompt-clustering-analysis.lock.yml | 4 ++-- .github/workflows/python-data-charts.lock.yml | 4 ++-- .github/workflows/release.lock.yml | 4 ++-- .github/workflows/research.lock.yml | 4 ++-- .github/workflows/smoke-claude.lock.yml | 4 ++-- .github/workflows/smoke-copilot.lock.yml | 4 ++-- .github/workflows/technical-doc-writer.lock.yml | 4 ++-- .github/workflows/test-firewall-escape.lock.yml | 4 ++-- .github/workflows/unbloat-docs.lock.yml | 4 ++-- .github/workflows/weekly-issue-summary.lock.yml | 4 ++-- pkg/constants/constants.go | 2 +- pkg/constants/constants_test.go | 2 +- 33 files changed, 67 insertions(+), 62 deletions(-) diff --git a/.github/aw/actions-lock.json b/.github/aw/actions-lock.json index ed3fa4e4e8..6b19638f59 100644 --- a/.github/aw/actions-lock.json +++ b/.github/aw/actions-lock.json @@ -25,6 +25,11 @@ "version": "v6", "sha": "018cc2cf5baa6db3ef3c5f8a56943fffe632ef53" }, + "actions/github-script@v7": { + "repo": "actions/github-script", + "version": "v7", + "sha": "f28e40c7f34bde8b3046d885e986cb6290c5673b" + }, "actions/github-script@v8": { "repo": "actions/github-script", "version": "v8", diff --git a/.github/workflows/artifacts-summary.lock.yml b/.github/workflows/artifacts-summary.lock.yml index 85520247d5..3aa7ec0d17 
100644 --- a/.github/workflows/artifacts-summary.lock.yml +++ b/.github/workflows/artifacts-summary.lock.yml @@ -406,8 +406,8 @@ jobs: node-version: '24' - name: Install awf binary run: | - echo "Installing awf from release: v0.3.0" - curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.3.0/awf-linux-x64 -o awf + echo "Installing awf from release: v0.4.0" + curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.4.0/awf-linux-x64 -o awf chmod +x awf sudo mv awf /usr/local/bin/ which awf diff --git a/.github/workflows/blog-auditor.lock.yml b/.github/workflows/blog-auditor.lock.yml index d08cb049ae..fd246502ec 100644 --- a/.github/workflows/blog-auditor.lock.yml +++ b/.github/workflows/blog-auditor.lock.yml @@ -608,8 +608,8 @@ jobs: node-version: '24' - name: Install awf binary run: | - echo "Installing awf from release: v0.3.0" - curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.3.0/awf-linux-x64 -o awf + echo "Installing awf from release: v0.4.0" + curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.4.0/awf-linux-x64 -o awf chmod +x awf sudo mv awf /usr/local/bin/ which awf diff --git a/.github/workflows/changeset.lock.yml b/.github/workflows/changeset.lock.yml index 0d9c48135c..cdc1be700d 100644 --- a/.github/workflows/changeset.lock.yml +++ b/.github/workflows/changeset.lock.yml @@ -1121,8 +1121,8 @@ jobs: node-version: '24' - name: Install awf binary run: | - echo "Installing awf from release: v0.3.0" - curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.3.0/awf-linux-x64 -o awf + echo "Installing awf from release: v0.4.0" + curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.4.0/awf-linux-x64 -o awf chmod +x awf sudo mv awf /usr/local/bin/ which awf diff --git a/.github/workflows/cli-consistency-checker.lock.yml b/.github/workflows/cli-consistency-checker.lock.yml index 873e3117ca..75658df266 100644 --- a/.github/workflows/cli-consistency-checker.lock.yml +++ b/.github/workflows/cli-consistency-checker.lock.yml @@ -437,8 +437,8 @@ jobs: node-version: '24' - name: Install awf binary run: | - echo "Installing awf from release: v0.3.0" - curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.3.0/awf-linux-x64 -o awf + echo "Installing awf from release: v0.4.0" + curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.4.0/awf-linux-x64 -o awf chmod +x awf sudo mv awf /usr/local/bin/ which awf diff --git a/.github/workflows/cli-version-checker.lock.yml b/.github/workflows/cli-version-checker.lock.yml index 5e8e77adcc..1d63eea237 100644 --- a/.github/workflows/cli-version-checker.lock.yml +++ b/.github/workflows/cli-version-checker.lock.yml @@ -602,8 +602,8 @@ jobs: node-version: '24' - name: Install awf binary run: | - echo "Installing awf from release: v0.3.0" - curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.3.0/awf-linux-x64 -o awf + echo "Installing awf from release: v0.4.0" + curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.4.0/awf-linux-x64 -o awf chmod +x awf sudo mv awf /usr/local/bin/ which awf diff --git a/.github/workflows/copilot-agent-analysis.lock.yml b/.github/workflows/copilot-agent-analysis.lock.yml index 8bd1b9ba58..416594bfb9 100644 --- a/.github/workflows/copilot-agent-analysis.lock.yml +++ b/.github/workflows/copilot-agent-analysis.lock.yml @@ -906,8 +906,8 @@ jobs: node-version: '24' - name: Install awf binary run: | - echo "Installing awf from release: 
v0.3.0" - curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.3.0/awf-linux-x64 -o awf + echo "Installing awf from release: v0.4.0" + curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.4.0/awf-linux-x64 -o awf chmod +x awf sudo mv awf /usr/local/bin/ which awf diff --git a/.github/workflows/copilot-pr-nlp-analysis.lock.yml b/.github/workflows/copilot-pr-nlp-analysis.lock.yml index 65ce7aec42..36e9c26e47 100644 --- a/.github/workflows/copilot-pr-nlp-analysis.lock.yml +++ b/.github/workflows/copilot-pr-nlp-analysis.lock.yml @@ -1157,8 +1157,8 @@ jobs: node-version: '24' - name: Install awf binary run: | - echo "Installing awf from release: v0.3.0" - curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.3.0/awf-linux-x64 -o awf + echo "Installing awf from release: v0.4.0" + curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.4.0/awf-linux-x64 -o awf chmod +x awf sudo mv awf /usr/local/bin/ which awf diff --git a/.github/workflows/copilot-pr-prompt-analysis.lock.yml b/.github/workflows/copilot-pr-prompt-analysis.lock.yml index f405053e57..357c7cd762 100644 --- a/.github/workflows/copilot-pr-prompt-analysis.lock.yml +++ b/.github/workflows/copilot-pr-prompt-analysis.lock.yml @@ -730,8 +730,8 @@ jobs: node-version: '24' - name: Install awf binary run: | - echo "Installing awf from release: v0.3.0" - curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.3.0/awf-linux-x64 -o awf + echo "Installing awf from release: v0.4.0" + curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.4.0/awf-linux-x64 -o awf chmod +x awf sudo mv awf /usr/local/bin/ which awf diff --git a/.github/workflows/copilot-session-insights.lock.yml b/.github/workflows/copilot-session-insights.lock.yml index 327a2c016b..5070b3e190 100644 --- a/.github/workflows/copilot-session-insights.lock.yml +++ b/.github/workflows/copilot-session-insights.lock.yml @@ -1817,8 +1817,8 @@ jobs: node-version: '24' - name: Install awf binary run: | - echo "Installing awf from release: v0.3.0" - curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.3.0/awf-linux-x64 -o awf + echo "Installing awf from release: v0.4.0" + curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.4.0/awf-linux-x64 -o awf chmod +x awf sudo mv awf /usr/local/bin/ which awf diff --git a/.github/workflows/daily-doc-updater.lock.yml b/.github/workflows/daily-doc-updater.lock.yml index cfca05d5c8..ed37332da7 100644 --- a/.github/workflows/daily-doc-updater.lock.yml +++ b/.github/workflows/daily-doc-updater.lock.yml @@ -449,8 +449,8 @@ jobs: node-version: '24' - name: Install awf binary run: | - echo "Installing awf from release: v0.3.0" - curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.3.0/awf-linux-x64 -o awf + echo "Installing awf from release: v0.4.0" + curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.4.0/awf-linux-x64 -o awf chmod +x awf sudo mv awf /usr/local/bin/ which awf diff --git a/.github/workflows/daily-multi-device-docs-tester.lock.yml b/.github/workflows/daily-multi-device-docs-tester.lock.yml index 9a029ba53e..68700f87f1 100644 --- a/.github/workflows/daily-multi-device-docs-tester.lock.yml +++ b/.github/workflows/daily-multi-device-docs-tester.lock.yml @@ -371,8 +371,8 @@ jobs: node-version: '24' - name: Install awf binary run: | - echo "Installing awf from release: v0.3.0" - curl -L 
https://github.com/githubnext/gh-aw-firewall/releases/download/v0.3.0/awf-linux-x64 -o awf + echo "Installing awf from release: v0.4.0" + curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.4.0/awf-linux-x64 -o awf chmod +x awf sudo mv awf /usr/local/bin/ which awf diff --git a/.github/workflows/daily-news.lock.yml b/.github/workflows/daily-news.lock.yml index 79661b1b3a..3b46c1112e 100644 --- a/.github/workflows/daily-news.lock.yml +++ b/.github/workflows/daily-news.lock.yml @@ -1110,8 +1110,8 @@ jobs: node-version: '24' - name: Install awf binary run: | - echo "Installing awf from release: v0.3.0" - curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.3.0/awf-linux-x64 -o awf + echo "Installing awf from release: v0.4.0" + curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.4.0/awf-linux-x64 -o awf chmod +x awf sudo mv awf /usr/local/bin/ which awf diff --git a/.github/workflows/daily-repo-chronicle.lock.yml b/.github/workflows/daily-repo-chronicle.lock.yml index f3fd16599d..a1260df200 100644 --- a/.github/workflows/daily-repo-chronicle.lock.yml +++ b/.github/workflows/daily-repo-chronicle.lock.yml @@ -981,8 +981,8 @@ jobs: node-version: '24' - name: Install awf binary run: | - echo "Installing awf from release: v0.3.0" - curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.3.0/awf-linux-x64 -o awf + echo "Installing awf from release: v0.4.0" + curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.4.0/awf-linux-x64 -o awf chmod +x awf sudo mv awf /usr/local/bin/ which awf diff --git a/.github/workflows/developer-docs-consolidator.lock.yml b/.github/workflows/developer-docs-consolidator.lock.yml index ece980773b..6be48488e7 100644 --- a/.github/workflows/developer-docs-consolidator.lock.yml +++ b/.github/workflows/developer-docs-consolidator.lock.yml @@ -965,8 +965,8 @@ jobs: node-version: '24' - name: Install awf binary run: | - echo "Installing awf from release: v0.3.0" - curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.3.0/awf-linux-x64 -o awf + echo "Installing awf from release: v0.4.0" + curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.4.0/awf-linux-x64 -o awf chmod +x awf sudo mv awf /usr/local/bin/ which awf diff --git a/.github/workflows/docs-noob-tester.lock.yml b/.github/workflows/docs-noob-tester.lock.yml index b7b5afc1c2..239e37c856 100644 --- a/.github/workflows/docs-noob-tester.lock.yml +++ b/.github/workflows/docs-noob-tester.lock.yml @@ -433,8 +433,8 @@ jobs: node-version: '24' - name: Install awf binary run: | - echo "Installing awf from release: v0.3.0" - curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.3.0/awf-linux-x64 -o awf + echo "Installing awf from release: v0.4.0" + curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.4.0/awf-linux-x64 -o awf chmod +x awf sudo mv awf /usr/local/bin/ which awf diff --git a/.github/workflows/firewall.lock.yml b/.github/workflows/firewall.lock.yml index 4980a445c2..dd51903f6d 100644 --- a/.github/workflows/firewall.lock.yml +++ b/.github/workflows/firewall.lock.yml @@ -265,8 +265,8 @@ jobs: node-version: '24' - name: Install awf binary run: | - echo "Installing awf from release: v0.3.0" - curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.3.0/awf-linux-x64 -o awf + echo "Installing awf from release: v0.4.0" + curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.4.0/awf-linux-x64 -o awf chmod 
+x awf sudo mv awf /usr/local/bin/ which awf diff --git a/.github/workflows/github-mcp-structural-analysis.lock.yml b/.github/workflows/github-mcp-structural-analysis.lock.yml index 046f730814..0f56fb10b5 100644 --- a/.github/workflows/github-mcp-structural-analysis.lock.yml +++ b/.github/workflows/github-mcp-structural-analysis.lock.yml @@ -979,8 +979,8 @@ jobs: node-version: '24' - name: Install awf binary run: | - echo "Installing awf from release: v0.3.0" - curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.3.0/awf-linux-x64 -o awf + echo "Installing awf from release: v0.4.0" + curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.4.0/awf-linux-x64 -o awf chmod +x awf sudo mv awf /usr/local/bin/ which awf diff --git a/.github/workflows/glossary-maintainer.lock.yml b/.github/workflows/glossary-maintainer.lock.yml index 4a65964778..6118f92e91 100644 --- a/.github/workflows/glossary-maintainer.lock.yml +++ b/.github/workflows/glossary-maintainer.lock.yml @@ -917,8 +917,8 @@ jobs: node-version: '24' - name: Install awf binary run: | - echo "Installing awf from release: v0.3.0" - curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.3.0/awf-linux-x64 -o awf + echo "Installing awf from release: v0.4.0" + curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.4.0/awf-linux-x64 -o awf chmod +x awf sudo mv awf /usr/local/bin/ which awf diff --git a/.github/workflows/instructions-janitor.lock.yml b/.github/workflows/instructions-janitor.lock.yml index 0200a4bdef..c504c9cffa 100644 --- a/.github/workflows/instructions-janitor.lock.yml +++ b/.github/workflows/instructions-janitor.lock.yml @@ -448,8 +448,8 @@ jobs: node-version: '24' - name: Install awf binary run: | - echo "Installing awf from release: v0.3.0" - curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.3.0/awf-linux-x64 -o awf + echo "Installing awf from release: v0.4.0" + curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.4.0/awf-linux-x64 -o awf chmod +x awf sudo mv awf /usr/local/bin/ which awf diff --git a/.github/workflows/mcp-inspector.lock.yml b/.github/workflows/mcp-inspector.lock.yml index 434540eee0..5c4b745b79 100644 --- a/.github/workflows/mcp-inspector.lock.yml +++ b/.github/workflows/mcp-inspector.lock.yml @@ -606,8 +606,8 @@ jobs: node-version: '24' - name: Install awf binary run: | - echo "Installing awf from release: v0.3.0" - curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.3.0/awf-linux-x64 -o awf + echo "Installing awf from release: v0.4.0" + curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.4.0/awf-linux-x64 -o awf chmod +x awf sudo mv awf /usr/local/bin/ which awf diff --git a/.github/workflows/prompt-clustering-analysis.lock.yml b/.github/workflows/prompt-clustering-analysis.lock.yml index 54b0e7aff6..baa4968779 100644 --- a/.github/workflows/prompt-clustering-analysis.lock.yml +++ b/.github/workflows/prompt-clustering-analysis.lock.yml @@ -1300,8 +1300,8 @@ jobs: node-version: '24' - name: Install awf binary run: | - echo "Installing awf from release: v0.3.0" - curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.3.0/awf-linux-x64 -o awf + echo "Installing awf from release: v0.4.0" + curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.4.0/awf-linux-x64 -o awf chmod +x awf sudo mv awf /usr/local/bin/ which awf diff --git a/.github/workflows/python-data-charts.lock.yml 
b/.github/workflows/python-data-charts.lock.yml index e5d7197345..e6e86f250d 100644 --- a/.github/workflows/python-data-charts.lock.yml +++ b/.github/workflows/python-data-charts.lock.yml @@ -1293,8 +1293,8 @@ jobs: node-version: '24' - name: Install awf binary run: | - echo "Installing awf from release: v0.3.0" - curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.3.0/awf-linux-x64 -o awf + echo "Installing awf from release: v0.4.0" + curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.4.0/awf-linux-x64 -o awf chmod +x awf sudo mv awf /usr/local/bin/ which awf diff --git a/.github/workflows/release.lock.yml b/.github/workflows/release.lock.yml index c708690b52..33a2290a52 100644 --- a/.github/workflows/release.lock.yml +++ b/.github/workflows/release.lock.yml @@ -406,8 +406,8 @@ jobs: node-version: '24' - name: Install awf binary run: | - echo "Installing awf from release: v0.3.0" - curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.3.0/awf-linux-x64 -o awf + echo "Installing awf from release: v0.4.0" + curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.4.0/awf-linux-x64 -o awf chmod +x awf sudo mv awf /usr/local/bin/ which awf diff --git a/.github/workflows/research.lock.yml b/.github/workflows/research.lock.yml index 9d3feb8cad..6da8830c11 100644 --- a/.github/workflows/research.lock.yml +++ b/.github/workflows/research.lock.yml @@ -377,8 +377,8 @@ jobs: node-version: '24' - name: Install awf binary run: | - echo "Installing awf from release: v0.3.0" - curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.3.0/awf-linux-x64 -o awf + echo "Installing awf from release: v0.4.0" + curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.4.0/awf-linux-x64 -o awf chmod +x awf sudo mv awf /usr/local/bin/ which awf diff --git a/.github/workflows/smoke-claude.lock.yml b/.github/workflows/smoke-claude.lock.yml index c0d93a748e..af5d70b992 100644 --- a/.github/workflows/smoke-claude.lock.yml +++ b/.github/workflows/smoke-claude.lock.yml @@ -890,8 +890,8 @@ jobs: node-version: '24' - name: Install awf binary run: | - echo "Installing awf from release: v0.3.0" - curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.3.0/awf-linux-x64 -o awf + echo "Installing awf from release: v0.4.0" + curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.4.0/awf-linux-x64 -o awf chmod +x awf sudo mv awf /usr/local/bin/ which awf diff --git a/.github/workflows/smoke-copilot.lock.yml b/.github/workflows/smoke-copilot.lock.yml index c6ea33586c..c5ebdb5170 100644 --- a/.github/workflows/smoke-copilot.lock.yml +++ b/.github/workflows/smoke-copilot.lock.yml @@ -775,8 +775,8 @@ jobs: node-version: '24' - name: Install awf binary run: | - echo "Installing awf from release: v0.3.0" - curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.3.0/awf-linux-x64 -o awf + echo "Installing awf from release: v0.4.0" + curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.4.0/awf-linux-x64 -o awf chmod +x awf sudo mv awf /usr/local/bin/ which awf diff --git a/.github/workflows/technical-doc-writer.lock.yml b/.github/workflows/technical-doc-writer.lock.yml index d3f1d2d071..fbb962a935 100644 --- a/.github/workflows/technical-doc-writer.lock.yml +++ b/.github/workflows/technical-doc-writer.lock.yml @@ -1177,8 +1177,8 @@ jobs: node-version: '24' - name: Install awf binary run: | - echo "Installing awf from release: v0.3.0" - curl -L 
https://github.com/githubnext/gh-aw-firewall/releases/download/v0.3.0/awf-linux-x64 -o awf + echo "Installing awf from release: v0.4.0" + curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.4.0/awf-linux-x64 -o awf chmod +x awf sudo mv awf /usr/local/bin/ which awf diff --git a/.github/workflows/test-firewall-escape.lock.yml b/.github/workflows/test-firewall-escape.lock.yml index 828413befc..6f07bc88c6 100644 --- a/.github/workflows/test-firewall-escape.lock.yml +++ b/.github/workflows/test-firewall-escape.lock.yml @@ -419,8 +419,8 @@ jobs: node-version: '24' - name: Install awf binary run: | - echo "Installing awf from release: v0.3.0" - curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.3.0/awf-linux-x64 -o awf + echo "Installing awf from release: v0.4.0" + curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.4.0/awf-linux-x64 -o awf chmod +x awf sudo mv awf /usr/local/bin/ which awf diff --git a/.github/workflows/unbloat-docs.lock.yml b/.github/workflows/unbloat-docs.lock.yml index 64bfc6e321..5628e25bbb 100644 --- a/.github/workflows/unbloat-docs.lock.yml +++ b/.github/workflows/unbloat-docs.lock.yml @@ -1431,8 +1431,8 @@ jobs: node-version: '24' - name: Install awf binary run: | - echo "Installing awf from release: v0.3.0" - curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.3.0/awf-linux-x64 -o awf + echo "Installing awf from release: v0.4.0" + curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.4.0/awf-linux-x64 -o awf chmod +x awf sudo mv awf /usr/local/bin/ which awf diff --git a/.github/workflows/weekly-issue-summary.lock.yml b/.github/workflows/weekly-issue-summary.lock.yml index 8405e50cf9..3384de3f92 100644 --- a/.github/workflows/weekly-issue-summary.lock.yml +++ b/.github/workflows/weekly-issue-summary.lock.yml @@ -888,8 +888,8 @@ jobs: node-version: '24' - name: Install awf binary run: | - echo "Installing awf from release: v0.3.0" - curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.3.0/awf-linux-x64 -o awf + echo "Installing awf from release: v0.4.0" + curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.4.0/awf-linux-x64 -o awf chmod +x awf sudo mv awf /usr/local/bin/ which awf diff --git a/pkg/constants/constants.go b/pkg/constants/constants.go index 8b220b2d2d..20df4fa431 100644 --- a/pkg/constants/constants.go +++ b/pkg/constants/constants.go @@ -39,7 +39,7 @@ const DefaultCodexVersion Version = "0.63.0" const DefaultGitHubMCPServerVersion Version = "v0.22.0" // DefaultFirewallVersion is the default version of the gh-aw-firewall (AWF) binary -const DefaultFirewallVersion Version = "v0.3.0" +const DefaultFirewallVersion Version = "v0.4.0" // DefaultPlaywrightMCPVersion is the default version of the @playwright/mcp package const DefaultPlaywrightMCPVersion Version = "0.0.48" diff --git a/pkg/constants/constants_test.go b/pkg/constants/constants_test.go index 9ccf0dc648..a4b3eb6503 100644 --- a/pkg/constants/constants_test.go +++ b/pkg/constants/constants_test.go @@ -249,7 +249,7 @@ func TestVersionConstants(t *testing.T) { {"DefaultCopilotVersion", DefaultCopilotVersion, "0.0.363"}, {"DefaultCodexVersion", DefaultCodexVersion, "0.63.0"}, {"DefaultGitHubMCPServerVersion", DefaultGitHubMCPServerVersion, "v0.22.0"}, - {"DefaultFirewallVersion", DefaultFirewallVersion, "v0.3.0"}, + {"DefaultFirewallVersion", DefaultFirewallVersion, "v0.4.0"}, {"DefaultPlaywrightMCPVersion", DefaultPlaywrightMCPVersion, "0.0.48"}, 
{"DefaultPlaywrightBrowserVersion", DefaultPlaywrightBrowserVersion, "v1.56.1"}, {"DefaultBunVersion", DefaultBunVersion, "1.1"}, From 1840f1b4927fefcc2f3d826efb4f2d9def5ade92 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Wed, 26 Nov 2025 00:31:39 +0000 Subject: [PATCH 3/4] Initial plan From 8e212d2f27204478ec15ebc4ce209c35a9b65b87 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Wed, 26 Nov 2025 00:44:47 +0000 Subject: [PATCH 4/4] docs: update network.md to reflect Claude engine firewall support Co-authored-by: pelikhan <4175913+pelikhan@users.noreply.github.com> --- docs/src/content/docs/reference/network.md | 25 +++++++++++++++++----- 1 file changed, 20 insertions(+), 5 deletions(-) diff --git a/docs/src/content/docs/reference/network.md b/docs/src/content/docs/reference/network.md index 84f64c80c5..f0e9eb4327 100644 --- a/docs/src/content/docs/reference/network.md +++ b/docs/src/content/docs/reference/network.md @@ -7,7 +7,7 @@ sidebar: Control network access for AI engines using the top-level `network` field to specify which domains and services your agentic workflows can access during execution. -> **Note**: Network permissions are currently supported by the Claude engine and the Copilot engine (when using the [firewall feature](/gh-aw/reference/engines/#network-permissions)). +> **Note**: Network permissions are supported by both the Copilot and Claude engines through the AWF (Agent Workflow Firewall). See the [firewall feature](/gh-aw/reference/engines/#network-permissions) documentation for details. If no `network:` permission is specified, it defaults to `network: defaults` which allows access to basic infrastructure domains (certificates, JSON schema, Ubuntu, common package mirrors, Microsoft sources). @@ -64,12 +64,10 @@ Mix ecosystem identifiers with specific domains for fine-grained control: ## Implementation -Network permissions are enforced differently depending on the AI engine: +Both the Copilot and Claude engines support network permissions through AWF (Agent Workflow Firewall). AWF is a network firewall wrapper sourced from [github.com/githubnext/gh-aw-firewall](https://github.com/githubnext/gh-aw-firewall) that enforces domain-based access controls. ### Copilot Engine -The Copilot engine supports network permissions through AWF (Agent Workflow Firewall). AWF is a network firewall wrapper sourced from [github.com/githubnext/gh-aw-firewall](https://github.com/githubnext/gh-aw-firewall) that wraps Copilot CLI execution and enforces domain-based access controls. - Enable network permissions in your workflow: ```yaml wrap @@ -84,11 +82,28 @@ network: ``` When enabled, AWF: -- Wraps the Copilot CLI execution command +- Wraps the AI engine CLI execution command - Enforces domain allowlisting using the `--allow-domains` flag - Logs all network activity for audit purposes - Blocks access to domains not explicitly allowed +### Claude Engine + +The Claude engine also supports AWF firewall for network permissions: + +```yaml wrap +engine: claude + +network: + firewall: true # Enable AWF enforcement + allowed: + - defaults # Basic infrastructure + - python # Python ecosystem + - "api.example.com" # Custom domain +``` + +Claude uses the same AWF firewall configuration options as Copilot, providing consistent network security controls across both engines. + ### Firewall Log Level Control the verbosity of AWF firewall logs using the `log-level` field: