From 8cbf115991d9468f6931238cc2157cb5e9843d02 Mon Sep 17 00:00:00 2001 From: Sandeep Batchu Date: Mon, 6 Apr 2026 12:07:20 -0500 Subject: [PATCH 1/3] feat: add agentic-atx-platform - AI-powered code transformation at scale Multi-agent replatforming system built on Amazon Bedrock AgentCore, Strands Agents, and AWS Transform Custom (ATX). Features: - Orchestrator agent with find/create/execute sub-agents - AI-driven source code analysis for custom transformation creation - React UI with 6 tabs (Transformations, Execute, Create Custom, CSV Batch, Jobs, Chat) - DynamoDB job persistence, S3 file preview, async ZIP downloads - Two deployment options: CDK+SAM (recommended) and CDK-only (experimental) - Built-in observability via AgentCore + X-Ray + CloudWatch --- agentic-atx-platform/.gitignore | 49 + agentic-atx-platform/ARCHITECTURE.md | 184 + agentic-atx-platform/README.md | 359 ++ .../api/lambda/async_invoke_agent.py | 555 ++ agentic-atx-platform/cdk/.gitignore | 9 + agentic-atx-platform/cdk/README.md | 59 + agentic-atx-platform/cdk/bin/cdk.ts | 85 + agentic-atx-platform/cdk/cdk.json | 99 + agentic-atx-platform/cdk/deploy.sh | 74 + agentic-atx-platform/cdk/destroy.sh | 39 + .../cdk/lib/agentcore-stack.ts | 258 + .../cdk/lib/container-stack.ts | 56 + .../cdk/lib/infrastructure-stack.ts | 360 ++ agentic-atx-platform/cdk/lib/ui-stack.ts | 141 + agentic-atx-platform/cdk/package-lock.json | 4518 +++++++++++++++++ agentic-atx-platform/cdk/package.json | 33 + agentic-atx-platform/cdk/tsconfig.json | 32 + agentic-atx-platform/container/.dockerignore | 37 + agentic-atx-platform/container/Dockerfile | 167 + agentic-atx-platform/container/README.md | 414 ++ .../container/download-source.sh | 90 + agentic-atx-platform/container/entrypoint.sh | 395 ++ agentic-atx-platform/container/package.json | 12 + .../container/requirements.txt | 6 + .../container/upload-results.sh | 90 + .../deployment/1-build-and-push.sh | 111 + .../deployment/2-deploy-infrastructure.sh | 627 +++ 
agentic-atx-platform/deployment/README.md | 52 + .../deployment/check-prereqs.sh | 170 + agentic-atx-platform/deployment/cleanup.sh | 102 + .../deployment/config.env.template | 79 + .../deployment/generate-custom-policy.sh | 347 ++ agentic-atx-platform/docs/SECURITY.md | 341 ++ agentic-atx-platform/docs/TROUBLESHOOTING.md | 457 ++ agentic-atx-platform/orchestrator/Dockerfile | 15 + agentic-atx-platform/orchestrator/README.md | 53 + agentic-atx-platform/orchestrator/agent.py | 190 + .../orchestrator/requirements.txt | 5 + .../orchestrator/tools/__init__.py | 1 + .../orchestrator/tools/createtransform.py | 493 ++ .../orchestrator/tools/executetransform.py | 257 + .../orchestrator/tools/findtransform.py | 151 + .../orchestrator/tools/memory_client.py | 60 + .../orchestrator/tools/memory_hooks.py | 103 + agentic-atx-platform/sam/Dockerfile.deploy | 19 + agentic-atx-platform/sam/deploy.sh | 110 + agentic-atx-platform/sam/deploy_agentcore.py | 235 + .../sam/requirements-deploy.txt | 2 + agentic-atx-platform/sam/template.yaml | 262 + agentic-atx-platform/ui/deploy-aws.sh | 199 + agentic-atx-platform/ui/index.html | 12 + agentic-atx-platform/ui/package-lock.json | 1684 ++++++ agentic-atx-platform/ui/package.json | 20 + agentic-atx-platform/ui/sample-batch.csv | 6 + agentic-atx-platform/ui/src/App.css | 280 + agentic-atx-platform/ui/src/App.jsx | 140 + .../ui/src/components/Chat.jsx | 162 + .../ui/src/components/CreateCustom.jsx | 238 + .../ui/src/components/CsvUpload.jsx | 147 + .../ui/src/components/JobTracker.jsx | 704 +++ .../ui/src/components/TransformationForm.jsx | 149 + .../ui/src/components/TransformationList.jsx | 182 + agentic-atx-platform/ui/src/main.jsx | 10 + .../ui/test-create-transform.csv | 3 + agentic-atx-platform/ui/test-mixed-batch.csv | 4 + agentic-atx-platform/ui/vite.config.js | 16 + 66 files changed, 16319 insertions(+) create mode 100644 agentic-atx-platform/.gitignore create mode 100644 agentic-atx-platform/ARCHITECTURE.md create mode 100644 
agentic-atx-platform/README.md create mode 100644 agentic-atx-platform/api/lambda/async_invoke_agent.py create mode 100644 agentic-atx-platform/cdk/.gitignore create mode 100644 agentic-atx-platform/cdk/README.md create mode 100644 agentic-atx-platform/cdk/bin/cdk.ts create mode 100644 agentic-atx-platform/cdk/cdk.json create mode 100755 agentic-atx-platform/cdk/deploy.sh create mode 100644 agentic-atx-platform/cdk/destroy.sh create mode 100644 agentic-atx-platform/cdk/lib/agentcore-stack.ts create mode 100644 agentic-atx-platform/cdk/lib/container-stack.ts create mode 100644 agentic-atx-platform/cdk/lib/infrastructure-stack.ts create mode 100644 agentic-atx-platform/cdk/lib/ui-stack.ts create mode 100644 agentic-atx-platform/cdk/package-lock.json create mode 100644 agentic-atx-platform/cdk/package.json create mode 100644 agentic-atx-platform/cdk/tsconfig.json create mode 100644 agentic-atx-platform/container/.dockerignore create mode 100644 agentic-atx-platform/container/Dockerfile create mode 100644 agentic-atx-platform/container/README.md create mode 100644 agentic-atx-platform/container/download-source.sh create mode 100644 agentic-atx-platform/container/entrypoint.sh create mode 100644 agentic-atx-platform/container/package.json create mode 100644 agentic-atx-platform/container/requirements.txt create mode 100644 agentic-atx-platform/container/upload-results.sh create mode 100644 agentic-atx-platform/deployment/1-build-and-push.sh create mode 100644 agentic-atx-platform/deployment/2-deploy-infrastructure.sh create mode 100644 agentic-atx-platform/deployment/README.md create mode 100644 agentic-atx-platform/deployment/check-prereqs.sh create mode 100644 agentic-atx-platform/deployment/cleanup.sh create mode 100644 agentic-atx-platform/deployment/config.env.template create mode 100755 agentic-atx-platform/deployment/generate-custom-policy.sh create mode 100644 agentic-atx-platform/docs/SECURITY.md create mode 100644 agentic-atx-platform/docs/TROUBLESHOOTING.md 
create mode 100644 agentic-atx-platform/orchestrator/Dockerfile create mode 100644 agentic-atx-platform/orchestrator/README.md create mode 100644 agentic-atx-platform/orchestrator/agent.py create mode 100644 agentic-atx-platform/orchestrator/requirements.txt create mode 100644 agentic-atx-platform/orchestrator/tools/__init__.py create mode 100644 agentic-atx-platform/orchestrator/tools/createtransform.py create mode 100644 agentic-atx-platform/orchestrator/tools/executetransform.py create mode 100644 agentic-atx-platform/orchestrator/tools/findtransform.py create mode 100644 agentic-atx-platform/orchestrator/tools/memory_client.py create mode 100644 agentic-atx-platform/orchestrator/tools/memory_hooks.py create mode 100644 agentic-atx-platform/sam/Dockerfile.deploy create mode 100755 agentic-atx-platform/sam/deploy.sh create mode 100644 agentic-atx-platform/sam/deploy_agentcore.py create mode 100644 agentic-atx-platform/sam/requirements-deploy.txt create mode 100644 agentic-atx-platform/sam/template.yaml create mode 100755 agentic-atx-platform/ui/deploy-aws.sh create mode 100644 agentic-atx-platform/ui/index.html create mode 100644 agentic-atx-platform/ui/package-lock.json create mode 100644 agentic-atx-platform/ui/package.json create mode 100644 agentic-atx-platform/ui/sample-batch.csv create mode 100644 agentic-atx-platform/ui/src/App.css create mode 100644 agentic-atx-platform/ui/src/App.jsx create mode 100644 agentic-atx-platform/ui/src/components/Chat.jsx create mode 100644 agentic-atx-platform/ui/src/components/CreateCustom.jsx create mode 100644 agentic-atx-platform/ui/src/components/CsvUpload.jsx create mode 100644 agentic-atx-platform/ui/src/components/JobTracker.jsx create mode 100644 agentic-atx-platform/ui/src/components/TransformationForm.jsx create mode 100644 agentic-atx-platform/ui/src/components/TransformationList.jsx create mode 100644 agentic-atx-platform/ui/src/main.jsx create mode 100644 agentic-atx-platform/ui/test-create-transform.csv create 
mode 100644 agentic-atx-platform/ui/test-mixed-batch.csv create mode 100644 agentic-atx-platform/ui/vite.config.js diff --git a/agentic-atx-platform/.gitignore b/agentic-atx-platform/.gitignore new file mode 100644 index 0000000..940df77 --- /dev/null +++ b/agentic-atx-platform/.gitignore @@ -0,0 +1,49 @@ +# Dependencies +node_modules/ +.venv/ +__pycache__/ +*.pyc + +# Build output +ui/dist/ +ui/.vite/ +cdk.out/ +.cdk.staging/ +sam/orchestrator-bundle/ +sam/.aws-sam/ + +# CDK compiled output (regenerated by tsc) +cdk/lib/*.js +cdk/lib/*.d.ts +cdk/bin/*.js +cdk/bin/*.d.ts +cdk/test/*.js +cdk/test/*.d.ts + +# CDK context (contains account-specific VPC lookups) +cdk/cdk.context.json + +# AgentCore +orchestrator/.bedrock_agentcore/ +orchestrator/.bedrock_agentcore.yaml +.bedrock_agentcore/ +.bedrock_agentcore.yaml + +# Environment and secrets +.env +*.env.local +deployment/config.env +deployment/iam-custom-policy.json + +# Logs +*.log + +# OS +.DS_Store +Thumbs.db + +# IDE +.idea/ +.vscode/ +.kiro/ +*.swp diff --git a/agentic-atx-platform/ARCHITECTURE.md b/agentic-atx-platform/ARCHITECTURE.md new file mode 100644 index 0000000..b33ab35 --- /dev/null +++ b/agentic-atx-platform/ARCHITECTURE.md @@ -0,0 +1,184 @@ +# ATX Transform Platform - Architecture + +## Overview + +AI-powered code transformation platform built on Amazon Bedrock AgentCore and AWS Transform CLI. All operations flow through a single orchestrator agent that coordinates specialized sub-agents. 
+ +## Architecture + +``` +┌──────────────────────────────────────────────────────────────┐ +│ UI (React + CloudFront) │ +│ Tabs: Transformations | Execute | Create Custom | CSV Batch | Jobs │ +└──────────────────────┬───────────────────────────────────────┘ + │ + POST /orchestrate + (submit + poll) + │ + ▼ +┌──────────────────────────────────────────────────────────────┐ +│ async_invoke_agent Lambda │ +│ ├── submit: fire-and-forget to AgentCore │ +│ ├── poll: read result from S3 │ +│ └── direct: fast Batch/S3 calls (status, results, customs) │ +└──────────────────────┬───────────────────────────────────────┘ + │ + ▼ +┌──────────────────────────────────────────────────────────────┐ +│ Bedrock AgentCore Runtime │ +│ │ +│ Orchestrator Agent (Strands + Claude Sonnet 4) │ +│ ├── find_transform_agent (sub-agent) │ +│ │ ├── list_transformations (static catalog) │ +│ │ ├── search_transformations (keyword search) │ +│ │ └── list_published_custom (S3 lookup) │ +│ ├── execute_transform_agent (sub-agent) │ +│ │ ├── execute_transformation → Batch submit │ +│ │ ├── get_job_status → Batch describe │ +│ │ └── list_job_results → S3 list │ +│ └── create_transform_agent (direct tool calls) │ +│ ├── upload_repo_to_s3 → Batch clone + S3 sync │ +│ ├── list_repo_files → S3 list (file tree) │ +│ ├── read_repo_file → S3 get (individual files) │ +│ ├── generate_transformation_definition → Bedrock + S3 │ +│ ├── publish_transformation → Batch publish job │ +│ └── list_registry_transformations → Batch list job │ +│ │ +│ Memory: ShortTermMemoryHook (AgentCore Memory) │ +└──────────────────────┬───────────────────────────────────────┘ + │ + ┌────────────┼────────────┐ + ▼ ▼ ▼ + Amazon S3 AWS Batch Amazon Bedrock + (definitions (Fargate + (Claude + + results) ATX CLI) Sonnet 4) +``` + +## Data Flows + +### Execute Transformation +``` +UI → /orchestrate (submit) → Lambda (async) → AgentCore + → Orchestrator → execute_transform_agent → execute_transformation + → batch_client.submit_job() → Batch → 
ATX CLI container + → Results to S3 +UI → /orchestrate (poll) → Lambda → S3 → result with job_id +UI → /orchestrate (direct, status) → Lambda → Batch describe_jobs +UI → /orchestrate (direct, results) → Lambda → S3 list_objects +``` + +### Create Custom Transformation +``` +UI → /orchestrate (submit) → Lambda (async) → AgentCore + → Orchestrator → create_transform_agent + + Step 1: Extract parameters from natural language (Bedrock) + Step 2: Upload repo to S3 (if source URL provided) + → Batch job: git clone → aws s3 sync (full repo) → poll until done + → Files stored at s3://atx-source-code-{account}/repo-snapshots/{name}/ + + Step 3: Smart file selection + → list_repo_files: S3 list all files with sizes + → If total source size < 400K chars: read ALL source files (small repo) + → If total source size >= 400K chars: AI selects most relevant files + based on transformation requirements (budget-aware file count) + + Step 4: Read selected files from S3 (up to 400K chars / ~100K tokens) + Step 5: Generate definition (Bedrock with full source code context) + → Uploads transformation_definition.md to S3 + Step 6: Publish (Batch job: atx custom def publish) + → status.json written to S3 + + Without source repo: skips steps 2-4, generates from requirements only + +UI → /orchestrate (direct, list_custom) → Lambda → S3 list +UI → /orchestrate (direct, check_publish) → Lambda → Batch + S3 update +UI → /orchestrate (direct, get_file) → Lambda → S3 get (definition preview) +``` + +### Design Decisions: Custom Transformation Creation + +- **Full repo upload vs summary extraction**: The full repo is uploaded to S3 so the AI can + selectively read files based on the transformation requirements. This produces higher quality + definitions than a fixed shell-based summary because the AI chooses what's relevant. + +- **Smart file selection**: For small repos (< 400K chars of source code), all files are read + without an AI selection step — saves one Bedrock call. 
For large repos, AI picks files with + a budget-aware max count calculated from average file size vs the 400K context budget. + +- **400K character context limit**: ~100K tokens, leaving headroom in Claude Sonnet 4's 200K + token context window for the system prompt, requirements, and output generation (8K tokens). + +- **Direct tool calls vs nested agent**: The create_transform_agent uses direct Bedrock API + calls and sequential tool invocations instead of a nested Strands agent. This avoids a + streaming type bug in the Strands SDK and gives more predictable execution. + +- **Three Bedrock calls** (with source): extract params → select files (large repos only) → generate definition. + Two Bedrock calls for small repos (extract params → generate definition). + +### CSV Batch +``` +UI builds one prompt per row → sequential orchestrate() calls +Each row: submit → poll → extract job_id → add to Jobs tab +Rows with transformation specified: direct execute +Rows without transformation: orchestrator follows find → create → execute chain + → find_transform_agent searches catalog + → If no match: create_transform_agent generates + publishes custom transform + → execute_transform_agent runs the transformation +``` + +## Components + +| Component | Path | Purpose | +|-----------|------|---------| +| Orchestrator | `orchestrator/agent.py` | AgentCore agent with 3 sub-agents | +| Find tool | `orchestrator/tools/findtransform.py` | Catalog search + custom listing | +| Execute tool | `orchestrator/tools/executetransform.py` | Batch submit + status + results | +| Create tool | `orchestrator/tools/createtransform.py` | Analyze source, generate definition, publish | +| Memory | `orchestrator/tools/memory_*.py` | AgentCore short-term memory | +| Async Lambda | `api/lambda/async_invoke_agent.py` | Submit/poll/direct bridge | +| UI | `ui/src/` | React app (5 tabs) | +| Infrastructure | `cdk/` | Batch, S3, VPC, CloudFront, AgentCore | +| SAM Layer | `sam/` | AgentCore deploy Lambda + 
API (Option A) | +| Container | `container/` | ATX CLI Docker image | + +## AWS Services + +| Service | Purpose | +|---------|---------| +| Bedrock AgentCore | Orchestrator runtime | +| Bedrock (Claude Sonnet 4) | AI reasoning + YAML generation | +| AgentCore Memory | Conversation context | +| AWS Batch (Fargate) | ATX CLI execution | +| S3 | Definitions, repo snapshots, results, UI hosting, orchestrator results, job tracking | +| CloudFront | UI CDN | +| API Gateway v2 (HTTP) | Single /orchestrate endpoint | +| Lambda | Async bridge (submit/poll/direct) | +| DynamoDB | Job tracking (persisted across sessions) | + +## Project Structure + +``` +├── orchestrator/ # AgentCore orchestrator +│ ├── agent.py # Main agent (3 sub-agents) +│ ├── tools/ # find, execute, create, memory +│ ├── Dockerfile # Container image for CDK deployment +│ └── requirements.txt +├── api/lambda/ # Async bridge Lambda +│ └── async_invoke_agent.py +├── ui/ # React frontend (5 tabs) +│ └── src/components/ # TransformationList, Form, CreateCustom, CsvUpload, JobTracker +├── cdk/ # CDK stacks (Container, Infrastructure, AgentCore, UI) +│ └── lib/ +│ ├── container-stack.ts # ECR + Docker image +│ ├── infrastructure-stack.ts # Batch, S3, VPC, IAM +│ ├── agentcore-stack.ts # AgentCore + Lambda + API (Option B, experimental) +│ └── ui-stack.ts # S3 + CloudFront +├── sam/ # SAM template for AgentCore + API (Option A) +│ ├── template.yaml +│ ├── deploy_agentcore.py +│ └── deploy.sh +├── container/ # ATX CLI Docker image +├── deployment/ # Infrastructure scripts + config +└── docs/ # Security + troubleshooting +``` diff --git a/agentic-atx-platform/README.md b/agentic-atx-platform/README.md new file mode 100644 index 0000000..5e8eade --- /dev/null +++ b/agentic-atx-platform/README.md @@ -0,0 +1,359 @@ +# ATX Transform Platform + +AI-powered code transformation platform built on Amazon Bedrock AgentCore and AWS Transform CLI (ATX). 
Transform and modernize codebases at scale using natural language through a web UI or CLI. + +## What It Does + +- Execute AWS-managed transformations (Python/Java/Node.js upgrades, SDK migrations, codebase analysis) +- Create and publish custom transformations using natural language +- Batch process multiple repositories via CSV upload +- Track transformation jobs with real-time status and results + +## Architecture + +``` +UI (CloudFront) → HTTP API (/orchestrate) → async Lambda → AgentCore Orchestrator + ├── find_transform_agent + ├── execute_transform_agent → AWS Batch (ATX CLI) + └── create_transform_agent → Bedrock AI + Batch publish +``` + +See [ARCHITECTURE.md](ARCHITECTURE.md) for detailed diagrams and data flows. + +--- + +## Prerequisites + +| Tool | Version | macOS | Windows | Linux | +|------|---------|-------|---------|-------| +| AWS CLI | v2.13+ | `brew install awscli` | [MSI Installer](https://docs.aws.amazon.com/cli/latest/userguide/getting-started-install.html) | `curl`, `apt`, or `yum` | +| Python | 3.11+ | `brew install python@3.11` | [python.org](https://www.python.org/downloads/) | `apt install python3.11` | +| Node.js | 18+ | `brew install node` | [nodejs.org](https://nodejs.org/) | `apt install nodejs` | +| Docker | 20+ | [Docker Desktop](https://docs.docker.com/get-docker/) | [Docker Desktop](https://docs.docker.com/get-docker/) | `apt install docker.io` | +| SAM CLI | Latest | `brew install aws-sam-cli` | [MSI Installer](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/install-sam-cli.html) | `pip install aws-sam-cli` | + +> **Windows users:** Use WSL2 (Windows Subsystem for Linux) for the best experience. The deployment scripts are bash-based. Alternatively, run commands in Git Bash or PowerShell with adjustments. 
+ +**AWS Account Requirements:** +- Bedrock model access enabled for Claude Sonnet 4 +- Default VPC with public subnets (or configure existing VPC) + +--- + +## Configuration + +All configuration is in `deployment/config.env`. Copy the template and customize: + +```bash +cd deployment +cp config.env.template config.env +# Edit config.env as needed (defaults work for most setups) +``` + +Key settings: + +| Setting | Default | Description | +|---------|---------|-------------| +| `AWS_REGION` | `us-east-1` | AWS region for all services | +| `BEDROCK_MODEL_ID` | `us.anthropic.claude-sonnet-4-20250514-v1:0` | AI model for orchestrator | +| `FARGATE_VCPU` | `2` | vCPU for Batch jobs | +| `FARGATE_MEMORY` | `4096` | Memory (MB) for Batch jobs | +| `JOB_TIMEOUT` | `43200` | Max job duration (seconds) | + +See `deployment/config.env.template` for all options. + +--- + +## Deployment + +Two deployment options are available. Choose the one that fits your needs. + +### Option A: CDK + SAM Deployment (Recommended) + +Uses CDK for infrastructure (stable constructs only) and SAM for the AgentCore + API layer. No alpha/experimental constructs. Uses the `bedrock-agentcore-control` SDK to deploy the orchestrator. + +> **Requires:** [SAM CLI](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/install-sam-cli.html) in addition to CDK. + +#### Step 1: Enable Bedrock Model Access + +1. Go to [Bedrock console](https://console.aws.amazon.com/bedrock/home) → Model access +2. 
Enable **Anthropic Claude Sonnet 4** + +#### Step 2: Deploy Base Infrastructure via CDK + +```bash +cd deployment +cp config.env.template config.env + +# Login to ECR Public +aws ecr-public get-login-password --region us-east-1 | docker login --username AWS --password-stdin public.ecr.aws + +cd ../cdk +npm install +cdk bootstrap # First time only + +# Build and deploy Container + Infrastructure + UI stacks +npx tsc +CDK_DEFAULT_ACCOUNT=$(aws sts get-caller-identity --query Account --output text) \ + cdk deploy AtxContainerStack AtxInfrastructureStack AtxUiStack --require-approval never +``` + +> For accounts without a default VPC, pass VPC context: +> ```bash +> cdk deploy AtxContainerStack AtxInfrastructureStack AtxUiStack --require-approval never \ +> -c existingVpcId=vpc-xxx -c existingSubnetIds=subnet-aaa,subnet-bbb -c existingSecurityGroupId=sg-ccc +> ``` + +#### Step 3: Deploy AgentCore + API via SAM + +```bash +cd ../sam +./deploy.sh +``` + +This builds the orchestrator Docker image, pushes to ECR, and deploys: +- A deploy-Lambda that creates the AgentCore Runtime via SDK +- The async invoke Lambda + HTTP API Gateway + +#### Step 4: Deploy Orchestrator to AgentCore + +Invoke the deploy-Lambda directly (takes 2-5 minutes): +```bash +aws lambda invoke --function-name atx-deploy-agentcore \ + --cli-binary-format raw-in-base64-out \ + --payload '{"action":"deploy"}' \ + --cli-read-timeout 900 /tmp/deploy-output.json + +cat /tmp/deploy-output.json +``` + +The Lambda uses the `bedrock-agentcore-control` SDK to: +1. Create the AgentCore Runtime with the orchestrator container +2. Poll until the runtime is READY +3. 
Return the runtime ARN + +Then update the async Lambda with the Agent Runtime ARN from the output: +```bash +ACCOUNT_ID=$(aws sts get-caller-identity --query Account --output text) +RUNTIME_ARN=$(python3 -c "import json; print(json.loads(json.load(open('/tmp/deploy-output.json'))['body'])['runtime_arn'])") + +aws lambda update-function-configuration --function-name atx-async-invoke-agent \ + --environment "Variables={AGENT_RUNTIME_ARN=${RUNTIME_ARN},RESULT_BUCKET=atx-custom-output-${ACCOUNT_ID},JOBS_TABLE=atx-transform-jobs}" +``` + +#### Step 5: Rebuild UI with API Endpoint + +```bash +API_URL=$(aws cloudformation describe-stacks --stack-name AtxAgentCoreSAM \ + --query 'Stacks[0].Outputs[?OutputKey==`ApiEndpoint`].OutputValue' --output text) + +cd ui && npm install +VITE_API_ENDPOINT=$API_URL npx vite build +./deploy-aws.sh +``` +--- + +### Option B: CDK-Only Deployment (Experimental) + +Deploys the entire platform with a single `cdk deploy --all`. Uses the `@aws-cdk/aws-bedrock-agentcore-alpha` CDK construct. + +> ⚠️ **Note:** The AgentCore CDK construct is experimental and under active development. APIs may change in future releases. For production workloads, use Option A. + +#### Step 1: Enable Bedrock Model Access + +Same as Option A Step 1. 
+ +#### Step 2: Deploy Everything + +```bash +cd deployment +cp config.env.template config.env # Edit if needed (defaults work) + +# Login to ECR Public (required for Docker base image pull) +aws ecr-public get-login-password --region us-east-1 | docker login --username AWS --password-stdin public.ecr.aws + +# Build UI placeholder (CDK needs ui/dist/ to exist at synth time) +cd ../ui && npm install && npx vite build && cd ../cdk +npm install + +# Bootstrap CDK (first time only) +cdk bootstrap + +# Build TypeScript and deploy all stacks +npx tsc +CDK_DEFAULT_ACCOUNT=$(aws sts get-caller-identity --query Account --output text) \ + cdk deploy --all --require-approval never +``` + +> **Note:** Use the global `cdk` CLI (`npm install -g aws-cdk`) rather than `npx cdk` to avoid version conflicts with the alpha package. + +This deploys 4 stacks in order: +1. `AtxContainerStack` — ECR + Docker image +2. `AtxInfrastructureStack` — Batch, S3, VPC, IAM +3. `AtxAgentCoreStack` — AgentCore Runtime + Lambda + HTTP API +4. `AtxUiStack` — S3 + CloudFront + +#### Step 3: Rebuild UI with API Endpoint + +The initial `cdk deploy` deploys the UI without the API endpoint (it's not known until the AgentCore stack completes). Rebuild with the correct endpoint: +```bash +API_URL=$(aws cloudformation describe-stacks --stack-name AtxAgentCoreStack \ + --query 'Stacks[0].Outputs[?OutputKey==`ApiEndpoint`].OutputValue' --output text) + +cd ui && npm install +VITE_API_ENDPOINT=$API_URL npx vite build + +# Upload to the CDK-created S3 bucket and invalidate CloudFront +./deploy-aws.sh +``` + +> The `deploy-aws.sh` script auto-detects the CDK-managed `AtxUiStack` and uploads directly to its S3 bucket. It will not create a duplicate CloudFormation stack. 
+ +--- + +## Local Development + +```bash +# Terminal 1: Orchestrator (port 8080) +cd orchestrator && source .venv/bin/activate && python3.11 agent.py + +# Terminal 2: UI (port 3000, proxies /api to 8080) +cd ui && npm run dev +``` + +--- + +## Available Transformations + +### AWS Managed (10) + +| Transformation | Description | +|---|---| +| `AWS/python-version-upgrade` | Python 3.8 → 3.13 | +| `AWS/java-version-upgrade` | Java any → any (with dependency modernization) | +| `AWS/nodejs-version-upgrade` | Node.js any → any | +| `AWS/python-boto2-to-boto3` | boto2 → boto3 | +| `AWS/java-aws-sdk-v1-to-v2` | Java AWS SDK v1 → v2 | +| `AWS/nodejs-aws-sdk-v2-to-v3` | Node.js AWS SDK v2 → v3 | +| `AWS/early-access-comprehensive-codebase-analysis` | Deep codebase analysis | +| `AWS/early-access-java-x86-to-graviton` | Java x86 → ARM64/Graviton | +| `AWS/early-access-angular-to-react-migration` | Angular → React | +| `AWS/early-access-jfr-performance-optimization` | Java JFR performance | + +### Custom Transformations + +Create via the "Create Custom" tab. Published to the ATX registry via `atx custom def publish`. 
+ +--- + +## UI Tabs + +| Tab | Purpose | +|-----|---------| +| **Transformations** | Browse AWS-managed + published custom transforms | +| **Execute** | Run a single transformation on a repository | +| **Create Custom** | Define and publish custom transformations | +| **CSV Batch** | Upload CSV to process multiple repos | +| **Jobs** | Track job status, view results | + +--- + +## Project Structure + +``` +├── orchestrator/ # Bedrock AgentCore orchestrator +│ ├── agent.py # Main agent (3 sub-agents) +│ ├── tools/ # find, execute, create, memory +│ ├── Dockerfile # Container image for CDK deployment +│ └── requirements.txt +├── api/lambda/ +│ └── async_invoke_agent.py # Async bridge (submit/poll/direct) +├── sam/ # SAM template for AgentCore + API (Option A) +│ ├── template.yaml # SAM resources +│ ├── deploy_agentcore.py # Deploy Lambda (SDK-based) +│ └── deploy.sh # One-command SAM deploy +├── ui/ # React frontend (5 tabs) +│ └── src/components/ +├── cdk/ # CDK stacks +│ └── lib/ +│ ├── container-stack.ts # ECR + Docker image +│ ├── infrastructure-stack.ts # Batch, S3, VPC, IAM +│ ├── agentcore-stack.ts # AgentCore + Lambda + API (Option B, experimental) +│ └── ui-stack.ts # S3 + CloudFront +├── container/ # ATX CLI Docker image +├── deployment/ # Infrastructure deployment scripts +├── docs/ # Security + troubleshooting +├── ARCHITECTURE.md +└── README.md +``` + +--- + +## Tech Stack + +| Component | Technology | +|-----------|------------| +| AI Orchestration | Amazon Bedrock AgentCore + Strands Agents | +| AI Model | Claude Sonnet 4 (cross-region inference) | +| Memory | AgentCore Memory (short-term) | +| Transformation Engine | AWS Transform CLI (ATX) | +| Compute | AWS Batch (Fargate) | +| Storage | Amazon S3 | +| UI | React + Vite | +| CDN | Amazon CloudFront | +| API | API Gateway v2 (HTTP API) | +| Job Tracking | Amazon DynamoDB | +| Infrastructure | AWS CDK (TypeScript) | + +--- + +## Observability + +The platform uses [AgentCore 
Observability](https://docs.aws.amazon.com/bedrock-agentcore/latest/devguide/observability.html) for tracing, debugging, and monitoring agent performance. + +### One-Time Setup: Enable CloudWatch Transaction Search + +Before traces appear, enable Transaction Search in your account (once per account): + +```bash +# 1. Create resource policy for X-Ray → CloudWatch Logs +ACCOUNT_ID=$(aws sts get-caller-identity --query Account --output text) +aws logs put-resource-policy --policy-name AgentCoreTransactionSearch \ + --policy-document "{ + \"Version\": \"2012-10-17\", + \"Statement\": [{ + \"Sid\": \"TransactionSearchXRayAccess\", + \"Effect\": \"Allow\", + \"Principal\": {\"Service\": \"xray.amazonaws.com\"}, + \"Action\": \"logs:PutLogEvents\", + \"Resource\": [ + \"arn:aws:logs:us-east-1:${ACCOUNT_ID}:log-group:aws/spans:*\", + \"arn:aws:logs:us-east-1:${ACCOUNT_ID}:log-group:/aws/application-signals/data:*\" + ], + \"Condition\": { + \"ArnLike\": {\"aws:SourceArn\": \"arn:aws:xray:us-east-1:${ACCOUNT_ID}:*\"}, + \"StringEquals\": {\"aws:SourceAccount\": \"${ACCOUNT_ID}\"} + } + }] + }" + +# 2. Route trace segments to CloudWatch Logs +aws xray update-trace-segment-destination --destination CloudWatchLogs +``` + +Or enable via the [CloudWatch console](https://console.aws.amazon.com/cloudwatch/) → Settings → X-Ray traces → Transaction Search → Enable. + +### What's Instrumented + +- **Agent tracing (ADOT)**: The orchestrator includes `aws-opentelemetry-distro` and runs via `opentelemetry-instrument`, which auto-captures Bedrock model invocations, tool calls, and sub-agent interactions using GenAI semantic conventions. +- **Lambda tracing (X-Ray)**: The async invoke Lambda has X-Ray active tracing enabled for end-to-end visibility from API Gateway through Lambda to AgentCore. +- **Batch job logs**: ATX CLI container output goes to CloudWatch Logs (`/aws/batch/atx-transform`). 
+- **CloudWatch dashboard**: Pre-built dashboard with Lambda metrics and Batch job log insights. + +### Viewing Traces + +- **CloudWatch GenAI Observability**: [CloudWatch console](https://console.aws.amazon.com/cloudwatch/) → Application Signals → Traces. Shows agent workflow steps, token usage, latency, and error rates. +- **X-Ray**: [X-Ray console](https://console.aws.amazon.com/xray/home) → Traces. Filter by service name. +- **CloudWatch Dashboard**: Search for `ATX-Transform-CLI-Dashboard` in CloudWatch Dashboards. + +See the [AgentCore Observability docs](https://docs.aws.amazon.com/bedrock-agentcore/latest/devguide/observability-configure.html) for advanced configuration. diff --git a/agentic-atx-platform/api/lambda/async_invoke_agent.py b/agentic-atx-platform/api/lambda/async_invoke_agent.py new file mode 100644 index 0000000..6ee45e5 --- /dev/null +++ b/agentic-atx-platform/api/lambda/async_invoke_agent.py @@ -0,0 +1,555 @@ +""" +Async AgentCore invocation Lambda. +Submit: fires async self-invoke, returns request_id immediately. +Execute: calls AgentCore, writes result to S3. +Poll: reads result from S3. 
"""
Async AgentCore invocation Lambda.
Submit: fires async self-invoke, returns request_id immediately.
Execute: calls AgentCore, writes result to S3.
Poll: reads result from S3.
"""

import json
import logging
import os
import uuid

import boto3
from botocore.config import Config as BotoConfig

logger = logging.getLogger()
logger.setLevel(logging.INFO)

# Lambda sets AWS_REGION in its environment; AWS_DEFAULT_REGION is the
# fallback for local runs outside Lambda.
REGION = os.environ.get('AWS_REGION', os.environ.get('AWS_DEFAULT_REGION', 'us-east-1'))

s3_client = boto3.client('s3', region_name=REGION)
# AgentCore invocations can run for minutes: long read timeout, and no
# retries — a retried invoke would re-run the agent and duplicate its
# side effects.
agentcore_client = boto3.client(
    'bedrock-agentcore',
    region_name=REGION,
    config=BotoConfig(read_timeout=900, connect_timeout=10, retries={'max_attempts': 0}),
)

AGENT_RUNTIME_ARN = os.environ.get('AGENT_RUNTIME_ARN', '')
RESULT_BUCKET = os.environ.get('RESULT_BUCKET', '')
RESULT_PREFIX = 'orchestrator-results/'
JOBS_TABLE = os.environ.get('JOBS_TABLE', 'atx-transform-jobs')

dynamodb_client = boto3.resource('dynamodb', region_name=REGION)


def _jobs_table():
    """Return the DynamoDB Table resource used for job tracking."""
    return dynamodb_client.Table(JOBS_TABLE)


def lambda_handler(event, context):
    """Route an invocation to the appropriate handler.

    Three invocation shapes are handled:
    - internal async self-invokes (``_async_execute`` / ``_async_download``,
      sent with InvocationType='Event'),
    - CORS preflight (OPTIONS) from API Gateway,
    - HTTP POST bodies carrying an ``action`` of submit/poll/direct.

    Returns an API Gateway proxy response dict (via ``cors_response``) for
    HTTP invocations; internal invokes return whatever their executor returns.
    """
    # Internal async execution (invoked with InvocationType='Event')
    if event.get('_async_execute'):
        return _execute_agentcore(event['request_id'], event['prompt'])

    # Internal async download (invoked with InvocationType='Event')
    if event.get('_async_download'):
        return _execute_download(event['download_id'], event['bucket'], event['prefix'])

    # HTTP request from API Gateway
    if event.get('requestContext', {}).get('http', {}).get('method') == 'OPTIONS':
        return cors_response(200, '')

    try:
        # API Gateway can deliver a literal null body, in which case
        # event['body'] is None and .get('body', '{}') returns None;
        # ``or '{}'`` guards json.loads against that TypeError.
        body = json.loads(event.get('body') or '{}')
        action = body.get('action', 'submit')

        if action == 'submit':
            return _handle_submit(body)
        elif action == 'poll':
            return _handle_poll(body)
        elif action == 'direct':
            return _handle_direct(body)
        else:
            return cors_response(400, json.dumps({'error': f'Unknown action: {action}'}))
    except Exception as e:
        # logger.exception records the full traceback in CloudWatch Logs,
        # unlike logger.error with a formatted message.
        logger.exception("Handler error")
        return cors_response(500, json.dumps({'error': str(e)}))


def _handle_submit(body):
    """Write a PROCESSING marker to S3 and fire an async self-invoke.

    Returns immediately with a request_id the UI polls with; the actual
    AgentCore call happens in the async re-invocation of this Lambda.
    """
    prompt = body.get('prompt', '')
    if not prompt:
        return cors_response(400, json.dumps({'error': 'Missing prompt'}))

    request_id = str(uuid.uuid4())

    # Write pending marker first so a poll that lands before the async
    # worker starts still sees PROCESSING instead of NOT_FOUND.
    s3_client.put_object(
        Bucket=RESULT_BUCKET,
        Key=f'{RESULT_PREFIX}{request_id}.json',
        Body=json.dumps({'status': 'PROCESSING', 'prompt': prompt}).encode(),
        ContentType='application/json'
    )

    # Fire async self-invoke.
    # NOTE(review): context_function_name() is not defined in this part of
    # the file — presumably a helper further down that reads
    # AWS_LAMBDA_FUNCTION_NAME; confirm it exists before shipping.
    lambda_client = boto3.client('lambda', region_name=REGION)
    lambda_client.invoke(
        FunctionName=context_function_name() or 'atx-async-invoke-agent',
        InvocationType='Event',
        Payload=json.dumps({
            '_async_execute': True,
            'request_id': request_id,
            'prompt': prompt,
        })
    )

    return cors_response(200, json.dumps({'status': 'SUBMITTED', 'request_id': request_id}))


def _handle_poll(body):
    """Read the orchestrator result for request_id from S3.

    Returns NOT_FOUND with HTTP 200 (not an error status) when no marker
    exists, so the UI can keep polling without special-casing failures.
    """
    request_id = body.get('request_id', '')
    if not request_id:
        return cors_response(400, json.dumps({'error': 'Missing request_id'}))

    try:
        response = s3_client.get_object(Bucket=RESULT_BUCKET, Key=f'{RESULT_PREFIX}{request_id}.json')
        result = json.loads(response['Body'].read().decode('utf-8'))
        return cors_response(200, json.dumps(result))
    except s3_client.exceptions.NoSuchKey:
        return cors_response(200, json.dumps({'status': 'NOT_FOUND'}))
'statusReason' in job: + result['failure_reason'] = job['statusReason'] + if job['status'] == 'SUCCEEDED': + account = boto3.client('sts').get_caller_identity()['Account'] + result['results_location'] = f"s3://atx-custom-output-{account}/transformations/{job['jobName']}/" + # Persist job_name to DynamoDB for future lookups after Batch purges the record + try: + _jobs_table().update_item( + Key={'id': job_id}, + UpdateExpression='SET job_name = :jn', + ExpressionAttributeValues={':jn': job['jobName']}, + ) + except Exception: + pass + return cors_response(200, json.dumps(result)) + except Exception as e: + return cors_response(500, json.dumps({'error': str(e)})) + + elif op == 'results': + job_id = body.get('job_id', '') + if not job_id: + return cors_response(400, json.dumps({'error': 'Missing job_id'})) + try: + job_name = None + # Try Batch first + resp = batch_client.describe_jobs(jobs=[job_id]) + if resp['jobs']: + job_name = resp['jobs'][0]['jobName'] + else: + # Fallback: read job_name from DynamoDB + try: + ddb_item = _jobs_table().get_item(Key={'id': job_id}).get('Item', {}) + job_name = ddb_item.get('job_name') + except Exception: + pass + if not job_name: + return cors_response(200, json.dumps({'error': f'Job not found and no stored job_name for: {job_id}', 'files': []})) + account = boto3.client('sts').get_caller_identity()['Account'] + bucket = f"atx-custom-output-{account}" + prefix = f"transformations/{job_name}/" + # List all objects recursively (ATX creates nested conversation folders) + all_files = [] + paginator = s3_client.get_paginator('list_objects_v2') + for page in paginator.paginate(Bucket=bucket, Prefix=prefix): + for obj in page.get('Contents', []): + key = obj['Key'] + name = key[len(prefix):] + # Skip .git internals and hidden files + if '/.git/' in key or name.startswith('.git/'): + continue + all_files.append({'key': key, 'size': obj['Size'], 'name': name}) + return cors_response(200, json.dumps({ + 'job_id': job_id, 'job_name': 
job_name, + 'results_location': f's3://{bucket}/{prefix}', + 'file_count': len(all_files), 'files': all_files, + })) + except Exception as e: + return cors_response(500, json.dumps({'error': str(e)})) + + elif op == 'list_custom': + try: + account = boto3.client('sts').get_caller_identity()['Account'] + bucket = f"atx-source-code-{account}" + # List all custom definition directories that have status.json + paginator = s3_client.get_paginator('list_objects_v2') + customs = [] + for page in paginator.paginate(Bucket=bucket, Prefix='custom-definitions/', Delimiter='/'): + for prefix in page.get('CommonPrefixes', []): + name = prefix['Prefix'].replace('custom-definitions/', '').rstrip('/') + if not name: + continue + # Read status.json + try: + status_obj = s3_client.get_object(Bucket=bucket, Key=f'custom-definitions/{name}/status.json') + status_data = json.loads(status_obj['Body'].read().decode('utf-8')) + customs.append(status_data) + except Exception: + customs.append({'name': name, 'status': 'unknown'}) + return cors_response(200, json.dumps({'customs': customs})) + except Exception as e: + return cors_response(500, json.dumps({'error': str(e)})) + + elif op == 'check_publish': + # Check if a publish job succeeded and update status.json + try: + name = body.get('name', '') + if not name: + return cors_response(400, json.dumps({'error': 'Missing name'})) + account = boto3.client('sts').get_caller_identity()['Account'] + bucket = f"atx-source-code-{account}" + # Read current status + status_obj = s3_client.get_object(Bucket=bucket, Key=f'custom-definitions/{name}/status.json') + status_data = json.loads(status_obj['Body'].read().decode('utf-8')) + pub_job_id = status_data.get('job_id') + if not pub_job_id or status_data.get('status') == 'published': + return cors_response(200, json.dumps(status_data)) + # Check Batch job + resp = batch_client.describe_jobs(jobs=[pub_job_id]) + if resp['jobs']: + job_status = resp['jobs'][0]['status'] + if job_status == 'SUCCEEDED': 
+ status_data['status'] = 'published' + elif job_status == 'FAILED': + status_data['status'] = 'failed' + status_data['failure_reason'] = resp['jobs'][0].get('statusReason', '') + else: + status_data['status'] = 'publishing' + # Update S3 + s3_client.put_object(Bucket=bucket, Key=f'custom-definitions/{name}/status.json', + Body=json.dumps(status_data).encode(), ContentType='application/json') + return cors_response(200, json.dumps(status_data)) + except Exception as e: + return cors_response(500, json.dumps({'error': str(e)})) + + elif op in ('save_job', 'list_jobs', 'delete_job', 'update_job'): + return _handle_jobs_ops(op, body) + + elif op == 'get_file': + try: + bucket = body.get('bucket', '') + key = body.get('key', '') + # Support shorthand for custom definitions + def_name = body.get('definition_name', '') + if def_name and not bucket: + account = boto3.client('sts').get_caller_identity()['Account'] + bucket = f"atx-source-code-{account}" + # Normalize name to lowercase-hyphenated (matches how create agent stores it) + normalized = def_name.lower().replace(' ', '-') + key = f"custom-definitions/{normalized}/transformation_definition.md" + if not bucket or not key: + return cors_response(400, json.dumps({'error': 'Missing bucket/key or definition_name'})) + obj = s3_client.get_object(Bucket=bucket, Key=key) + content = obj['Body'].read().decode('utf-8', errors='replace') + if len(content) > 512000: + content = content[:512000] + '\n\n... 
[truncated, file too large for preview]' + return cors_response(200, json.dumps({ + 'bucket': bucket, 'key': key, + 'content': content, 'size': obj['ContentLength'], + 'content_type': obj.get('ContentType', 'text/plain'), + })) + except Exception as e: + return cors_response(500, json.dumps({'error': str(e)})) + + elif op == 'download_url': + try: + bucket = body.get('bucket', '') + key = body.get('key', '') + if not bucket or not key: + return cors_response(400, json.dumps({'error': 'Missing bucket or key'})) + url = s3_client.generate_presigned_url('get_object', + Params={'Bucket': bucket, 'Key': key}, ExpiresIn=3600) + return cors_response(200, json.dumps({'url': url})) + except Exception as e: + return cors_response(500, json.dumps({'error': str(e)})) + + elif op == 'download_all': + try: + import tempfile, os as _os + bucket = body.get('bucket', '') + prefix = body.get('prefix', '') + if not bucket or not prefix: + return cors_response(400, json.dumps({'error': 'Missing bucket or prefix'})) + + download_id = body.get('download_id', '') + # Check if this is a poll for an existing download + if download_id: + try: + result_obj = s3_client.get_object(Bucket=bucket, Key=f'downloads/{download_id}.json') + result = json.loads(result_obj['Body'].read().decode('utf-8')) + return cors_response(200, json.dumps(result)) + except s3_client.exceptions.NoSuchKey: + return cors_response(200, json.dumps({'status': 'PROCESSING'})) + + # Start async download + import uuid + download_id = str(uuid.uuid4())[:8] + + # Check if a ZIP already exists for this prefix + zip_key = f"downloads/{prefix.rstrip('/').split('/')[-1]}.zip" + try: + s3_client.head_object(Bucket=bucket, Key=zip_key) + # ZIP exists, return presigned URL immediately + url = s3_client.generate_presigned_url('get_object', + Params={'Bucket': bucket, 'Key': zip_key}, ExpiresIn=3600) + return cors_response(200, json.dumps({'status': 'COMPLETED', 'url': url, 'cached': True})) + except s3_client.exceptions.ClientError: 
+ pass # ZIP doesn't exist, create it + + # Write pending marker + s3_client.put_object(Bucket=bucket, Key=f'downloads/{download_id}.json', + Body=json.dumps({'status': 'PROCESSING'}).encode(), ContentType='application/json') + + # Fire async self-invoke + lambda_client = boto3.client('lambda', region_name=REGION) + lambda_client.invoke( + FunctionName=os.environ.get('AWS_LAMBDA_FUNCTION_NAME', 'atx-async-invoke-agent'), + InvocationType='Event', + Payload=json.dumps({ + '_async_download': True, + 'download_id': download_id, + 'bucket': bucket, + 'prefix': prefix, + }) + ) + return cors_response(200, json.dumps({'status': 'STARTED', 'download_id': download_id})) + except Exception as e: + return cors_response(500, json.dumps({'error': str(e)})) + except Exception as e: + return cors_response(500, json.dumps({'error': str(e)})) + + return cors_response(400, json.dumps({'error': f'Unknown op: {op}. Use status, results, list_custom, check_publish, save_job, list_jobs, or delete_job'})) + + +def _handle_jobs_ops(op, body): + """DynamoDB job tracking operations.""" + import time as _time + from decimal import Decimal + + class DecimalEncoder(json.JSONEncoder): + def default(self, o): + if isinstance(o, Decimal): + return int(o) if o == int(o) else float(o) + return super().default(o) + + if op == 'save_job': + job = body.get('job', {}) + if not job.get('id'): + return cors_response(400, json.dumps({'error': 'Missing job.id'})) + # Add TTL (30 days from now) + job['ttl'] = int(_time.time()) + 30 * 86400 + _jobs_table().put_item(Item=job) + return cors_response(200, json.dumps({'status': 'saved', 'id': job['id']})) + + elif op == 'list_jobs': + result = _jobs_table().scan() + jobs = sorted(result.get('Items', []), key=lambda j: j.get('submittedAt', ''), reverse=True) + return cors_response(200, json.dumps({'jobs': jobs}, cls=DecimalEncoder)) + + elif op == 'delete_job': + job_id = body.get('job_id', '') + if not job_id: + return cors_response(400, json.dumps({'error': 
'Missing job_id'})) + _jobs_table().delete_item(Key={'id': job_id}) + return cors_response(200, json.dumps({'status': 'deleted', 'id': job_id})) + + elif op == 'update_job': + job_id = body.get('job_id', '') + updates = body.get('updates', {}) + if not job_id or not updates: + return cors_response(400, json.dumps({'error': 'Missing job_id or updates'})) + expr_parts = [] + expr_values = {} + expr_names = {} + for k, v in updates.items(): + safe_key = f'#k_{k}' + val_key = f':v_{k}' + expr_parts.append(f'{safe_key} = {val_key}') + expr_values[val_key] = v + expr_names[safe_key] = k + _jobs_table().update_item( + Key={'id': job_id}, + UpdateExpression='SET ' + ', '.join(expr_parts), + ExpressionAttributeValues=expr_values, + ExpressionAttributeNames=expr_names, + ) + return cors_response(200, json.dumps({'status': 'updated', 'id': job_id})) + + return cors_response(400, json.dumps({'error': f'Unknown jobs op: {op}'})) + + +def _execute_agentcore(request_id, prompt): + """Called asynchronously - no timeout pressure.""" + logger.info(f"Executing AgentCore for request {request_id}") + + def _update_step(step): + """Write intermediate step to S3 so the UI can show progress.""" + try: + s3_client.put_object( + Bucket=RESULT_BUCKET, + Key=f'{RESULT_PREFIX}{request_id}.json', + Body=json.dumps({'status': 'PROCESSING', 'step': step}).encode(), + ContentType='application/json' + ) + except Exception: + pass # Don't fail the main flow for a status update + + try: + _update_step('Sending request to orchestrator...') + + payload = json.dumps({'prompt': prompt}).encode() + session_id = str(uuid.uuid4()) + + response = agentcore_client.invoke_agent_runtime( + agentRuntimeArn=AGENT_RUNTIME_ARN, + runtimeSessionId=session_id, + payload=payload + ) + + _update_step('Orchestrator is processing...') + + content_type = response.get('contentType', '') + chunks = [] + chunk_count = 0 + if 'text/event-stream' in content_type: + for line in response['response'].iter_lines(chunk_size=10): + 
if line: + decoded = line.decode('utf-8') + if decoded.startswith('data: '): + chunk_text = decoded[6:] + chunks.append(chunk_text) + chunk_count += 1 + # Detect orchestrator steps from streaming chunks + _detect_and_update_step(request_id, chunk_text, chunk_count) + else: + for chunk in response.get('response', []): + if isinstance(chunk, bytes): + chunk_text = chunk.decode('utf-8') + chunks.append(chunk_text) + chunk_count += 1 + _detect_and_update_step(request_id, chunk_text, chunk_count) + + result_text = ''.join(chunks) + try: + result = json.loads(result_text) + except json.JSONDecodeError: + result = {'result': result_text} + + s3_client.put_object( + Bucket=RESULT_BUCKET, + Key=f'{RESULT_PREFIX}{request_id}.json', + Body=json.dumps({'status': 'COMPLETED', 'result': result}).encode(), + ContentType='application/json' + ) + logger.info(f"Request {request_id} completed successfully") + + except Exception as e: + logger.error(f"Request {request_id} failed: {e}") + s3_client.put_object( + Bucket=RESULT_BUCKET, + Key=f'{RESULT_PREFIX}{request_id}.json', + Body=json.dumps({'status': 'FAILED', 'error': str(e)}).encode(), + ContentType='application/json' + ) + + +def _detect_and_update_step(request_id, chunk_text, chunk_count): + """Parse streaming chunks to detect orchestrator steps and update S3.""" + lower = chunk_text.lower() + step = None + + if 'find_transform_agent' in lower or 'searching' in lower or 'finding' in lower: + step = 'Finding best transformation...' + elif 'create_transform_agent' in lower or 'generating' in lower or 'creating' in lower: + step = 'Creating custom transformation...' + elif 'publish' in lower: + step = 'Publishing transformation to registry...' + elif 'execute_transform_agent' in lower or 'executing' in lower or 'submit' in lower: + step = 'Executing transformation...' 
+ + if step: + try: + s3_client.put_object( + Bucket=RESULT_BUCKET, + Key=f'{RESULT_PREFIX}{request_id}.json', + Body=json.dumps({'status': 'PROCESSING', 'step': step}).encode(), + ContentType='application/json' + ) + except Exception: + pass + + +def _execute_download(download_id, bucket, prefix): + """Called asynchronously to create ZIP of all result files.""" + import zipfile, tempfile, os as _os + logger.info(f"Starting download {download_id}: s3://{bucket}/{prefix}") + try: + zip_file = tempfile.NamedTemporaryFile(suffix='.zip', dir='/tmp', delete=False) # nosec B108 + zip_path = zip_file.name + zip_file.close() + paginator = s3_client.get_paginator('list_objects_v2') + file_count = 0 + with zipfile.ZipFile(zip_path, 'w', zipfile.ZIP_DEFLATED) as zf: + for page in paginator.paginate(Bucket=bucket, Prefix=prefix): + for obj in page.get('Contents', []): + key = obj['Key'] + name = key[len(prefix):] + if not name or '/.git/' in key or name.startswith('.git/'): + continue + file_obj = s3_client.get_object(Bucket=bucket, Key=key) + zf.writestr(name, file_obj['Body'].read()) + file_count += 1 + + zip_key = f"downloads/{prefix.rstrip('/').split('/')[-1]}.zip" + zip_size = _os.path.getsize(zip_path) + with open(zip_path, 'rb') as f: + s3_client.put_object(Bucket=bucket, Key=zip_key, Body=f, ContentType='application/zip') + _os.remove(zip_path) + + url = s3_client.generate_presigned_url('get_object', + Params={'Bucket': bucket, 'Key': zip_key}, ExpiresIn=3600) + + s3_client.put_object(Bucket=bucket, Key=f'downloads/{download_id}.json', + Body=json.dumps({'status': 'COMPLETED', 'url': url, 'file_count': file_count, 'zip_size': zip_size}).encode(), + ContentType='application/json') + logger.info(f"Download {download_id} complete: {file_count} files, {zip_size} bytes") + except Exception as e: + logger.error(f"Download {download_id} failed: {e}") + s3_client.put_object(Bucket=bucket, Key=f'downloads/{download_id}.json', + Body=json.dumps({'status': 'FAILED', 'error': 
str(e)}).encode(), + ContentType='application/json') + + +def context_function_name(): + return os.environ.get('AWS_LAMBDA_FUNCTION_NAME') + + +def cors_response(status_code, body): + return { + 'statusCode': status_code, + 'headers': { + 'Content-Type': 'application/json', + 'Access-Control-Allow-Origin': '*', + 'Access-Control-Allow-Methods': 'POST, OPTIONS', + 'Access-Control-Allow-Headers': 'Content-Type', + }, + 'body': body + } diff --git a/agentic-atx-platform/cdk/.gitignore b/agentic-atx-platform/cdk/.gitignore new file mode 100644 index 0000000..3a671bb --- /dev/null +++ b/agentic-atx-platform/cdk/.gitignore @@ -0,0 +1,9 @@ +*.js +!jest.config.js +*.d.ts +node_modules + +# CDK asset staging directory +.cdk.staging +cdk.out +cdk.context.json diff --git a/agentic-atx-platform/cdk/README.md b/agentic-atx-platform/cdk/README.md new file mode 100644 index 0000000..59c4f9a --- /dev/null +++ b/agentic-atx-platform/cdk/README.md @@ -0,0 +1,59 @@ +# CDK Infrastructure + +AWS CDK stacks for the ATX Transform Platform. 
+ +## Stacks + +| Stack | Resources | +|-------|-----------| +| `AtxContainerStack` | ECR repository + Docker image build | +| `AtxInfrastructureStack` | VPC, S3 buckets, AWS Batch (Fargate), IAM roles, CloudWatch | +| `AtxAgentCoreStack` | AgentCore Runtime + async Lambda + HTTP API (experimental) | +| `AtxUiStack` | S3 bucket + CloudFront distribution for UI hosting | + +## Deploy (Option B: CDK-Only) + +```bash +cd cdk +npm install + +# Build UI first (AtxUiStack deploys from ui/dist/) +cd ../ui && npm install && npx vite build && cd ../cdk + +# Build TypeScript, then bootstrap CDK (first time only) +npx tsc +cdk bootstrap + +# Deploy all stacks +CDK_DEFAULT_ACCOUNT=$(aws sts get-caller-identity --query Account --output text) \ + cdk deploy --all --require-approval never +``` + +Or use the deploy script: +```bash +./deploy.sh +``` + +After deployment, rebuild the UI with the API endpoint and upload: +```bash +API_URL=$(aws cloudformation describe-stacks --stack-name AtxAgentCoreStack \ + --query 'Stacks[0].Outputs[?OutputKey==`ApiEndpoint`].OutputValue' --output text) +cd ../ui +VITE_API_ENDPOINT=$API_URL npx vite build +./deploy-aws.sh +``` + +## Deploy (Option A: CLI-Based) + +For Option A, only `AtxContainerStack` and `AtxInfrastructureStack` are used: +```bash +cd deployment +./1-build-and-push.sh +./2-deploy-infrastructure.sh +``` + +## Destroy + +```bash +./destroy.sh +``` diff --git a/agentic-atx-platform/cdk/bin/cdk.ts b/agentic-atx-platform/cdk/bin/cdk.ts new file mode 100644 index 0000000..d43aeef --- /dev/null +++ b/agentic-atx-platform/cdk/bin/cdk.ts @@ -0,0 +1,85 @@ +#!/usr/bin/env node +import * as cdk from 'aws-cdk-lib'; +// import { AwsSolutionsChecks } from 'cdk-nag'; +// import { Aspects } from 'aws-cdk-lib'; +import { AwsSolutionsChecks } from 'cdk-nag'; +import { Aspects } from 'aws-cdk-lib'; +import { ContainerStack } from '../lib/container-stack'; +import { InfrastructureStack } from '../lib/infrastructure-stack'; +import { AgentCoreStack } from
'../lib/agentcore-stack'; +import { UiStack } from '../lib/ui-stack'; + +const app = new cdk.App(); + +const ecrRepoName = app.node.tryGetContext('ecrRepoName') || 'atx-custom-ecr'; +const awsRegion = app.node.tryGetContext('awsRegion') || 'us-east-1'; +const fargateVcpu = app.node.tryGetContext('fargateVcpu') || 2; +const fargateMemory = app.node.tryGetContext('fargateMemory') || 4096; +const jobTimeout = app.node.tryGetContext('jobTimeout') || 43200; +const maxVcpus = app.node.tryGetContext('maxVcpus') || 256; + +const existingOutputBucket = app.node.tryGetContext('existingOutputBucket') || ''; +const existingSourceBucket = app.node.tryGetContext('existingSourceBucket') || ''; +const existingVpcId = app.node.tryGetContext('existingVpcId') || ''; +const existingSubnetIds = (() => { + const raw = app.node.tryGetContext('existingSubnetIds'); + if (!raw) return []; + if (Array.isArray(raw)) return raw; + if (typeof raw === 'string') { + try { return JSON.parse(raw); } catch { return raw.split(','); } + } + return []; +})(); +const existingSecurityGroupId = app.node.tryGetContext('existingSecurityGroupId') || ''; + +const env = { + account: process.env.CDK_DEFAULT_ACCOUNT || process.env.AWS_ACCOUNT_ID, + region: awsRegion, +}; + +// Stack 1: Container (ECR + Docker Image) +const containerStack = new ContainerStack(app, 'AtxContainerStack', { + env, + ecrRepoName, + description: 'AWS Transform CLI - Container and ECR Repository', +}); + +// Stack 2: Infrastructure (Batch, S3, IAM, CloudWatch) +const infrastructureStack = new InfrastructureStack(app, 'AtxInfrastructureStack', { + env, + imageUri: containerStack.imageUri, + fargateVcpu, + fargateMemory, + jobTimeout, + maxVcpus, + existingOutputBucket, + existingSourceBucket, + existingVpcId, + existingSubnetIds, + existingSecurityGroupId, + description: 'AWS Transform CLI - Batch Infrastructure', +}); +infrastructureStack.addDependency(containerStack); + +// Stack 3: AgentCore + API (Orchestrator, Lambda, HTTP API) +// 
⚠️ EXPERIMENTAL: Uses @aws-cdk/aws-bedrock-agentcore-alpha +const agentCoreStack = new AgentCoreStack(app, 'AtxAgentCoreStack', { + env, + outputBucketName: infrastructureStack.outputBucket.bucketName, + sourceBucketName: infrastructureStack.sourceBucket.bucketName, + jobsTableName: infrastructureStack.jobsTable.tableName, + description: 'AWS Transform CLI - AgentCore Orchestrator + API (Experimental)', +}); +agentCoreStack.addDependency(infrastructureStack); + +// Stack 4: UI (S3 Static Site + CloudFront) +// No dependency on AgentCore — works with both SAM (Option A) and CDK (Option B) +const uiStack = new UiStack(app, 'AtxUiStack', { + env, + description: 'AWS Transform CLI - UI (S3 + CloudFront)', +}); + +// Apply cdk-nag to stable stacks (uncomment for security audits) +// Aspects.of(containerStack).add(new AwsSolutionsChecks({ verbose: true })); +// Aspects.of(infrastructureStack).add(new AwsSolutionsChecks({ verbose: true })); +// Aspects.of(uiStack).add(new AwsSolutionsChecks({ verbose: true })); diff --git a/agentic-atx-platform/cdk/cdk.json b/agentic-atx-platform/cdk/cdk.json new file mode 100644 index 0000000..cf17df2 --- /dev/null +++ b/agentic-atx-platform/cdk/cdk.json @@ -0,0 +1,99 @@ +{ + "app": "node bin/cdk.js", + "watch": { + "include": [ + "**" + ], + "exclude": [ + "README.md", + "cdk*.json", + "**/*.d.ts", + "**/*.js", + "tsconfig.json", + "package*.json", + "yarn.lock", + "node_modules", + "test" + ] + }, + "context": { + "@aws-cdk/aws-lambda:recognizeLayerVersion": true, + "@aws-cdk/core:checkSecretUsage": true, + "@aws-cdk/core:target-partitions": [ + "aws", + "aws-cn" + ], + "@aws-cdk-containers/ecs-service-extensions:enableDefaultLogDriver": true, + "@aws-cdk/aws-ec2:uniqueImdsv2TemplateName": true, + "@aws-cdk/aws-ecs:arnFormatIncludesClusterName": true, + "@aws-cdk/aws-iam:minimizePolicies": true, + "@aws-cdk/core:validateSnapshotRemovalPolicy": true, + "@aws-cdk/aws-codepipeline:crossAccountKeyAliasStackSafeResourceName": true, + 
"@aws-cdk/aws-s3:createDefaultLoggingPolicy": true, + "@aws-cdk/aws-sns-subscriptions:restrictSqsDescryption": true, + "@aws-cdk/aws-apigateway:disableCloudWatchRole": true, + "@aws-cdk/core:enablePartitionLiterals": true, + "@aws-cdk/aws-events:eventsTargetQueueSameAccount": true, + "@aws-cdk/aws-ecs:disableExplicitDeploymentControllerForCircuitBreaker": true, + "@aws-cdk/aws-iam:importedRoleStackSafeDefaultPolicyName": true, + "@aws-cdk/aws-s3:serverAccessLogsUseBucketPolicy": true, + "@aws-cdk/aws-route53-patters:useCertificate": true, + "@aws-cdk/customresources:installLatestAwsSdkDefault": false, + "@aws-cdk/aws-rds:databaseProxyUniqueResourceName": true, + "@aws-cdk/aws-codedeploy:removeAlarmsFromDeploymentGroup": true, + "@aws-cdk/aws-apigateway:authorizerChangeDeploymentLogicalId": true, + "@aws-cdk/aws-ec2:launchTemplateDefaultUserData": true, + "@aws-cdk/aws-secretsmanager:useAttachedSecretResourcePolicyForSecretTargetAttachments": true, + "@aws-cdk/aws-redshift:columnId": true, + "@aws-cdk/aws-stepfunctions-tasks:enableEmrServicePolicyV2": true, + "@aws-cdk/aws-ec2:restrictDefaultSecurityGroup": true, + "@aws-cdk/aws-apigateway:requestValidatorUniqueId": true, + "@aws-cdk/aws-kms:aliasNameRef": true, + "@aws-cdk/aws-autoscaling:generateLaunchTemplateInsteadOfLaunchConfig": true, + "@aws-cdk/core:includePrefixInUniqueNameGeneration": true, + "@aws-cdk/aws-efs:denyAnonymousAccess": true, + "@aws-cdk/aws-opensearchservice:enableOpensearchMultiAzWithStandby": true, + "@aws-cdk/aws-lambda-nodejs:useLatestRuntimeVersion": true, + "@aws-cdk/aws-efs:mountTargetOrderInsensitiveLogicalId": true, + "@aws-cdk/aws-rds:auroraClusterChangeScopeOfInstanceParameterGroupWithEachParameters": true, + "@aws-cdk/aws-appsync:useArnForSourceApiAssociationIdentifier": true, + "@aws-cdk/aws-rds:preventRenderingDeprecatedCredentials": true, + "@aws-cdk/aws-codepipeline-actions:useNewDefaultBranchForCodeCommitSource": true, + 
"@aws-cdk/aws-cloudwatch-actions:changeLambdaPermissionLogicalIdForLambdaAction": true, + "@aws-cdk/aws-codepipeline:crossAccountKeysDefaultValueToFalse": true, + "@aws-cdk/aws-codepipeline:defaultPipelineTypeToV2": true, + "@aws-cdk/aws-kms:reduceCrossAccountRegionPolicyScope": true, + "@aws-cdk/aws-eks:nodegroupNameAttribute": true, + "@aws-cdk/aws-ec2:ebsDefaultGp3Volume": true, + "@aws-cdk/aws-ecs:removeDefaultDeploymentAlarm": true, + "@aws-cdk/custom-resources:logApiResponseDataPropertyTrueDefault": false, + "@aws-cdk/aws-s3:keepNotificationInImportedBucket": false, + "@aws-cdk/aws-ecs:enableImdsBlockingDeprecatedFeature": false, + "@aws-cdk/aws-ecs:disableEcsImdsBlocking": true, + "@aws-cdk/aws-ecs:reduceEc2FargateCloudWatchPermissions": true, + "@aws-cdk/aws-dynamodb:resourcePolicyPerReplica": true, + "@aws-cdk/aws-ec2:ec2SumTImeoutEnabled": true, + "@aws-cdk/aws-appsync:appSyncGraphQLAPIScopeLambdaPermission": true, + "@aws-cdk/aws-rds:setCorrectValueForDatabaseInstanceReadReplicaInstanceResourceId": true, + "@aws-cdk/core:cfnIncludeRejectComplexResourceUpdateCreatePolicyIntrinsics": true, + "@aws-cdk/aws-lambda-nodejs:sdkV3ExcludeSmithyPackages": true, + "@aws-cdk/aws-stepfunctions-tasks:fixRunEcsTaskPolicy": true, + "@aws-cdk/aws-ec2:bastionHostUseAmazonLinux2023ByDefault": true, + "@aws-cdk/aws-route53-targets:userPoolDomainNameMethodWithoutCustomResource": true, + "@aws-cdk/aws-elasticloadbalancingV2:albDualstackWithoutPublicIpv4SecurityGroupRulesDefault": true, + "@aws-cdk/aws-iam:oidcRejectUnauthorizedConnections": true, + "@aws-cdk/core:enableAdditionalMetadataCollection": true, + "@aws-cdk/aws-lambda:createNewPoliciesWithAddToRolePolicy": true, + "ecrRepoName": "atx-custom-ecr", + "awsRegion": "us-east-1", + "fargateVcpu": 2, + "fargateMemory": 4096, + "jobTimeout": 43200, + "maxVcpus": 256, + "existingOutputBucket": "", + "existingSourceBucket": "", + "existingVpcId": "", + "existingSubnetIds": [], + "existingSecurityGroupId": "" + } +} diff --git 
a/agentic-atx-platform/cdk/deploy.sh b/agentic-atx-platform/cdk/deploy.sh new file mode 100755 index 0000000..ed69dc5 --- /dev/null +++ b/agentic-atx-platform/cdk/deploy.sh @@ -0,0 +1,74 @@ +#!/bin/bash +set -e + +echo "==========================================" +echo "AWS Transform CLI - CDK Deployment" +echo "==========================================" +echo "" + +cd "$(dirname "$0")" + +# Check if AWS CLI is configured +if ! aws sts get-caller-identity &>/dev/null; then + echo "❌ AWS CLI is not configured" + echo " Run: aws configure" + exit 1 +fi + +# Get AWS account and region +AWS_ACCOUNT=$(aws sts get-caller-identity --query Account --output text) +AWS_REGION=$(aws configure get region || echo "us-east-1") + +export CDK_DEFAULT_ACCOUNT=$AWS_ACCOUNT +export CDK_DEFAULT_REGION=$AWS_REGION + +echo "✓ AWS Account: $AWS_ACCOUNT" +echo "✓ AWS Region: $AWS_REGION" +echo "" + +# Install dependencies +echo "Installing dependencies..." +npm install +echo "" + +# Login to ECR Public (required for Docker base image pull) +echo "Authenticating with ECR Public..." +aws ecr-public get-login-password --region us-east-1 | docker login --username AWS --password-stdin public.ecr.aws 2>/dev/null || echo "ECR Public login skipped (may not be needed)" +echo "" + +# Build TypeScript +echo "Building CDK project..." +npm run build +echo "" + +# Bootstrap CDK (if not already done) +echo "Checking CDK bootstrap..." +if ! aws cloudformation describe-stacks --stack-name CDKToolkit --region $AWS_REGION &>/dev/null; then + echo "Bootstrapping CDK..." + cdk bootstrap aws://$AWS_ACCOUNT/$AWS_REGION +else + echo "✓ CDK already bootstrapped" +fi +echo "" + +# Deploy all stacks +echo "Deploying stacks..." +echo " 1. AtxContainerStack (ECR + Docker Image)" +echo " 2. AtxInfrastructureStack (Batch, S3, IAM)" +echo " 3. AtxAgentCoreStack (AgentCore + Lambda + HTTP API) [Experimental]" +echo " 4. 
AtxUiStack (S3 + CloudFront)" +echo "" + +# Use global cdk CLI (not npx) to avoid version conflicts with alpha packages +CDK_DEFAULT_ACCOUNT=$AWS_ACCOUNT CDK_DEFAULT_REGION=$AWS_REGION cdk deploy --all --require-approval never + +echo "" +echo "==========================================" +echo "✅ Deployment Complete!" +echo "==========================================" +echo "" +echo "Next steps:" +echo ' 1. Get API endpoint:' +echo ' aws cloudformation describe-stacks --stack-name AtxAgentCoreStack --query "Stacks[0].Outputs[?OutputKey=='"'"'ApiEndpoint'"'"'].OutputValue" --output text' +echo ' 2. Build and deploy UI with the API endpoint (see README.md Step 5)' +echo "" diff --git a/agentic-atx-platform/cdk/destroy.sh b/agentic-atx-platform/cdk/destroy.sh new file mode 100644 index 0000000..250a577 --- /dev/null +++ b/agentic-atx-platform/cdk/destroy.sh @@ -0,0 +1,39 @@ +#!/bin/bash +set -e + +echo "==========================================" +echo "AWS Transform CLI - CDK Cleanup" +echo "==========================================" +echo "" + +cd "$(dirname "$0")" + +echo "⚠️ This will delete all deployed resources:" +echo " - Lambda functions and API Gateway" +echo " - Batch compute environment, job queue, job definition" +echo " - S3 buckets (if empty)" +echo " - IAM roles" +echo " - CloudWatch log groups" +echo " - ECR repository" +echo "" +read -p "Are you sure? (yes/no): " confirm + +if [ "$confirm" != "yes" ]; then + echo "Cancelled." + exit 0 +fi + +echo "" +echo "Destroying stacks..." +npx cdk destroy --all --force + +echo "" +echo "==========================================" +echo "✅ Cleanup Complete!" +echo "==========================================" +echo "" +echo "Note: S3 buckets with data are retained by default." 
+echo "To delete them manually:" +echo " aws s3 rb s3://atx-custom-output-ACCOUNT_ID --force" +echo " aws s3 rb s3://atx-source-code-ACCOUNT_ID --force" +echo "" diff --git a/agentic-atx-platform/cdk/lib/agentcore-stack.ts b/agentic-atx-platform/cdk/lib/agentcore-stack.ts new file mode 100644 index 0000000..2ca0752 --- /dev/null +++ b/agentic-atx-platform/cdk/lib/agentcore-stack.ts @@ -0,0 +1,258 @@ +/** + * AgentCore + API Stack + * + * Deploys the orchestrator to Bedrock AgentCore, the async Lambda bridge, + * and the HTTP API Gateway. Single stack for the entire agent layer. + * + * ⚠️ EXPERIMENTAL: Uses @aws-cdk/aws-bedrock-agentcore-alpha which is + * under active development and subject to breaking changes. + */ + +import * as cdk from 'aws-cdk-lib'; +import * as lambda from 'aws-cdk-lib/aws-lambda'; +import * as iam from 'aws-cdk-lib/aws-iam'; +import * as apigwv2 from 'aws-cdk-lib/aws-apigatewayv2'; +import * as integrations from 'aws-cdk-lib/aws-apigatewayv2-integrations'; +import { Runtime, AgentRuntimeArtifact } from '@aws-cdk/aws-bedrock-agentcore-alpha'; +import { NagSuppressions } from 'cdk-nag'; +import { Construct } from 'constructs'; +import * as path from 'path'; + +export interface AgentCoreStackProps extends cdk.StackProps { + outputBucketName: string; + sourceBucketName: string; + jobsTableName: string; +} + +export class AgentCoreStack extends cdk.Stack { + public readonly agentRuntime: Runtime; + public readonly apiEndpoint: string; + + constructor(scope: Construct, id: string, props: AgentCoreStackProps) { + super(scope, id, props); + + // ======================================== + // 1. 
AgentCore Orchestrator Runtime + // ======================================== + + // IAM role for the AgentCore runtime + const agentRole = new iam.Role(this, 'AgentCoreRole', { + assumedBy: new iam.ServicePrincipal('bedrock-agentcore.amazonaws.com'), + description: 'Execution role for ATX Transform orchestrator on AgentCore', + }); + + // Bedrock model access + agentRole.addToPolicy(new iam.PolicyStatement({ + actions: ['bedrock:InvokeModel', 'bedrock:InvokeModelWithResponseStream'], + resources: ['*'], + })); + + // Batch access (for execute_transform_agent) + agentRole.addToPolicy(new iam.PolicyStatement({ + actions: ['batch:SubmitJob', 'batch:DescribeJobs'], + resources: ['*'], + })); + + // S3 access (for find/create/execute tools) + agentRole.addToPolicy(new iam.PolicyStatement({ + actions: ['s3:ListBucket', 's3:GetObject', 's3:PutObject'], + resources: [ + `arn:aws:s3:::${props.outputBucketName}`, + `arn:aws:s3:::${props.outputBucketName}/*`, + `arn:aws:s3:::${props.sourceBucketName}`, + `arn:aws:s3:::${props.sourceBucketName}/*`, + ], + })); + + // STS for account ID lookups + agentRole.addToPolicy(new iam.PolicyStatement({ + actions: ['sts:GetCallerIdentity'], + resources: ['*'], + })); + + // AgentCore Memory access + agentRole.addToPolicy(new iam.PolicyStatement({ + actions: [ + 'bedrock-agentcore:ListMemories', 'bedrock-agentcore:GetMemory', + 'bedrock-agentcore:CreateMemory', 'bedrock-agentcore:UpdateMemory', + 'bedrock-agentcore:DeleteMemory', 'bedrock-agentcore:ListEvents', + 'bedrock-agentcore:GetEvent', 'bedrock-agentcore:CreateEvent', + 'bedrock-agentcore:DeleteEvent', + ], + resources: ['*'], + })); + + // Observability: X-Ray tracing + CloudWatch Application Signals + agentRole.addToPolicy(new iam.PolicyStatement({ + actions: [ + 'xray:PutTraceSegments', 'xray:PutTelemetryRecords', + 'xray:GetSamplingRules', 'xray:GetSamplingTargets', + ], + resources: ['*'], + })); + + agentRole.addToPolicy(new iam.PolicyStatement({ + actions: [ + 
'logs:CreateLogGroup', 'logs:CreateLogStream', 'logs:PutLogEvents', + 'logs:PutDeliverySource', 'logs:PutDeliveryDestination', + 'logs:CreateDelivery', + ], + resources: [ + `arn:aws:logs:${this.region}:${this.account}:log-group:/aws/bedrock-agentcore/*`, + `arn:aws:logs:${this.region}:${this.account}:log-group:aws/spans:*`, + ], + })); + + // Deploy orchestrator from local directory + const artifact = AgentRuntimeArtifact.fromAsset( + path.join(__dirname, '../../orchestrator') + ); + + this.agentRuntime = new Runtime(this, 'OrchestratorRuntime', { + runtimeName: 'atxTransformOrchestrator', + executionRole: agentRole, + agentRuntimeArtifact: artifact, + environmentVariables: { + AWS_REGION: this.region, + }, + }); + + // Suppress cdk-nag for AgentCore role + NagSuppressions.addResourceSuppressions(agentRole, [ + { id: 'AwsSolutions-IAM5', reason: 'Bedrock model and Batch resources require wildcard. S3 is scoped to atx-* buckets.' }, + ], true); + + // ======================================== + // 2. 
Async Lambda Bridge + // ======================================== + + const asyncLambdaRole = new iam.Role(this, 'AsyncLambdaRole', { + assumedBy: new iam.ServicePrincipal('lambda.amazonaws.com'), + managedPolicies: [ + iam.ManagedPolicy.fromAwsManagedPolicyName('service-role/AWSLambdaBasicExecutionRole'), + ], + }); + + // AgentCore invoke permission + asyncLambdaRole.addToPolicy(new iam.PolicyStatement({ + actions: ['bedrock-agentcore:InvokeAgentRuntime'], + resources: [this.agentRuntime.agentRuntimeArn + '*'], + })); + + // S3 for orchestrator results + custom definitions + asyncLambdaRole.addToPolicy(new iam.PolicyStatement({ + actions: ['s3:GetObject', 's3:PutObject', 's3:ListBucket'], + resources: [ + `arn:aws:s3:::${props.outputBucketName}`, + `arn:aws:s3:::${props.outputBucketName}/*`, + `arn:aws:s3:::${props.sourceBucketName}`, + `arn:aws:s3:::${props.sourceBucketName}/*`, + ], + })); + + // Batch describe for direct status/results + asyncLambdaRole.addToPolicy(new iam.PolicyStatement({ + actions: ['batch:DescribeJobs'], + resources: ['*'], + })); + + // STS for account ID + asyncLambdaRole.addToPolicy(new iam.PolicyStatement({ + actions: ['sts:GetCallerIdentity'], + resources: ['*'], + })); + + // DynamoDB for job tracking + asyncLambdaRole.addToPolicy(new iam.PolicyStatement({ + actions: ['dynamodb:GetItem', 'dynamodb:PutItem', 'dynamodb:UpdateItem', 'dynamodb:DeleteItem', 'dynamodb:Scan'], + resources: [`arn:aws:dynamodb:${this.region}:${this.account}:table/${props.jobsTableName}`], + })); + + // X-Ray tracing + asyncLambdaRole.addToPolicy(new iam.PolicyStatement({ + actions: ['xray:PutTraceSegments', 'xray:PutTelemetryRecords'], + resources: ['*'], + })); + + const asyncLambda = new lambda.Function(this, 'AsyncInvokeAgent', { + functionName: 'atx-async-invoke-agent', + runtime: lambda.Runtime.PYTHON_3_11, + handler: 'async_invoke_agent.lambda_handler', + code: lambda.Code.fromAsset(path.join(__dirname, '../../api/lambda')), + timeout: 
cdk.Duration.minutes(15), + memorySize: 1024, + role: asyncLambdaRole, + tracing: lambda.Tracing.ACTIVE, + environment: { + AGENT_RUNTIME_ARN: this.agentRuntime.agentRuntimeArn, + RESULT_BUCKET: props.outputBucketName, + JOBS_TABLE: props.jobsTableName, + }, + }); + + // Self-invoke permission (for async fire-and-forget) + // Use wildcard to avoid circular dependency with HTTP API integration + asyncLambdaRole.addToPolicy(new iam.PolicyStatement({ + actions: ['lambda:InvokeFunction'], + resources: [`arn:aws:lambda:${this.region}:${this.account}:function:atx-async-invoke-agent`], + })); + + NagSuppressions.addResourceSuppressions(asyncLambdaRole, [ + { id: 'AwsSolutions-IAM4', reason: 'AWSLambdaBasicExecutionRole is standard for Lambda CloudWatch access.' }, + { id: 'AwsSolutions-IAM5', reason: 'Batch DescribeJobs requires wildcard. S3 scoped to atx-* buckets.' }, + ], true); + + NagSuppressions.addResourceSuppressions(asyncLambda, [ + { id: 'AwsSolutions-L1', reason: 'Python 3.11 is stable and supported until Oct 2027.' }, + ], true); + + // ======================================== + // 3. 
HTTP API Gateway + // ======================================== + + const httpApi = new apigwv2.HttpApi(this, 'HttpApi', { + apiName: 'atx-ui-api', + corsPreflight: { + allowOrigins: ['*'], + allowMethods: [apigwv2.CorsHttpMethod.POST], + allowHeaders: ['content-type'], + maxAge: cdk.Duration.days(1), + }, + }); + + httpApi.addRoutes({ + path: '/orchestrate', + methods: [apigwv2.HttpMethod.POST], + integration: new integrations.HttpLambdaIntegration('OrchestrateIntegration', asyncLambda), + }); + + new apigwv2.HttpStage(this, 'ProdStage', { + httpApi, + stageName: 'prod', + autoDeploy: true, + }); + + this.apiEndpoint = `${httpApi.apiEndpoint}/prod`; + + // ======================================== + // Outputs + // ======================================== + + new cdk.CfnOutput(this, 'AgentRuntimeArn', { + value: this.agentRuntime.agentRuntimeArn, + description: 'AgentCore Runtime ARN', + exportName: 'AtxAgentRuntimeArn', + }); + + new cdk.CfnOutput(this, 'ApiEndpoint', { + value: this.apiEndpoint, + description: 'HTTP API endpoint for UI', + exportName: 'AtxApiEndpoint', + }); + + new cdk.CfnOutput(this, 'AsyncLambdaArn', { + value: asyncLambda.functionArn, + description: 'Async invoke Lambda ARN', + }); + } +} diff --git a/agentic-atx-platform/cdk/lib/container-stack.ts b/agentic-atx-platform/cdk/lib/container-stack.ts new file mode 100644 index 0000000..7973df1 --- /dev/null +++ b/agentic-atx-platform/cdk/lib/container-stack.ts @@ -0,0 +1,56 @@ +import * as cdk from 'aws-cdk-lib'; +import * as ecr from 'aws-cdk-lib/aws-ecr'; +import * as ecrAssets from 'aws-cdk-lib/aws-ecr-assets'; +import { Construct } from 'constructs'; +import * as path from 'path'; + +export interface ContainerStackProps extends cdk.StackProps { + ecrRepoName: string; +} + +export class ContainerStack extends cdk.Stack { + public readonly repository: ecr.IRepository; + public readonly imageUri: string; + + constructor(scope: Construct, id: string, props: ContainerStackProps) { + super(scope, 
id, props); + + // Create ECR repository + this.repository = new ecr.Repository(this, 'Repository', { + repositoryName: props.ecrRepoName, + removalPolicy: cdk.RemovalPolicy.RETAIN, // Keep images on stack deletion + imageScanOnPush: true, + lifecycleRules: [ + { + description: 'Keep last 10 images', + maxImageCount: 10, + }, + ], + }); + + // Build and push Docker image from Dockerfile + const dockerImage = new ecrAssets.DockerImageAsset(this, 'DockerImage', { + directory: path.join(__dirname, '../../container'), + platform: ecrAssets.Platform.LINUX_AMD64, + }); + + this.imageUri = dockerImage.imageUri; + + new cdk.CfnOutput(this, 'ImageUri', { + value: this.imageUri, + description: 'Container image URI', + exportName: 'AtxContainerImageUri', + }); + + new cdk.CfnOutput(this, 'RepositoryUri', { + value: this.repository.repositoryUri, + description: 'ECR repository URI', + exportName: 'AtxEcrRepositoryUri', + }); + + new cdk.CfnOutput(this, 'RepositoryName', { + value: this.repository.repositoryName, + description: 'ECR repository name', + }); + } +} diff --git a/agentic-atx-platform/cdk/lib/infrastructure-stack.ts b/agentic-atx-platform/cdk/lib/infrastructure-stack.ts new file mode 100644 index 0000000..b5acb38 --- /dev/null +++ b/agentic-atx-platform/cdk/lib/infrastructure-stack.ts @@ -0,0 +1,360 @@ +import * as cdk from 'aws-cdk-lib'; +import * as s3 from 'aws-cdk-lib/aws-s3'; +import * as iam from 'aws-cdk-lib/aws-iam'; +import * as ec2 from 'aws-cdk-lib/aws-ec2'; +import * as batch from 'aws-cdk-lib/aws-batch'; +import * as logs from 'aws-cdk-lib/aws-logs'; +import * as cloudwatch from 'aws-cdk-lib/aws-cloudwatch'; +import * as dynamodb from 'aws-cdk-lib/aws-dynamodb'; +import { NagSuppressions } from 'cdk-nag'; +import { Construct } from 'constructs'; + +export interface InfrastructureStackProps extends cdk.StackProps { + imageUri: string; + fargateVcpu: number; + fargateMemory: number; + jobTimeout: number; + maxVcpus: number; + existingOutputBucket?: 
string; + existingSourceBucket?: string; + existingVpcId?: string; + existingSubnetIds?: string[]; + existingSecurityGroupId?: string; +} + +export class InfrastructureStack extends cdk.Stack { + public readonly outputBucket: s3.IBucket; + public readonly sourceBucket: s3.IBucket; + public readonly jobQueue: batch.CfnJobQueue; + public readonly jobDefinition: batch.CfnJobDefinition; + public readonly logGroup: logs.LogGroup; + public readonly jobsTable: dynamodb.Table; + + constructor(scope: Construct, id: string, props: InfrastructureStackProps) { + super(scope, id, props); + + const accountId = cdk.Stack.of(this).account; + + // S3 Buckets - Use existing or create new + + // Create log bucket for S3 access logs (only if creating new buckets) + let logBucket: s3.IBucket | undefined; + if (!props.existingOutputBucket || !props.existingSourceBucket) { + logBucket = new s3.Bucket(this, 'LogBucket', { + bucketName: `atx-logs-${accountId}`, + encryption: s3.BucketEncryption.S3_MANAGED, + blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL, + removalPolicy: cdk.RemovalPolicy.RETAIN, + enforceSSL: true, + }); + } + + if (props.existingOutputBucket) { + this.outputBucket = s3.Bucket.fromBucketName(this, 'OutputBucket', props.existingOutputBucket); + } else { + this.outputBucket = new s3.Bucket(this, 'OutputBucket', { + bucketName: `atx-custom-output-${accountId}`, + versioned: true, + encryption: s3.BucketEncryption.S3_MANAGED, + blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL, + removalPolicy: cdk.RemovalPolicy.RETAIN, + enforceSSL: true, + serverAccessLogsBucket: logBucket, + serverAccessLogsPrefix: 'output-bucket/', + }); + } + + if (props.existingSourceBucket) { + this.sourceBucket = s3.Bucket.fromBucketName(this, 'SourceBucket', props.existingSourceBucket); + } else { + this.sourceBucket = new s3.Bucket(this, 'SourceBucket', { + bucketName: `atx-source-code-${accountId}`, + encryption: s3.BucketEncryption.S3_MANAGED, + blockPublicAccess: 
s3.BlockPublicAccess.BLOCK_ALL, + lifecycleRules: [ + { + expiration: cdk.Duration.days(7), + prefix: 'uploads/', + }, + ], + removalPolicy: cdk.RemovalPolicy.RETAIN, + enforceSSL: true, + serverAccessLogsBucket: logBucket, + serverAccessLogsPrefix: 'source-bucket/', + }); + } + + // CloudWatch Log Group + this.logGroup = new logs.LogGroup(this, 'LogGroup', { + logGroupName: '/aws/batch/atx-transform', + retention: logs.RetentionDays.ONE_MONTH, + removalPolicy: cdk.RemovalPolicy.RETAIN, + }); + + // DynamoDB table for job tracking + this.jobsTable = new dynamodb.Table(this, 'JobsTable', { + tableName: 'atx-transform-jobs', + partitionKey: { name: 'id', type: dynamodb.AttributeType.STRING }, + billingMode: dynamodb.BillingMode.PAY_PER_REQUEST, + encryption: dynamodb.TableEncryption.AWS_MANAGED, + pointInTimeRecovery: true, + removalPolicy: cdk.RemovalPolicy.DESTROY, + timeToLiveAttribute: 'ttl', + }); + + // IAM Role for Batch Job + const jobRole = new iam.Role(this, 'BatchJobRole', { + roleName: 'ATXBatchJobRole', + assumedBy: new iam.ServicePrincipal('ecs-tasks.amazonaws.com'), + managedPolicies: [ + iam.ManagedPolicy.fromAwsManagedPolicyName('AWSTransformCustomFullAccess'), + ], + }); + + // Grant S3 access to job role + this.outputBucket.grantReadWrite(jobRole); + this.sourceBucket.grantReadWrite(jobRole); + + // Suppress cdk-nag findings for job role + NagSuppressions.addResourceSuppressions(jobRole, [ + { + id: 'AwsSolutions-IAM4', + reason: 'AWSTransformCustomFullAccess is required for AWS Transform API access. This is an AWS-managed policy specifically designed for this service.', + appliesTo: ['Policy::arn::iam::aws:policy/AWSTransformCustomFullAccess'], + }, + { + id: 'AwsSolutions-IAM5', + reason: 'S3 wildcard permissions are required for dynamic file operations. 
Jobs write results to unique paths (transformations/{jobName}/{conversationId}/).', + appliesTo: [ + 'Action::s3:Abort*', + 'Action::s3:DeleteObject*', + 'Action::s3:GetBucket*', + 'Action::s3:GetObject*', + 'Action::s3:List*', + 'Resource::/*', + 'Resource::/*', + ], + }, + ], true); + + // IAM Role for Batch Execution + const executionRole = new iam.Role(this, 'BatchExecutionRole', { + roleName: 'ATXBatchExecutionRole', + assumedBy: new iam.ServicePrincipal('ecs-tasks.amazonaws.com'), + managedPolicies: [ + iam.ManagedPolicy.fromAwsManagedPolicyName('service-role/AmazonECSTaskExecutionRolePolicy'), + ], + }); + + // Suppress cdk-nag findings for execution role + NagSuppressions.addResourceSuppressions(executionRole, [ + { + id: 'AwsSolutions-IAM4', + reason: 'AmazonECSTaskExecutionRolePolicy is the standard AWS-managed policy for ECS task execution. It provides necessary permissions for ECR, CloudWatch Logs, and Secrets Manager.', + appliesTo: ['Policy::arn::iam::aws:policy/service-role/AmazonECSTaskExecutionRolePolicy'], + }, + ], true); + + // Get VPC - Use existing or default + let vpc: ec2.IVpc; + if (props.existingVpcId) { + // Use fromVpcAttributes to avoid lookup + const subnetIds = props.existingSubnetIds && props.existingSubnetIds.length > 0 + ? props.existingSubnetIds + : []; + + vpc = ec2.Vpc.fromVpcAttributes(this, 'Vpc', { + vpcId: props.existingVpcId, + availabilityZones: ['us-east-1a', 'us-east-1b'], // Dummy values, not used + publicSubnetIds: subnetIds.length > 0 ? 
subnetIds : undefined, + }); + } else { + // Lookup default VPC + vpc = ec2.Vpc.fromLookup(this, 'DefaultVpc', { isDefault: true }); + } + + // Security Group - Use existing or create new + let securityGroup: ec2.ISecurityGroup; + if (props.existingSecurityGroupId) { + securityGroup = ec2.SecurityGroup.fromSecurityGroupId(this, 'SecurityGroup', props.existingSecurityGroupId); + } else { + securityGroup = new ec2.SecurityGroup(this, 'BatchSecurityGroup', { + vpc, + description: 'Security group for AWS Transform Batch jobs', + allowAllOutbound: true, + }); + } + + // Get subnets - Use existing or VPC public subnets + const subnetIds = props.existingSubnetIds && props.existingSubnetIds.length > 0 + ? props.existingSubnetIds + : vpc.publicSubnets.map(subnet => subnet.subnetId); + + // Batch Compute Environment + const computeEnvironment = new batch.CfnComputeEnvironment(this, 'ComputeEnvironment', { + computeEnvironmentName: 'atx-fargate-compute', + type: 'MANAGED', + state: 'ENABLED', + computeResources: { + type: 'FARGATE', + maxvCpus: props.maxVcpus, + subnets: subnetIds, + securityGroupIds: [securityGroup.securityGroupId], + }, + }); + + // Batch Job Queue + this.jobQueue = new batch.CfnJobQueue(this, 'JobQueue', { + jobQueueName: 'atx-job-queue', + state: 'ENABLED', + priority: 1, + computeEnvironmentOrder: [ + { + order: 1, + computeEnvironment: computeEnvironment.attrComputeEnvironmentArn, + }, + ], + }); + + this.jobQueue.addDependency(computeEnvironment); + + // Batch Job Definition + this.jobDefinition = new batch.CfnJobDefinition(this, 'JobDefinition', { + jobDefinitionName: 'atx-transform-job', + type: 'container', + platformCapabilities: ['FARGATE'], + timeout: { + attemptDurationSeconds: props.jobTimeout, + }, + retryStrategy: { + attempts: 3, + }, + containerProperties: { + image: props.imageUri, + jobRoleArn: jobRole.roleArn, + executionRoleArn: executionRole.roleArn, + resourceRequirements: [ + { type: 'VCPU', value: props.fargateVcpu.toString() }, + { 
type: 'MEMORY', value: props.fargateMemory.toString() }, + ], + logConfiguration: { + logDriver: 'awslogs', + options: { + 'awslogs-group': this.logGroup.logGroupName, + 'awslogs-region': this.region, + 'awslogs-stream-prefix': 'atx', + }, + }, + networkConfiguration: { + assignPublicIp: 'ENABLED', + }, + environment: [ + { name: 'S3_BUCKET', value: this.outputBucket.bucketName }, + { name: 'SOURCE_BUCKET', value: this.sourceBucket.bucketName }, + { name: 'AWS_DEFAULT_REGION', value: this.region }, + ], + }, + }); + + // CloudWatch Dashboard with all widgets + const dashboard = new cloudwatch.Dashboard(this, 'Dashboard', { + dashboardName: 'ATX-Transform-CLI-Dashboard', + }); + + // Row 1: Job Completion Rate (Log Insights) + dashboard.addWidgets( + new cloudwatch.LogQueryWidget({ + title: '📊 Job Completion Rate (Hourly)', + logGroupNames: [this.logGroup.logGroupName], + queryLines: [ + 'filter @message like /Results uploaded successfully/ or @message like /Command failed after/', + 'stats sum(@message like /Results uploaded successfully/) as Completed, sum(@message like /Command failed after/) as Failed by bin(1h)', + ], + width: 24, + height: 6, + }) + ); + + // Row 2: Recent Jobs (Log Insights) + dashboard.addWidgets( + new cloudwatch.LogQueryWidget({ + title: '📋 Recent Jobs (Job Name, Time, Last Message, Log Stream)', + logGroupNames: [this.logGroup.logGroupName], + queryLines: [ + "parse @message 'Output: transformations/*/' as jobName", + 'stats latest(jobName) as job, latest(@timestamp) as lastActivity, latest(@message) as lastMessage by @logStream', + 'sort lastActivity desc', + 'limit 25', + ], + width: 24, + height: 8, + }) + ); + + // Row 3: Lambda and Batch Metrics + dashboard.addWidgets( + new cloudwatch.GraphWidget({ + title: '⚡ Lambda Invocations', + left: [ + new cloudwatch.Metric({ + namespace: 'AWS/Lambda', + metricName: 'Invocations', + dimensionsMap: { FunctionName: 'atx-async-invoke-agent' }, + statistic: 'Sum', + }), + ], + width: 12, + 
height: 6, + }), + new cloudwatch.GraphWidget({ + title: '⚡ Lambda Duration (ms)', + left: [ + new cloudwatch.Metric({ + namespace: 'AWS/Lambda', + metricName: 'Duration', + dimensionsMap: { FunctionName: 'atx-async-invoke-agent' }, + statistic: 'Average', + }), + ], + width: 12, + height: 6, + }) + ); + + // Outputs + new cdk.CfnOutput(this, 'OutputBucketName', { + value: this.outputBucket.bucketName, + description: 'S3 bucket for transformation outputs', + exportName: 'AtxOutputBucketName', + }); + + new cdk.CfnOutput(this, 'SourceBucketName', { + value: this.sourceBucket.bucketName, + description: 'S3 bucket for source code uploads', + exportName: 'AtxSourceBucketName', + }); + + new cdk.CfnOutput(this, 'JobQueueArn', { + value: this.jobQueue.attrJobQueueArn, + description: 'Batch job queue ARN', + exportName: 'AtxJobQueueArn', + }); + + new cdk.CfnOutput(this, 'JobDefinitionArn', { + value: this.jobDefinition.ref, + description: 'Batch job definition ARN', + exportName: 'AtxJobDefinitionArn', + }); + + new cdk.CfnOutput(this, 'LogGroupName', { + value: this.logGroup.logGroupName, + description: 'CloudWatch log group name', + }); + + new cdk.CfnOutput(this, 'JobsTableName', { + value: this.jobsTable.tableName, + description: 'DynamoDB table for job tracking', + exportName: 'AtxJobsTableName', + }); + } +} diff --git a/agentic-atx-platform/cdk/lib/ui-stack.ts b/agentic-atx-platform/cdk/lib/ui-stack.ts new file mode 100644 index 0000000..a020014 --- /dev/null +++ b/agentic-atx-platform/cdk/lib/ui-stack.ts @@ -0,0 +1,141 @@ +import * as cdk from 'aws-cdk-lib'; +import * as s3 from 'aws-cdk-lib/aws-s3'; +import * as s3deploy from 'aws-cdk-lib/aws-s3-deployment'; +import * as cloudfront from 'aws-cdk-lib/aws-cloudfront'; +import * as origins from 'aws-cdk-lib/aws-cloudfront-origins'; +import * as iam from 'aws-cdk-lib/aws-iam'; +import { NagSuppressions } from 'cdk-nag'; +import { Construct } from 'constructs'; +import * as path from 'path'; + +export interface 
UiStackProps extends cdk.StackProps { + apiEndpoint?: string; +} + +export class UiStack extends cdk.Stack { + public readonly distribution: cloudfront.Distribution; + public readonly websiteBucket: s3.Bucket; + + constructor(scope: Construct, id: string, props: UiStackProps) { + super(scope, id, props); + + // S3 bucket for static website hosting + this.websiteBucket = new s3.Bucket(this, 'WebsiteBucket', { + bucketName: `atx-transform-ui-${this.account}`, + blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL, + encryption: s3.BucketEncryption.S3_MANAGED, + removalPolicy: cdk.RemovalPolicy.DESTROY, + autoDeleteObjects: true, + enforceSSL: true, + }); + + // CloudFront Origin Access Identity + const oai = new cloudfront.OriginAccessIdentity(this, 'OAI', { + comment: 'ATX Transform UI OAI', + }); + + this.websiteBucket.addToResourcePolicy(new iam.PolicyStatement({ + actions: ['s3:GetObject'], + resources: [this.websiteBucket.arnForObjects('*')], + principals: [oai.grantPrincipal], + })); + + // CloudFront access logs bucket + const logBucket = new s3.Bucket(this, 'CloudFrontLogBucket', { + bucketName: `atx-transform-ui-logs-${this.account}`, + blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL, + encryption: s3.BucketEncryption.S3_MANAGED, + removalPolicy: cdk.RemovalPolicy.DESTROY, + autoDeleteObjects: true, + enforceSSL: true, + objectOwnership: s3.ObjectOwnership.OBJECT_WRITER, + lifecycleRules: [{ expiration: cdk.Duration.days(30) }], + }); + + // CloudFront distribution + this.distribution = new cloudfront.Distribution(this, 'Distribution', { + comment: 'ATX Transform UI', + defaultBehavior: { + origin: new origins.S3Origin(this.websiteBucket, { + originAccessIdentity: oai, + }), + viewerProtocolPolicy: cloudfront.ViewerProtocolPolicy.REDIRECT_TO_HTTPS, + cachePolicy: cloudfront.CachePolicy.CACHING_OPTIMIZED, + allowedMethods: cloudfront.AllowedMethods.ALLOW_GET_HEAD, + }, + defaultRootObject: 'index.html', + errorResponses: [ + { + httpStatus: 403, + 
responseHttpStatus: 200, + responsePagePath: '/index.html', + ttl: cdk.Duration.minutes(5), + }, + { + httpStatus: 404, + responseHttpStatus: 200, + responsePagePath: '/index.html', + ttl: cdk.Duration.minutes(5), + }, + ], + enableLogging: true, + logBucket: logBucket, + logFilePrefix: 'cloudfront/', + minimumProtocolVersion: cloudfront.SecurityPolicyProtocol.TLS_V1_2_2021, + }); + + // Deploy built UI assets to S3 + new s3deploy.BucketDeployment(this, 'DeployWebsite', { + sources: [s3deploy.Source.asset(path.join(__dirname, '../../ui/dist'))], + destinationBucket: this.websiteBucket, + distribution: this.distribution, + distributionPaths: ['/*'], + }); + + // cdk-nag suppressions + NagSuppressions.addResourceSuppressions(this.websiteBucket, [ + { id: 'AwsSolutions-S1', reason: 'Website bucket access is logged via CloudFront access logs, not S3 access logs.' }, + ], true); + + NagSuppressions.addResourceSuppressions(logBucket, [ + { id: 'AwsSolutions-S1', reason: 'This IS the log bucket. Enabling access logs on it would create an infinite loop.' }, + ], true); + + NagSuppressions.addResourceSuppressions(this.distribution, [ + { id: 'AwsSolutions-CFR1', reason: 'Geo restrictions not required for internal tool.' }, + { id: 'AwsSolutions-CFR2', reason: 'WAF not required for static website serving internal tool.' }, + { id: 'AwsSolutions-CFR4', reason: 'Using TLS 1.2 minimum protocol version which is secure.' }, + { id: 'AwsSolutions-CFR7', reason: 'Using OAI for S3 origin access. OAC migration planned for future release.' }, + ], true); + + // Suppress cdk-nag for CDK-managed BucketDeployment construct (not our code) + NagSuppressions.addStackSuppressions(this, [ + { id: 'AwsSolutions-IAM4', reason: 'BucketDeployment Lambda uses AWSLambdaBasicExecutionRole (CDK-managed construct).' }, + { id: 'AwsSolutions-IAM5', reason: 'BucketDeployment Lambda requires S3 wildcard permissions for sync (CDK-managed construct).' 
}, + { id: 'AwsSolutions-L1', reason: 'BucketDeployment Lambda runtime is managed by CDK, not user-configurable.' }, + ]); + + // Outputs + new cdk.CfnOutput(this, 'DistributionDomainName', { + value: this.distribution.distributionDomainName, + description: 'CloudFront distribution domain name', + exportName: 'AtxUiDomain', + }); + + new cdk.CfnOutput(this, 'DistributionId', { + value: this.distribution.distributionId, + description: 'CloudFront distribution ID', + }); + + new cdk.CfnOutput(this, 'WebsiteBucketName', { + value: this.websiteBucket.bucketName, + description: 'S3 bucket for UI assets', + }); + + new cdk.CfnOutput(this, 'WebsiteUrl', { + value: `https://${this.distribution.distributionDomainName}`, + description: 'Website URL', + exportName: 'AtxUiUrl', + }); + } +} diff --git a/agentic-atx-platform/cdk/package-lock.json b/agentic-atx-platform/cdk/package-lock.json new file mode 100644 index 0000000..147d659 --- /dev/null +++ b/agentic-atx-platform/cdk/package-lock.json @@ -0,0 +1,4518 @@ +{ + "name": "cdk", + "version": "0.1.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "cdk", + "version": "0.1.0", + "dependencies": { + "@aws-cdk/aws-bedrock-agentcore-alpha": "^2.241.0-alpha.0", + "aws-cdk-lib": "^2.234.0", + "constructs": "^10.0.0" + }, + "bin": { + "cdk": "bin/cdk.js" + }, + "devDependencies": { + "@types/jest": "^29.5.14", + "@types/node": "22.7.9", + "aws-cdk": "^2.1100.3", + "cdk-nag": "^2.37.55", + "jest": "^29.7.0", + "ts-jest": "^29.2.5", + "ts-node": "^10.9.2", + "typescript": "~5.6.3" + } + }, + "node_modules/@aws-cdk/asset-awscli-v1": { + "version": "2.2.273", + "resolved": "https://registry.npmjs.org/@aws-cdk/asset-awscli-v1/-/asset-awscli-v1-2.2.273.tgz", + "integrity": "sha512-X57HYUtHt9BQrlrzUNcMyRsDUCoakYNnY6qh5lNwRCHPtQoTfXmuISkfLk0AjLkcbS5lw1LLTQFiQhTDXfiTvg==", + "license": "Apache-2.0" + }, + "node_modules/@aws-cdk/asset-node-proxy-agent-v6": { + "version": "2.1.1", + "resolved": 
"https://registry.npmjs.org/@aws-cdk/asset-node-proxy-agent-v6/-/asset-node-proxy-agent-v6-2.1.1.tgz", + "integrity": "sha512-We4bmHaowOPHr+IQR4/FyTGjRfjgBj4ICMjtqmJeBDWad3Q/6St12NT07leNtyuukv2qMhtSZJQorD8KpKTwRA==", + "license": "Apache-2.0" + }, + "node_modules/@aws-cdk/aws-bedrock-agentcore-alpha": { + "version": "2.241.0-alpha.0", + "resolved": "https://registry.npmjs.org/@aws-cdk/aws-bedrock-agentcore-alpha/-/aws-bedrock-agentcore-alpha-2.241.0-alpha.0.tgz", + "integrity": "sha512-boxrygJjP39Q6kLIb4TKt82vJS/nSDdSGdznFoCBHEfY9kNzDV74tqmtTNRskAJHAypOkwHS4Z7CxfSORQU4sw==", + "license": "Apache-2.0", + "engines": { + "node": ">= 18.0.0" + }, + "peerDependencies": { + "@aws-cdk/aws-bedrock-alpha": "2.241.0-alpha.0", + "aws-cdk-lib": "^2.241.0", + "constructs": "^10.5.0" + } + }, + "node_modules/@aws-cdk/aws-bedrock-alpha": { + "version": "2.241.0-alpha.0", + "resolved": "https://registry.npmjs.org/@aws-cdk/aws-bedrock-alpha/-/aws-bedrock-alpha-2.241.0-alpha.0.tgz", + "integrity": "sha512-fhbP0NBS5mSntxSKdYn79Lbrf9K+5fmb5fQ9A2DrqXNWLpGfQBRGS3DdrIby8Wk3IKmno5dqPiI6bDciTKNmTA==", + "license": "Apache-2.0", + "peer": true, + "engines": { + "node": ">= 18.0.0" + }, + "peerDependencies": { + "aws-cdk-lib": "^2.241.0", + "constructs": "^10.5.0" + } + }, + "node_modules/@aws-cdk/cloud-assembly-schema": { + "version": "53.13.0", + "resolved": "https://registry.npmjs.org/@aws-cdk/cloud-assembly-schema/-/cloud-assembly-schema-53.13.0.tgz", + "integrity": "sha512-LgRc8Sl1VzuhhPWmJ4hpajBe8Y8coA3KbpAmej7X4nPvWO/x4nUoZSysUKCx2YldLAAYlzwc0mkDHmc/YMZ6vg==", + "bundleDependencies": [ + "jsonschema", + "semver" + ], + "license": "Apache-2.0", + "dependencies": { + "jsonschema": "~1.4.1", + "semver": "^7.7.4" + }, + "engines": { + "node": ">= 18.0.0" + } + }, + "node_modules/@aws-cdk/cloud-assembly-schema/node_modules/jsonschema": { + "version": "1.4.1", + "inBundle": true, + "license": "MIT", + "engines": { + "node": "*" + } + }, + 
"node_modules/@aws-cdk/cloud-assembly-schema/node_modules/semver": { + "version": "7.7.4", + "inBundle": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@babel/code-frame": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.27.1.tgz", + "integrity": "sha512-cjQ7ZlQ0Mv3b47hABuTevyTuYN4i+loJKGeV9flcCgIK37cCXRh+L1bd3iBHlynerhQ7BhCkn2BPbQUL+rGqFg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-validator-identifier": "^7.27.1", + "js-tokens": "^4.0.0", + "picocolors": "^1.1.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/compat-data": { + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.28.5.tgz", + "integrity": "sha512-6uFXyCayocRbqhZOB+6XcuZbkMNimwfVGFji8CTZnCzOHVGvDqzvitu1re2AU5LROliz7eQPhB8CpAMvnx9EjA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/core": { + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.28.5.tgz", + "integrity": "sha512-e7jT4DxYvIDLk1ZHmU/m/mB19rex9sv0c2ftBtjSBv+kVM/902eh0fINUzD7UwLLNR+jU585GxUJ8/EBfAM5fw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.27.1", + "@babel/generator": "^7.28.5", + "@babel/helper-compilation-targets": "^7.27.2", + "@babel/helper-module-transforms": "^7.28.3", + "@babel/helpers": "^7.28.4", + "@babel/parser": "^7.28.5", + "@babel/template": "^7.27.2", + "@babel/traverse": "^7.28.5", + "@babel/types": "^7.28.5", + "@jridgewell/remapping": "^2.3.5", + "convert-source-map": "^2.0.0", + "debug": "^4.1.0", + "gensync": "^1.0.0-beta.2", + "json5": "^2.2.3", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/babel" + } + }, + "node_modules/@babel/generator": { + "version": 
"7.28.5", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.5.tgz", + "integrity": "sha512-3EwLFhZ38J4VyIP6WNtt2kUdW9dokXA9Cr4IVIFHuCpZ3H8/YFOl5JjZHisrn1fATPBmKKqXzDFvh9fUwHz6CQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.28.5", + "@babel/types": "^7.28.5", + "@jridgewell/gen-mapping": "^0.3.12", + "@jridgewell/trace-mapping": "^0.3.28", + "jsesc": "^3.0.2" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-compilation-targets": { + "version": "7.27.2", + "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.27.2.tgz", + "integrity": "sha512-2+1thGUUWWjLTYTHZWK1n8Yga0ijBz1XAhUXcKy81rd5g6yh7hGqMp45v7cadSbEHc9G3OTv45SyneRN3ps4DQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/compat-data": "^7.27.2", + "@babel/helper-validator-option": "^7.27.1", + "browserslist": "^4.24.0", + "lru-cache": "^5.1.1", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-globals": { + "version": "7.28.0", + "resolved": "https://registry.npmjs.org/@babel/helper-globals/-/helper-globals-7.28.0.tgz", + "integrity": "sha512-+W6cISkXFa1jXsDEdYA8HeevQT/FULhxzR99pxphltZcVaugps53THCeiWA8SguxxpSp3gKPiuYfSWopkLQ4hw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-module-imports": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.27.1.tgz", + "integrity": "sha512-0gSFWUPNXNopqtIPQvlD5WgXYI5GY2kP2cCvoT8kczjbfcfuIljTbcWrulD1CIPIX2gt1wghbDy08yE1p+/r3w==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/traverse": "^7.27.1", + "@babel/types": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-module-transforms": { + "version": "7.28.3", + "resolved": 
"https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.28.3.tgz", + "integrity": "sha512-gytXUbs8k2sXS9PnQptz5o0QnpLL51SwASIORY6XaBKF88nsOT0Zw9szLqlSGQDP/4TljBAD5y98p2U1fqkdsw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-module-imports": "^7.27.1", + "@babel/helper-validator-identifier": "^7.27.1", + "@babel/traverse": "^7.28.3" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/helper-plugin-utils": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.27.1.tgz", + "integrity": "sha512-1gn1Up5YXka3YYAHGKpbideQ5Yjf1tDa9qYcgysz+cNCXukyLl6DjPXhD3VRwSb8c0J9tA4b2+rHEZtc6R0tlw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-string-parser": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.27.1.tgz", + "integrity": "sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-identifier": { + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.28.5.tgz", + "integrity": "sha512-qSs4ifwzKJSV39ucNjsvc6WVHs6b7S03sOh2OcHF9UHfVPqWWALUsNUVzhSBiItjRZoLHx7nIarVjqKVusUZ1Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-option": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.27.1.tgz", + "integrity": "sha512-YvjJow9FxbhFFKDSuFnVCe2WxXk1zWc22fFePVNEaWJEu8IrZVlda6N0uHwzZrUM1il7NC9Mlp4MaJYbYd9JSg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + 
} + }, + "node_modules/@babel/helpers": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.28.4.tgz", + "integrity": "sha512-HFN59MmQXGHVyYadKLVumYsA9dBFun/ldYxipEjzA4196jpLZd8UjEEBLkbEkvfYreDqJhZxYAWFPtrfhNpj4w==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/template": "^7.27.2", + "@babel/types": "^7.28.4" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/parser": { + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.5.tgz", + "integrity": "sha512-KKBU1VGYR7ORr3At5HAtUQ+TV3SzRCXmA/8OdDZiLDBIZxVyzXuztPjfLd3BV1PRAQGCMWWSHYhL0F8d5uHBDQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.28.5" + }, + "bin": { + "parser": "bin/babel-parser.js" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@babel/plugin-syntax-async-generators": { + "version": "7.8.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.8.4.tgz", + "integrity": "sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-bigint": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-bigint/-/plugin-syntax-bigint-7.8.3.tgz", + "integrity": "sha512-wnTnFlG+YxQm3vDxpGE57Pj0srRU4sHE/mDkt1qv2YJJSeUAec2ma4WLUnUPeKjyrfntVwe/N6dCXpU+zL3Npg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-class-properties": { + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-class-properties/-/plugin-syntax-class-properties-7.12.13.tgz", + 
"integrity": "sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.12.13" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-class-static-block": { + "version": "7.14.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-class-static-block/-/plugin-syntax-class-static-block-7.14.5.tgz", + "integrity": "sha512-b+YyPmr6ldyNnM6sqYeMWE+bgJcJpO6yS4QD7ymxgH34GBPNDM/THBh8iunyvKIZztiwLH4CJZ0RxTk9emgpjw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.14.5" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-import-attributes": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-attributes/-/plugin-syntax-import-attributes-7.27.1.tgz", + "integrity": "sha512-oFT0FrKHgF53f4vOsZGi2Hh3I35PfSmVs4IBFLFj4dnafP+hIWDLg3VyKmUHfLoLHlyxY4C7DGtmHuJgn+IGww==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-import-meta": { + "version": "7.10.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-meta/-/plugin-syntax-import-meta-7.10.4.tgz", + "integrity": "sha512-Yqfm+XDx0+Prh3VSeEQCPU81yC+JWZ2pDPFSS4ZdpfZhp4MkFMaDC1UqseovEKwSUpnIL7+vK+Clp7bfh0iD7g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.10.4" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-json-strings": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.8.3.tgz", + 
"integrity": "sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-jsx": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.27.1.tgz", + "integrity": "sha512-y8YTNIeKoyhGd9O0Jiyzyyqk8gdjnumGTQPsz0xOZOQ2RmkVJeZ1vmmfIvFEKqucBG6axJGBZDE/7iI5suUI/w==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-logical-assignment-operators": { + "version": "7.10.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-logical-assignment-operators/-/plugin-syntax-logical-assignment-operators-7.10.4.tgz", + "integrity": "sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.10.4" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-nullish-coalescing-operator": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-nullish-coalescing-operator/-/plugin-syntax-nullish-coalescing-operator-7.8.3.tgz", + "integrity": "sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-numeric-separator": { + "version": "7.10.4", + "resolved": 
"https://registry.npmjs.org/@babel/plugin-syntax-numeric-separator/-/plugin-syntax-numeric-separator-7.10.4.tgz", + "integrity": "sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.10.4" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-object-rest-spread": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz", + "integrity": "sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-optional-catch-binding": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.8.3.tgz", + "integrity": "sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-optional-chaining": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-optional-chaining/-/plugin-syntax-optional-chaining-7.8.3.tgz", + "integrity": "sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-private-property-in-object": { + "version": "7.14.5", + "resolved": 
"https://registry.npmjs.org/@babel/plugin-syntax-private-property-in-object/-/plugin-syntax-private-property-in-object-7.14.5.tgz", + "integrity": "sha512-0wVnp9dxJ72ZUJDV27ZfbSj6iHLoytYZmh3rFcxNnvsJF3ktkzLDZPy/mA17HGsaQT3/DQsWYX1f1QGWkCoVUg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.14.5" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-top-level-await": { + "version": "7.14.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.14.5.tgz", + "integrity": "sha512-hx++upLv5U1rgYfwe1xBQUhRmU41NEvpUvrp8jkrSCdvGSnM5/qdRMtylJ6PG5OFkBaHkbTAKTnd3/YyESRHFw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.14.5" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-typescript": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.27.1.tgz", + "integrity": "sha512-xfYCBMxveHrRMnAWl1ZlPXOZjzkN82THFvLhQhFXFt81Z5HnN+EtUkZhv/zcKpmT3fzmWZB0ywiBrbC3vogbwQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/template": { + "version": "7.27.2", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.27.2.tgz", + "integrity": "sha512-LPDZ85aEJyYSd18/DkjNh4/y1ntkE5KwUHWTiqgRxruuZL2F1yuHligVHLvcHY2vMHXttKFpJn6LwfI7cw7ODw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.27.1", + "@babel/parser": "^7.27.2", + "@babel/types": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/traverse": { + "version": "7.28.5", + "resolved": 
"https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.5.tgz", + "integrity": "sha512-TCCj4t55U90khlYkVV/0TfkJkAkUg3jZFA3Neb7unZT8CPok7iiRfaX0F+WnqWqt7OxhOn0uBKXCw4lbL8W0aQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.27.1", + "@babel/generator": "^7.28.5", + "@babel/helper-globals": "^7.28.0", + "@babel/parser": "^7.28.5", + "@babel/template": "^7.27.2", + "@babel/types": "^7.28.5", + "debug": "^4.3.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/types": { + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.5.tgz", + "integrity": "sha512-qQ5m48eI/MFLQ5PxQj4PFaprjyCTLI37ElWMmNs0K8Lk3dVeOdNpB3ks8jc7yM5CDmVC73eMVk/trk3fgmrUpA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-string-parser": "^7.27.1", + "@babel/helper-validator-identifier": "^7.28.5" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@bcoe/v8-coverage": { + "version": "0.2.3", + "resolved": "https://registry.npmjs.org/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz", + "integrity": "sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==", + "dev": true, + "license": "MIT" + }, + "node_modules/@cspotcode/source-map-support": { + "version": "0.8.1", + "resolved": "https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz", + "integrity": "sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/trace-mapping": "0.3.9" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/@cspotcode/source-map-support/node_modules/@jridgewell/trace-mapping": { + "version": "0.3.9", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz", + "integrity": "sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==", + 
"dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/resolve-uri": "^3.0.3", + "@jridgewell/sourcemap-codec": "^1.4.10" + } + }, + "node_modules/@istanbuljs/load-nyc-config": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz", + "integrity": "sha512-VjeHSlIzpv/NyD3N0YuHfXOPDIixcA1q2ZV98wsMqcYlPmv2n3Yb2lYP9XMElnaFVXg5A7YLTeLu6V84uQDjmQ==", + "dev": true, + "license": "ISC", + "dependencies": { + "camelcase": "^5.3.1", + "find-up": "^4.1.0", + "get-package-type": "^0.1.0", + "js-yaml": "^3.13.1", + "resolve-from": "^5.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@istanbuljs/schema": { + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/@istanbuljs/schema/-/schema-0.1.3.tgz", + "integrity": "sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/@jest/console": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/console/-/console-29.7.0.tgz", + "integrity": "sha512-5Ni4CU7XHQi32IJ398EEP4RrB8eV09sXP2ROqD4bksHrnTree52PsxvX8tpL8LvTZ3pFzXyPbNQReSN41CAhOg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "^29.6.3", + "@types/node": "*", + "chalk": "^4.0.0", + "jest-message-util": "^29.7.0", + "jest-util": "^29.7.0", + "slash": "^3.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/core": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/core/-/core-29.7.0.tgz", + "integrity": "sha512-n7aeXWKMnGtDA48y8TLWJPJmLmmZ642Ceo78cYWEpiD7FzDgmNDV/GCVRorPABdXLJZ/9wzzgZAlHjXjxDHGsg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/console": "^29.7.0", + "@jest/reporters": "^29.7.0", + "@jest/test-result": "^29.7.0", + "@jest/transform": "^29.7.0", + "@jest/types": "^29.6.3", + "@types/node": "*", + 
"ansi-escapes": "^4.2.1", + "chalk": "^4.0.0", + "ci-info": "^3.2.0", + "exit": "^0.1.2", + "graceful-fs": "^4.2.9", + "jest-changed-files": "^29.7.0", + "jest-config": "^29.7.0", + "jest-haste-map": "^29.7.0", + "jest-message-util": "^29.7.0", + "jest-regex-util": "^29.6.3", + "jest-resolve": "^29.7.0", + "jest-resolve-dependencies": "^29.7.0", + "jest-runner": "^29.7.0", + "jest-runtime": "^29.7.0", + "jest-snapshot": "^29.7.0", + "jest-util": "^29.7.0", + "jest-validate": "^29.7.0", + "jest-watcher": "^29.7.0", + "micromatch": "^4.0.4", + "pretty-format": "^29.7.0", + "slash": "^3.0.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "peerDependencies": { + "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" + }, + "peerDependenciesMeta": { + "node-notifier": { + "optional": true + } + } + }, + "node_modules/@jest/environment": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/environment/-/environment-29.7.0.tgz", + "integrity": "sha512-aQIfHDq33ExsN4jP1NWGXhxgQ/wixs60gDiKO+XVMd8Mn0NWPWgc34ZQDTb2jKaUWQ7MuwoitXAsN2XVXNMpAw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/fake-timers": "^29.7.0", + "@jest/types": "^29.6.3", + "@types/node": "*", + "jest-mock": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/expect": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/expect/-/expect-29.7.0.tgz", + "integrity": "sha512-8uMeAMycttpva3P1lBHB8VciS9V0XAr3GymPpipdyQXbBcuhkLQOSe8E/p92RyAdToS6ZD1tFkX+CkhoECE0dQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "expect": "^29.7.0", + "jest-snapshot": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/expect-utils": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/expect-utils/-/expect-utils-29.7.0.tgz", + "integrity": 
"sha512-GlsNBWiFQFCVi9QVSx7f5AgMeLxe9YCCs5PuP2O2LdjDAA8Jh9eX7lA1Jq/xdXw3Wb3hyvlFNfZIfcRetSzYcA==", + "dev": true, + "license": "MIT", + "dependencies": { + "jest-get-type": "^29.6.3" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/fake-timers": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/fake-timers/-/fake-timers-29.7.0.tgz", + "integrity": "sha512-q4DH1Ha4TTFPdxLsqDXK1d3+ioSL7yL5oCMJZgDYm6i+6CygW5E5xVr/D1HdsGxjt1ZWSfUAs9OxSB/BNelWrQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "^29.6.3", + "@sinonjs/fake-timers": "^10.0.2", + "@types/node": "*", + "jest-message-util": "^29.7.0", + "jest-mock": "^29.7.0", + "jest-util": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/globals": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/globals/-/globals-29.7.0.tgz", + "integrity": "sha512-mpiz3dutLbkW2MNFubUGUEVLkTGiqW6yLVTA+JbP6fI6J5iL9Y0Nlg8k95pcF8ctKwCS7WVxteBs29hhfAotzQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/environment": "^29.7.0", + "@jest/expect": "^29.7.0", + "@jest/types": "^29.6.3", + "jest-mock": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/reporters": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/reporters/-/reporters-29.7.0.tgz", + "integrity": "sha512-DApq0KJbJOEzAFYjHADNNxAE3KbhxQB1y5Kplb5Waqw6zVbuWatSnMjE5gs8FUgEPmNsnZA3NCWl9NG0ia04Pg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@bcoe/v8-coverage": "^0.2.3", + "@jest/console": "^29.7.0", + "@jest/test-result": "^29.7.0", + "@jest/transform": "^29.7.0", + "@jest/types": "^29.6.3", + "@jridgewell/trace-mapping": "^0.3.18", + "@types/node": "*", + "chalk": "^4.0.0", + "collect-v8-coverage": "^1.0.0", + "exit": "^0.1.2", + "glob": "^7.1.3", + "graceful-fs": "^4.2.9", + "istanbul-lib-coverage": 
"^3.0.0", + "istanbul-lib-instrument": "^6.0.0", + "istanbul-lib-report": "^3.0.0", + "istanbul-lib-source-maps": "^4.0.0", + "istanbul-reports": "^3.1.3", + "jest-message-util": "^29.7.0", + "jest-util": "^29.7.0", + "jest-worker": "^29.7.0", + "slash": "^3.0.0", + "string-length": "^4.0.1", + "strip-ansi": "^6.0.0", + "v8-to-istanbul": "^9.0.1" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "peerDependencies": { + "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" + }, + "peerDependenciesMeta": { + "node-notifier": { + "optional": true + } + } + }, + "node_modules/@jest/schemas": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/@jest/schemas/-/schemas-29.6.3.tgz", + "integrity": "sha512-mo5j5X+jIZmJQveBKeS/clAueipV7KgiX1vMgCxam1RNYiqE1w62n0/tJJnHtjW8ZHcQco5gY85jA3mi0L+nSA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@sinclair/typebox": "^0.27.8" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/source-map": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/@jest/source-map/-/source-map-29.6.3.tgz", + "integrity": "sha512-MHjT95QuipcPrpLM+8JMSzFx6eHp5Bm+4XeFDJlwsvVBjmKNiIAvasGK2fxz2WbGRlnvqehFbh07MMa7n3YJnw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/trace-mapping": "^0.3.18", + "callsites": "^3.0.0", + "graceful-fs": "^4.2.9" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/test-result": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/test-result/-/test-result-29.7.0.tgz", + "integrity": "sha512-Fdx+tv6x1zlkJPcWXmMDAG2HBnaR9XPSd5aDWQVsfrZmLVT3lU1cwyxLgRmXR9yrq4NBoEm9BMsfgFzTQAbJYA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/console": "^29.7.0", + "@jest/types": "^29.6.3", + "@types/istanbul-lib-coverage": "^2.0.0", + "collect-v8-coverage": "^1.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + 
"node_modules/@jest/test-sequencer": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/test-sequencer/-/test-sequencer-29.7.0.tgz", + "integrity": "sha512-GQwJ5WZVrKnOJuiYiAF52UNUJXgTZx1NHjFSEB0qEMmSZKAkdMoIzw/Cj6x6NF4AvV23AUqDpFzQkN/eYCYTxw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/test-result": "^29.7.0", + "graceful-fs": "^4.2.9", + "jest-haste-map": "^29.7.0", + "slash": "^3.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/transform": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/transform/-/transform-29.7.0.tgz", + "integrity": "sha512-ok/BTPFzFKVMwO5eOHRrvnBVHdRy9IrsrW1GpMaQ9MCnilNLXQKmAX8s1YXDFaai9xJpac2ySzV0YeRRECr2Vw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/core": "^7.11.6", + "@jest/types": "^29.6.3", + "@jridgewell/trace-mapping": "^0.3.18", + "babel-plugin-istanbul": "^6.1.1", + "chalk": "^4.0.0", + "convert-source-map": "^2.0.0", + "fast-json-stable-stringify": "^2.1.0", + "graceful-fs": "^4.2.9", + "jest-haste-map": "^29.7.0", + "jest-regex-util": "^29.6.3", + "jest-util": "^29.7.0", + "micromatch": "^4.0.4", + "pirates": "^4.0.4", + "slash": "^3.0.0", + "write-file-atomic": "^4.0.2" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/types": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/@jest/types/-/types-29.6.3.tgz", + "integrity": "sha512-u3UPsIilWKOM3F9CXtrG8LEJmNxwoCQC/XVj4IKYXvvpx7QIi/Kg1LI5uDmDpKlac62NUtX7eLjRh+jVZcLOzw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/schemas": "^29.6.3", + "@types/istanbul-lib-coverage": "^2.0.0", + "@types/istanbul-reports": "^3.0.0", + "@types/node": "*", + "@types/yargs": "^17.0.8", + "chalk": "^4.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jridgewell/gen-mapping": { + "version": "0.3.13", + "resolved": 
"https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.13.tgz", + "integrity": "sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.5.0", + "@jridgewell/trace-mapping": "^0.3.24" + } + }, + "node_modules/@jridgewell/remapping": { + "version": "2.3.5", + "resolved": "https://registry.npmjs.org/@jridgewell/remapping/-/remapping-2.3.5.tgz", + "integrity": "sha512-LI9u/+laYG4Ds1TDKSJW2YPrIlcVYOwi2fUC6xB43lueCjgxV4lffOCZCtYFiH6TNOX+tQKXx97T4IKHbhyHEQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/gen-mapping": "^0.3.5", + "@jridgewell/trace-mapping": "^0.3.24" + } + }, + "node_modules/@jridgewell/resolve-uri": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", + "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/sourcemap-codec": { + "version": "1.5.5", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.5.tgz", + "integrity": "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==", + "dev": true, + "license": "MIT" + }, + "node_modules/@jridgewell/trace-mapping": { + "version": "0.3.31", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.31.tgz", + "integrity": "sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/resolve-uri": "^3.1.0", + "@jridgewell/sourcemap-codec": "^1.4.14" + } + }, + "node_modules/@sinclair/typebox": { + "version": "0.27.8", + "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.27.8.tgz", + 
"integrity": "sha512-+Fj43pSMwJs4KRrH/938Uf+uAELIgVBmQzg/q1YG10djyfA3TnrU8N8XzqCh/okZdszqBQTZf96idMfE5lnwTA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@sinonjs/commons": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@sinonjs/commons/-/commons-3.0.1.tgz", + "integrity": "sha512-K3mCHKQ9sVh8o1C9cxkwxaOmXoAMlDxC1mYyHrjqOWEcBjYr76t96zL2zlj5dUGZ3HSw240X1qgH3Mjf1yJWpQ==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "type-detect": "4.0.8" + } + }, + "node_modules/@sinonjs/fake-timers": { + "version": "10.3.0", + "resolved": "https://registry.npmjs.org/@sinonjs/fake-timers/-/fake-timers-10.3.0.tgz", + "integrity": "sha512-V4BG07kuYSUkTCSBHG8G8TNhM+F19jXFWnQtzj+we8DrkpSBCee9Z3Ms8yiGer/dlmhe35/Xdgyo3/0rQKg7YA==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "@sinonjs/commons": "^3.0.0" + } + }, + "node_modules/@tsconfig/node10": { + "version": "1.0.12", + "resolved": "https://registry.npmjs.org/@tsconfig/node10/-/node10-1.0.12.tgz", + "integrity": "sha512-UCYBaeFvM11aU2y3YPZ//O5Rhj+xKyzy7mvcIoAjASbigy8mHMryP5cK7dgjlz2hWxh1g5pLw084E0a/wlUSFQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/@tsconfig/node12": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/@tsconfig/node12/-/node12-1.0.11.tgz", + "integrity": "sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag==", + "dev": true, + "license": "MIT" + }, + "node_modules/@tsconfig/node14": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/@tsconfig/node14/-/node14-1.0.3.tgz", + "integrity": "sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow==", + "dev": true, + "license": "MIT" + }, + "node_modules/@tsconfig/node16": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@tsconfig/node16/-/node16-1.0.4.tgz", + "integrity": 
"sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/babel__core": { + "version": "7.20.5", + "resolved": "https://registry.npmjs.org/@types/babel__core/-/babel__core-7.20.5.tgz", + "integrity": "sha512-qoQprZvz5wQFJwMDqeseRXWv3rqMvhgpbXFfVyWhbx9X47POIA6i/+dXefEmZKoAgOaTdaIgNSMqMIU61yRyzA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.20.7", + "@babel/types": "^7.20.7", + "@types/babel__generator": "*", + "@types/babel__template": "*", + "@types/babel__traverse": "*" + } + }, + "node_modules/@types/babel__generator": { + "version": "7.27.0", + "resolved": "https://registry.npmjs.org/@types/babel__generator/-/babel__generator-7.27.0.tgz", + "integrity": "sha512-ufFd2Xi92OAVPYsy+P4n7/U7e68fex0+Ee8gSG9KX7eo084CWiQ4sdxktvdl0bOPupXtVJPY19zk6EwWqUQ8lg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.0.0" + } + }, + "node_modules/@types/babel__template": { + "version": "7.4.4", + "resolved": "https://registry.npmjs.org/@types/babel__template/-/babel__template-7.4.4.tgz", + "integrity": "sha512-h/NUaSyG5EyxBIp8YRxo4RMe2/qQgvyowRwVMzhYhBCONbW8PUsg4lkFMrhgZhUe5z3L3MiLDuvyJ/CaPa2A8A==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.1.0", + "@babel/types": "^7.0.0" + } + }, + "node_modules/@types/babel__traverse": { + "version": "7.28.0", + "resolved": "https://registry.npmjs.org/@types/babel__traverse/-/babel__traverse-7.28.0.tgz", + "integrity": "sha512-8PvcXf70gTDZBgt9ptxJ8elBeBjcLOAcOtoO/mPJjtji1+CdGbHgm77om1GrsPxsiE+uXIpNSK64UYaIwQXd4Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.28.2" + } + }, + "node_modules/@types/graceful-fs": { + "version": "4.1.9", + "resolved": "https://registry.npmjs.org/@types/graceful-fs/-/graceful-fs-4.1.9.tgz", + "integrity": 
"sha512-olP3sd1qOEe5dXTSaFvQG+02VdRXcdytWLAZsAq1PecU8uqQAhkrnbli7DagjtXKW/Bl7YJbUsa8MPcuc8LHEQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/istanbul-lib-coverage": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.6.tgz", + "integrity": "sha512-2QF/t/auWm0lsy8XtKVPG19v3sSOQlJe/YHZgfjb/KBBHOGSV+J2q/S671rcq9uTBrLAXmZpqJiaQbMT+zNU1w==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/istanbul-lib-report": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@types/istanbul-lib-report/-/istanbul-lib-report-3.0.3.tgz", + "integrity": "sha512-NQn7AHQnk/RSLOxrBbGyJM/aVQ+pjj5HCgasFxc0K/KhoATfQ/47AyUl15I2yBUpihjmas+a+VJBOqecrFH+uA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/istanbul-lib-coverage": "*" + } + }, + "node_modules/@types/istanbul-reports": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/istanbul-reports/-/istanbul-reports-3.0.4.tgz", + "integrity": "sha512-pk2B1NWalF9toCRu6gjBzR69syFjP4Od8WRAX+0mmf9lAjCRicLOWc+ZrxZHx/0XRjotgkF9t6iaMJ+aXcOdZQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/istanbul-lib-report": "*" + } + }, + "node_modules/@types/jest": { + "version": "29.5.14", + "resolved": "https://registry.npmjs.org/@types/jest/-/jest-29.5.14.tgz", + "integrity": "sha512-ZN+4sdnLUbo8EVvVc2ao0GFW6oVrQRPn4K2lglySj7APvSrgzxHiNNK99us4WDMi57xxA2yggblIAMNhXOotLQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "expect": "^29.0.0", + "pretty-format": "^29.0.0" + } + }, + "node_modules/@types/node": { + "version": "22.7.9", + "resolved": "https://registry.npmjs.org/@types/node/-/node-22.7.9.tgz", + "integrity": "sha512-jrTfRC7FM6nChvU7X2KqcrgquofrWLFDeYC1hKfwNWomVvrn7JIksqf344WN2X/y8xrgqBd2dJATZV4GbatBfg==", + "dev": true, + "license": "MIT", + "dependencies": { + "undici-types": "~6.19.2" + } + }, + 
"node_modules/@types/stack-utils": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/@types/stack-utils/-/stack-utils-2.0.3.tgz", + "integrity": "sha512-9aEbYZ3TbYMznPdcdr3SmIrLXwC/AKZXQeCf9Pgao5CKb8CyHuEX5jzWPTkvregvhRJHcpRO6BFoGW9ycaOkYw==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/yargs": { + "version": "17.0.35", + "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-17.0.35.tgz", + "integrity": "sha512-qUHkeCyQFxMXg79wQfTtfndEC+N9ZZg76HJftDJp+qH2tV7Gj4OJi7l+PiWwJ+pWtW8GwSmqsDj/oymhrTWXjg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/yargs-parser": "*" + } + }, + "node_modules/@types/yargs-parser": { + "version": "21.0.3", + "resolved": "https://registry.npmjs.org/@types/yargs-parser/-/yargs-parser-21.0.3.tgz", + "integrity": "sha512-I4q9QU9MQv4oEOz4tAHJtNz1cwuLxn2F3xcc2iV5WdqLPpUnj30aUuxt1mAxYTG+oe8CZMV/+6rU4S4gRDzqtQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/acorn": { + "version": "8.15.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.15.0.tgz", + "integrity": "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==", + "dev": true, + "license": "MIT", + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/acorn-walk": { + "version": "8.3.4", + "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.3.4.tgz", + "integrity": "sha512-ueEepnujpqee2o5aIYnvHU6C0A42MNdsIDeqy5BydrkuC5R1ZuUFnm27EeFJGoEHJQgn3uleRvmTXaJgfXbt4g==", + "dev": true, + "license": "MIT", + "dependencies": { + "acorn": "^8.11.0" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/ansi-escapes": { + "version": "4.3.2", + "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.2.tgz", + "integrity": "sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "type-fest": "^0.21.3" + 
}, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "license": "MIT", + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/anymatch": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz", + "integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==", + "dev": true, + "license": "ISC", + "dependencies": { + "normalize-path": "^3.0.0", + "picomatch": "^2.0.4" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/arg": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/arg/-/arg-4.1.3.tgz", + "integrity": "sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==", + "dev": true, + "license": "MIT" + }, + "node_modules/argparse": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", + "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", + "dev": true, + "license": "MIT", + "dependencies": { + "sprintf-js": "~1.0.2" + } + }, + "node_modules/aws-cdk": { + "version": "2.1100.3", + "resolved": 
"https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.1100.3.tgz", + "integrity": "sha512-jeSamF+IwPJKhqMir7Cw+2IoeHsmNFc/SoDAlOS9BYM8Wrd0Q1jJd3GcJOFzsMcWv9mcBAP5o23amyKHu03dXA==", + "dev": true, + "license": "Apache-2.0", + "bin": { + "cdk": "bin/cdk" + }, + "engines": { + "node": ">= 18.0.0" + }, + "optionalDependencies": { + "fsevents": "2.3.2" + } + }, + "node_modules/aws-cdk-lib": { + "version": "2.248.0", + "resolved": "https://registry.npmjs.org/aws-cdk-lib/-/aws-cdk-lib-2.248.0.tgz", + "integrity": "sha512-PGQycx/OdyX+t0o6QUFI1KJAOLoyIVj2WwrN0syrwCi8lYxW2KzldZsW0X+/UN/ALNQwcjSr927ImTpuDOh+bg==", + "bundleDependencies": [ + "@balena/dockerignore", + "@aws-cdk/cloud-assembly-api", + "case", + "fs-extra", + "ignore", + "jsonschema", + "minimatch", + "punycode", + "semver", + "table", + "yaml", + "mime-types" + ], + "license": "Apache-2.0", + "dependencies": { + "@aws-cdk/asset-awscli-v1": "2.2.273", + "@aws-cdk/asset-node-proxy-agent-v6": "^2.1.1", + "@aws-cdk/cloud-assembly-api": "^2.2.0", + "@aws-cdk/cloud-assembly-schema": "^53.0.0", + "@balena/dockerignore": "^1.0.2", + "case": "1.6.3", + "fs-extra": "^11.3.3", + "ignore": "^5.3.2", + "jsonschema": "^1.5.0", + "mime-types": "^2.1.35", + "minimatch": "^10.2.3", + "punycode": "^2.3.1", + "semver": "^7.7.4", + "table": "^6.9.0", + "yaml": "1.10.3" + }, + "engines": { + "node": ">= 20.0.0" + }, + "peerDependencies": { + "constructs": "^10.5.0" + } + }, + "node_modules/aws-cdk-lib/node_modules/@aws-cdk/cloud-assembly-api": { + "version": "2.2.0", + "bundleDependencies": [ + "jsonschema", + "semver" + ], + "inBundle": true, + "license": "Apache-2.0", + "dependencies": { + "jsonschema": "~1.4.1", + "semver": "^7.7.4" + }, + "engines": { + "node": ">= 18.0.0" + }, + "peerDependencies": { + "@aws-cdk/cloud-assembly-schema": ">=53.0.0" + } + }, + "node_modules/aws-cdk-lib/node_modules/@aws-cdk/cloud-assembly-api/node_modules/jsonschema": { + "version": "1.4.1", + "inBundle": true, + "license": "MIT", + "engines": { + 
"node": "*" + } + }, + "node_modules/aws-cdk-lib/node_modules/@aws-cdk/cloud-assembly-api/node_modules/semver": { + "version": "7.7.4", + "inBundle": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/aws-cdk-lib/node_modules/@balena/dockerignore": { + "version": "1.0.2", + "inBundle": true, + "license": "Apache-2.0" + }, + "node_modules/aws-cdk-lib/node_modules/ajv": { + "version": "8.18.0", + "inBundle": true, + "license": "MIT", + "dependencies": { + "fast-deep-equal": "^3.1.3", + "fast-uri": "^3.0.1", + "json-schema-traverse": "^1.0.0", + "require-from-string": "^2.0.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/aws-cdk-lib/node_modules/ansi-regex": { + "version": "5.0.1", + "inBundle": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/aws-cdk-lib/node_modules/ansi-styles": { + "version": "4.3.0", + "inBundle": true, + "license": "MIT", + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/aws-cdk-lib/node_modules/astral-regex": { + "version": "2.0.0", + "inBundle": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/aws-cdk-lib/node_modules/balanced-match": { + "version": "4.0.4", + "inBundle": true, + "license": "MIT", + "engines": { + "node": "18 || 20 || >=22" + } + }, + "node_modules/aws-cdk-lib/node_modules/brace-expansion": { + "version": "5.0.5", + "inBundle": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^4.0.2" + }, + "engines": { + "node": "18 || 20 || >=22" + } + }, + "node_modules/aws-cdk-lib/node_modules/case": { + "version": "1.6.3", + "inBundle": true, + "license": "(MIT OR GPL-3.0-or-later)", + "engines": { + "node": ">= 0.8.0" + } + }, + 
"node_modules/aws-cdk-lib/node_modules/color-convert": { + "version": "2.0.1", + "inBundle": true, + "license": "MIT", + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/aws-cdk-lib/node_modules/color-name": { + "version": "1.1.4", + "inBundle": true, + "license": "MIT" + }, + "node_modules/aws-cdk-lib/node_modules/emoji-regex": { + "version": "8.0.0", + "inBundle": true, + "license": "MIT" + }, + "node_modules/aws-cdk-lib/node_modules/fast-deep-equal": { + "version": "3.1.3", + "inBundle": true, + "license": "MIT" + }, + "node_modules/aws-cdk-lib/node_modules/fast-uri": { + "version": "3.1.0", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } + ], + "inBundle": true, + "license": "BSD-3-Clause" + }, + "node_modules/aws-cdk-lib/node_modules/fs-extra": { + "version": "11.3.3", + "inBundle": true, + "license": "MIT", + "dependencies": { + "graceful-fs": "^4.2.0", + "jsonfile": "^6.0.1", + "universalify": "^2.0.0" + }, + "engines": { + "node": ">=14.14" + } + }, + "node_modules/aws-cdk-lib/node_modules/graceful-fs": { + "version": "4.2.11", + "inBundle": true, + "license": "ISC" + }, + "node_modules/aws-cdk-lib/node_modules/ignore": { + "version": "5.3.2", + "inBundle": true, + "license": "MIT", + "engines": { + "node": ">= 4" + } + }, + "node_modules/aws-cdk-lib/node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "inBundle": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/aws-cdk-lib/node_modules/json-schema-traverse": { + "version": "1.0.0", + "inBundle": true, + "license": "MIT" + }, + "node_modules/aws-cdk-lib/node_modules/jsonfile": { + "version": "6.2.0", + "inBundle": true, + "license": "MIT", + "dependencies": { + "universalify": "^2.0.0" + }, + "optionalDependencies": { + "graceful-fs": "^4.1.6" + } + }, + 
"node_modules/aws-cdk-lib/node_modules/jsonschema": { + "version": "1.5.0", + "inBundle": true, + "license": "MIT", + "engines": { + "node": "*" + } + }, + "node_modules/aws-cdk-lib/node_modules/lodash.truncate": { + "version": "4.4.2", + "inBundle": true, + "license": "MIT" + }, + "node_modules/aws-cdk-lib/node_modules/mime-db": { + "version": "1.52.0", + "inBundle": true, + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/aws-cdk-lib/node_modules/mime-types": { + "version": "2.1.35", + "inBundle": true, + "license": "MIT", + "dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/aws-cdk-lib/node_modules/minimatch": { + "version": "10.2.5", + "inBundle": true, + "license": "BlueOak-1.0.0", + "dependencies": { + "brace-expansion": "^5.0.5" + }, + "engines": { + "node": "18 || 20 || >=22" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/aws-cdk-lib/node_modules/punycode": { + "version": "2.3.1", + "inBundle": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/aws-cdk-lib/node_modules/require-from-string": { + "version": "2.0.2", + "inBundle": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/aws-cdk-lib/node_modules/semver": { + "version": "7.7.4", + "inBundle": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/aws-cdk-lib/node_modules/slice-ansi": { + "version": "4.0.0", + "inBundle": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.0.0", + "astral-regex": "^2.0.0", + "is-fullwidth-code-point": "^3.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/slice-ansi?sponsor=1" + } + }, + "node_modules/aws-cdk-lib/node_modules/string-width": { + "version": "4.2.3", + "inBundle": true, + "license": "MIT", + "dependencies": { + "emoji-regex": "^8.0.0", 
+ "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/aws-cdk-lib/node_modules/strip-ansi": { + "version": "6.0.1", + "inBundle": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/aws-cdk-lib/node_modules/table": { + "version": "6.9.0", + "inBundle": true, + "license": "BSD-3-Clause", + "dependencies": { + "ajv": "^8.0.1", + "lodash.truncate": "^4.4.2", + "slice-ansi": "^4.0.0", + "string-width": "^4.2.3", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=10.0.0" + } + }, + "node_modules/aws-cdk-lib/node_modules/universalify": { + "version": "2.0.1", + "inBundle": true, + "license": "MIT", + "engines": { + "node": ">= 10.0.0" + } + }, + "node_modules/aws-cdk-lib/node_modules/yaml": { + "version": "1.10.3", + "inBundle": true, + "license": "ISC", + "engines": { + "node": ">= 6" + } + }, + "node_modules/babel-jest": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/babel-jest/-/babel-jest-29.7.0.tgz", + "integrity": "sha512-BrvGY3xZSwEcCzKvKsCi2GgHqDqsYkOP4/by5xCgIwGXQxIEh+8ew3gmrE1y7XRR6LHZIj6yLYnUi/mm2KXKBg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/transform": "^29.7.0", + "@types/babel__core": "^7.1.14", + "babel-plugin-istanbul": "^6.1.1", + "babel-preset-jest": "^29.6.3", + "chalk": "^4.0.0", + "graceful-fs": "^4.2.9", + "slash": "^3.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "peerDependencies": { + "@babel/core": "^7.8.0" + } + }, + "node_modules/babel-plugin-istanbul": { + "version": "6.1.1", + "resolved": "https://registry.npmjs.org/babel-plugin-istanbul/-/babel-plugin-istanbul-6.1.1.tgz", + "integrity": "sha512-Y1IQok9821cC9onCx5otgFfRm7Lm+I+wwxOx738M/WLPZ9Q42m4IG5W0FNX8WLL2gYMZo3JkuXIH2DOpWM+qwA==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "@babel/helper-plugin-utils": "^7.0.0", + 
"@istanbuljs/load-nyc-config": "^1.0.0", + "@istanbuljs/schema": "^0.1.2", + "istanbul-lib-instrument": "^5.0.4", + "test-exclude": "^6.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/babel-plugin-istanbul/node_modules/istanbul-lib-instrument": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/istanbul-lib-instrument/-/istanbul-lib-instrument-5.2.1.tgz", + "integrity": "sha512-pzqtp31nLv/XFOzXGuvhCb8qhjmTVo5vjVk19XE4CRlSWz0KoeJ3bw9XsA7nOp9YBf4qHjwBxkDzKcME/J29Yg==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "@babel/core": "^7.12.3", + "@babel/parser": "^7.14.7", + "@istanbuljs/schema": "^0.1.2", + "istanbul-lib-coverage": "^3.2.0", + "semver": "^6.3.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/babel-plugin-jest-hoist": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-29.6.3.tgz", + "integrity": "sha512-ESAc/RJvGTFEzRwOTT4+lNDk/GNHMkKbNzsvT0qKRfDyyYTskxB5rnU2njIDYVxXCBHHEI1c0YwHob3WaYujOg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/template": "^7.3.3", + "@babel/types": "^7.3.3", + "@types/babel__core": "^7.1.14", + "@types/babel__traverse": "^7.0.6" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/babel-preset-current-node-syntax": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/babel-preset-current-node-syntax/-/babel-preset-current-node-syntax-1.2.0.tgz", + "integrity": "sha512-E/VlAEzRrsLEb2+dv8yp3bo4scof3l9nR4lrld+Iy5NyVqgVYUJnDAmunkhPMisRI32Qc4iRiz425d8vM++2fg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/plugin-syntax-async-generators": "^7.8.4", + "@babel/plugin-syntax-bigint": "^7.8.3", + "@babel/plugin-syntax-class-properties": "^7.12.13", + "@babel/plugin-syntax-class-static-block": "^7.14.5", + "@babel/plugin-syntax-import-attributes": "^7.24.7", + "@babel/plugin-syntax-import-meta": "^7.10.4", + 
"@babel/plugin-syntax-json-strings": "^7.8.3", + "@babel/plugin-syntax-logical-assignment-operators": "^7.10.4", + "@babel/plugin-syntax-nullish-coalescing-operator": "^7.8.3", + "@babel/plugin-syntax-numeric-separator": "^7.10.4", + "@babel/plugin-syntax-object-rest-spread": "^7.8.3", + "@babel/plugin-syntax-optional-catch-binding": "^7.8.3", + "@babel/plugin-syntax-optional-chaining": "^7.8.3", + "@babel/plugin-syntax-private-property-in-object": "^7.14.5", + "@babel/plugin-syntax-top-level-await": "^7.14.5" + }, + "peerDependencies": { + "@babel/core": "^7.0.0 || ^8.0.0-0" + } + }, + "node_modules/babel-preset-jest": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/babel-preset-jest/-/babel-preset-jest-29.6.3.tgz", + "integrity": "sha512-0B3bhxR6snWXJZtR/RliHTDPRgn1sNHOR0yVtq/IiQFyuOVjFS+wuio/R4gSNkyYmKmJB4wGZv2NZanmKmTnNA==", + "dev": true, + "license": "MIT", + "dependencies": { + "babel-plugin-jest-hoist": "^29.6.3", + "babel-preset-current-node-syntax": "^1.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "dev": true, + "license": "MIT" + }, + "node_modules/baseline-browser-mapping": { + "version": "2.9.11", + "resolved": "https://registry.npmjs.org/baseline-browser-mapping/-/baseline-browser-mapping-2.9.11.tgz", + "integrity": "sha512-Sg0xJUNDU1sJNGdfGWhVHX0kkZ+HWcvmVymJbj6NSgZZmW/8S9Y2HQ5euytnIgakgxN6papOAWiwDo1ctFDcoQ==", + "dev": true, + "license": "Apache-2.0", + "bin": { + "baseline-browser-mapping": "dist/cli.js" + } + }, + "node_modules/brace-expansion": { + "version": "1.1.13", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.13.tgz", + "integrity": 
"sha512-9ZLprWS6EENmhEOpjCYW2c8VkmOvckIJZfkr7rBW6dObmfgJ/L1GpSYW5Hpo9lDz4D1+n0Ckz8rU7FwHDQiG/w==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/braces": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", + "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", + "dev": true, + "license": "MIT", + "dependencies": { + "fill-range": "^7.1.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/browserslist": { + "version": "4.28.1", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.28.1.tgz", + "integrity": "sha512-ZC5Bd0LgJXgwGqUknZY/vkUQ04r8NXnJZ3yYi4vDmSiZmC/pdSN0NbNRPxZpbtO4uAfDUAFffO8IZoM3Gj8IkA==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "baseline-browser-mapping": "^2.9.0", + "caniuse-lite": "^1.0.30001759", + "electron-to-chromium": "^1.5.263", + "node-releases": "^2.0.27", + "update-browserslist-db": "^1.2.0" + }, + "bin": { + "browserslist": "cli.js" + }, + "engines": { + "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7" + } + }, + "node_modules/bs-logger": { + "version": "0.2.6", + "resolved": "https://registry.npmjs.org/bs-logger/-/bs-logger-0.2.6.tgz", + "integrity": "sha512-pd8DCoxmbgc7hyPKOvxtqNcjYoOsABPQdcCUjGp3d42VR2CX1ORhk2A87oqqu5R1kk+76nsxZupkmyd+MVtCog==", + "dev": true, + "license": "MIT", + "dependencies": { + "fast-json-stable-stringify": "2.x" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/bser": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/bser/-/bser-2.1.1.tgz", + "integrity": 
"sha512-gQxTNE/GAfIIrmHLUE3oJyp5FO6HRBfhjnw4/wMmA63ZGDJnWBmgY/lyQBpnDUkGmAhbSe39tx2d/iTOAfglwQ==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "node-int64": "^0.4.0" + } + }, + "node_modules/buffer-from": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz", + "integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/callsites": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", + "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/camelcase": { + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", + "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/caniuse-lite": { + "version": "1.0.30001762", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001762.tgz", + "integrity": "sha512-PxZwGNvH7Ak8WX5iXzoK1KPZttBXNPuaOvI2ZYU7NrlM+d9Ov+TUvlLOBNGzVXAntMSMMlJPd+jY6ovrVjSmUw==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/caniuse-lite" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "CC-BY-4.0" + }, + "node_modules/cdk-nag": { + "version": "2.37.55", + "resolved": "https://registry.npmjs.org/cdk-nag/-/cdk-nag-2.37.55.tgz", + "integrity": "sha512-xcAkygwbph3pp7N0UEzJBmXUH/MIsluV7DYJSeZ/V3yCr0Y0QaRGO298WyD6mi4K+Rmnpl+EJoWUxcOblOqLKA==", + "dev": true, + "license": "Apache-2.0", + 
"peerDependencies": { + "aws-cdk-lib": "^2.176.0", + "constructs": "^10.0.5" + } + }, + "node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/char-regex": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/char-regex/-/char-regex-1.0.2.tgz", + "integrity": "sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + } + }, + "node_modules/ci-info": { + "version": "3.9.0", + "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-3.9.0.tgz", + "integrity": "sha512-NIxF55hv4nSqQswkAeiOi1r83xy8JldOFDTWiug55KBu9Jnblncd2U6ViHmYgHf01TPZS77NJBhBMKdWj9HQMQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/sibiraj-s" + } + ], + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/cjs-module-lexer": { + "version": "1.4.3", + "resolved": "https://registry.npmjs.org/cjs-module-lexer/-/cjs-module-lexer-1.4.3.tgz", + "integrity": "sha512-9z8TZaGM1pfswYeXrUpzPrkx8UnWYdhJclsiYMm6x/w5+nN+8Tf/LnAgfLGQCm59qAOxU8WwHEq2vNwF6i4j+Q==", + "dev": true, + "license": "MIT" + }, + "node_modules/cliui": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz", + "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==", + "dev": true, + "license": "ISC", + "dependencies": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.1", + "wrap-ansi": "^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, + 
"node_modules/co": { + "version": "4.6.0", + "resolved": "https://registry.npmjs.org/co/-/co-4.6.0.tgz", + "integrity": "sha512-QVb0dM5HvG+uaxitm8wONl7jltx8dqhfU33DcqtOZcLSVIKSDDLDi7+0LbAKiyI8hD9u42m2YxXSkMGWThaecQ==", + "dev": true, + "license": "MIT", + "engines": { + "iojs": ">= 1.0.0", + "node": ">= 0.12.0" + } + }, + "node_modules/collect-v8-coverage": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/collect-v8-coverage/-/collect-v8-coverage-1.0.3.tgz", + "integrity": "sha512-1L5aqIkwPfiodaMgQunkF1zRhNqifHBmtbbbxcr6yVxxBnliw4TDOW6NxpO8DJLgJ16OT+Y4ztZqP6p/FtXnAw==", + "dev": true, + "license": "MIT" + }, + "node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true, + "license": "MIT" + }, + "node_modules/concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", + "dev": true, + "license": "MIT" + }, + "node_modules/constructs": { + "version": "10.5.1", + "resolved": "https://registry.npmjs.org/constructs/-/constructs-10.5.1.tgz", + "integrity": "sha512-f/TfFXiS3G/yVIXDjOQn9oTlyu9Wo7Fxyjj7lb8r92iO81jR2uST+9MstxZTmDGx/CgIbxCXkFXgupnLTNxQZg==", + "license": "Apache-2.0" + }, + "node_modules/convert-source-map": { + "version": "2.0.0", + "resolved": 
"https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz", + "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==", + "dev": true, + "license": "MIT" + }, + "node_modules/create-jest": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/create-jest/-/create-jest-29.7.0.tgz", + "integrity": "sha512-Adz2bdH0Vq3F53KEMJOoftQFutWCukm6J24wbPWRO4k1kMY7gS7ds/uoJkNuV8wDCtWWnuwGcJwpWcih+zEW1Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "^29.6.3", + "chalk": "^4.0.0", + "exit": "^0.1.2", + "graceful-fs": "^4.2.9", + "jest-config": "^29.7.0", + "jest-util": "^29.7.0", + "prompts": "^2.0.1" + }, + "bin": { + "create-jest": "bin/create-jest.js" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/create-require": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/create-require/-/create-require-1.1.1.tgz", + "integrity": "sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/cross-spawn": { + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", + "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", + "dev": true, + "license": "MIT", + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/debug": { + "version": "4.4.3", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", + "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + 
"node_modules/dedent": { + "version": "1.7.1", + "resolved": "https://registry.npmjs.org/dedent/-/dedent-1.7.1.tgz", + "integrity": "sha512-9JmrhGZpOlEgOLdQgSm0zxFaYoQon408V1v49aqTWuXENVlnCuY9JBZcXZiCsZQWDjTm5Qf/nIvAy77mXDAjEg==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "babel-plugin-macros": "^3.1.0" + }, + "peerDependenciesMeta": { + "babel-plugin-macros": { + "optional": true + } + } + }, + "node_modules/deepmerge": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-4.3.1.tgz", + "integrity": "sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/detect-newline": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/detect-newline/-/detect-newline-3.1.0.tgz", + "integrity": "sha512-TLz+x/vEXm/Y7P7wn1EJFNLxYpUD4TgMosxY6fAVJUnJMbupHBOncxyWUG9OpTaH9EBD7uFI5LfEgmMOc54DsA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/diff": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.4.tgz", + "integrity": "sha512-X07nttJQkwkfKfvTPG/KSnE2OMdcUCao6+eXF3wmnIQRn2aPAHH3VxDbDOdegkd6JbPsXqShpvEOHfAT+nCNwQ==", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.3.1" + } + }, + "node_modules/diff-sequences": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/diff-sequences/-/diff-sequences-29.6.3.tgz", + "integrity": "sha512-EjePK1srD3P08o2j4f0ExnylqRs5B9tJjcp9t1krH2qRi8CCdsYfwe9JgSLurFBWwq4uOlipzfk5fHNvwFKr8Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/electron-to-chromium": { + "version": "1.5.267", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.267.tgz", + "integrity": 
"sha512-0Drusm6MVRXSOJpGbaSVgcQsuB4hEkMpHXaVstcPmhu5LIedxs1xNK/nIxmQIU/RPC0+1/o0AVZfBTkTNJOdUw==", + "dev": true, + "license": "ISC" + }, + "node_modules/emittery": { + "version": "0.13.1", + "resolved": "https://registry.npmjs.org/emittery/-/emittery-0.13.1.tgz", + "integrity": "sha512-DeWwawk6r5yR9jFgnDKYt4sLS0LmHJJi3ZOnb5/JdbYwj3nW+FxQnHIjhBKz8YLC7oRNPVM9NQ47I3CVx34eqQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sindresorhus/emittery?sponsor=1" + } + }, + "node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true, + "license": "MIT" + }, + "node_modules/error-ex": { + "version": "1.3.4", + "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.4.tgz", + "integrity": "sha512-sqQamAnR14VgCr1A618A3sGrygcpK+HEbenA/HiEAkkUwcZIIB/tgWqHFxWgOyDh4nB4JCRimh79dR5Ywc9MDQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-arrayish": "^0.2.1" + } + }, + "node_modules/escalade": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", + "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/escape-string-regexp": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz", + "integrity": "sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/esprima": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", + "integrity": 
"sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", + "dev": true, + "license": "BSD-2-Clause", + "bin": { + "esparse": "bin/esparse.js", + "esvalidate": "bin/esvalidate.js" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/execa": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", + "integrity": "sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==", + "dev": true, + "license": "MIT", + "dependencies": { + "cross-spawn": "^7.0.3", + "get-stream": "^6.0.0", + "human-signals": "^2.1.0", + "is-stream": "^2.0.0", + "merge-stream": "^2.0.0", + "npm-run-path": "^4.0.1", + "onetime": "^5.1.2", + "signal-exit": "^3.0.3", + "strip-final-newline": "^2.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sindresorhus/execa?sponsor=1" + } + }, + "node_modules/exit": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/exit/-/exit-0.1.2.tgz", + "integrity": "sha512-Zk/eNKV2zbjpKzrsQ+n1G6poVbErQxJ0LBOJXaKZ1EViLzH+hrLu9cdXI4zw9dBQJslwBEpbQ2P1oS7nDxs6jQ==", + "dev": true, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/expect": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/expect/-/expect-29.7.0.tgz", + "integrity": "sha512-2Zks0hf1VLFYI1kbh0I5jP3KHHyCHpkfyHBzsSXRFgl/Bg9mWYfMW8oD+PdMPlEwy5HNsR9JutYy6pMeOh61nw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/expect-utils": "^29.7.0", + "jest-get-type": "^29.6.3", + "jest-matcher-utils": "^29.7.0", + "jest-message-util": "^29.7.0", + "jest-util": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/fast-json-stable-stringify": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": 
"sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", + "dev": true, + "license": "MIT" + }, + "node_modules/fb-watchman": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/fb-watchman/-/fb-watchman-2.0.2.tgz", + "integrity": "sha512-p5161BqbuCaSnB8jIbzQHOlpgsPmK5rJVDfDKO91Axs5NC1uu3HRQm6wt9cd9/+GtQQIO53JdGXXoyDpTAsgYA==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "bser": "2.1.1" + } + }, + "node_modules/fill-range": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", + "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", + "dev": true, + "license": "MIT", + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/find-up": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", + "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", + "dev": true, + "license": "MIT", + "dependencies": { + "locate-path": "^5.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", + "dev": true, + "license": "ISC" + }, + "node_modules/fsevents": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz", + "integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/function-bind": { + "version": "1.1.2", + 
"resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/gensync": { + "version": "1.0.0-beta.2", + "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz", + "integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "dev": true, + "license": "ISC", + "engines": { + "node": "6.* || 8.* || >= 10.*" + } + }, + "node_modules/get-package-type": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/get-package-type/-/get-package-type-0.1.0.tgz", + "integrity": "sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/get-stream": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", + "integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "deprecated": "Glob versions prior to v9 are no 
longer supported", + "dev": true, + "license": "ISC", + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/graceful-fs": { + "version": "4.2.11", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", + "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/handlebars": { + "version": "4.7.9", + "resolved": "https://registry.npmjs.org/handlebars/-/handlebars-4.7.9.tgz", + "integrity": "sha512-4E71E0rpOaQuJR2A3xDZ+GM1HyWYv1clR58tC8emQNeQe3RH7MAzSbat+V0wG78LQBo6m6bzSG/L4pBuCsgnUQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "minimist": "^1.2.5", + "neo-async": "^2.6.2", + "source-map": "^0.6.1", + "wordwrap": "^1.0.0" + }, + "bin": { + "handlebars": "bin/handlebars" + }, + "engines": { + "node": ">=0.4.7" + }, + "optionalDependencies": { + "uglify-js": "^3.1.4" + } + }, + "node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/hasown": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/html-escaper": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/html-escaper/-/html-escaper-2.0.2.tgz", + 
"integrity": "sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==", + "dev": true, + "license": "MIT" + }, + "node_modules/human-signals": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz", + "integrity": "sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=10.17.0" + } + }, + "node_modules/import-local": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/import-local/-/import-local-3.2.0.tgz", + "integrity": "sha512-2SPlun1JUPWoM6t3F0dw0FkCF/jWY8kttcY4f599GLTSjh2OCuuhdTkJQsEcZzBqbXZGKMK2OqW1oZsjtf/gQA==", + "dev": true, + "license": "MIT", + "dependencies": { + "pkg-dir": "^4.2.0", + "resolve-cwd": "^3.0.0" + }, + "bin": { + "import-local-fixture": "fixtures/cli.js" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/imurmurhash": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", + "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.8.19" + } + }, + "node_modules/inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", + "deprecated": "This module is not supported, and leaks memory. Do not use it. 
Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful.", + "dev": true, + "license": "ISC", + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/is-arrayish": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", + "integrity": "sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==", + "dev": true, + "license": "MIT" + }, + "node_modules/is-core-module": { + "version": "2.16.1", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.16.1.tgz", + "integrity": "sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w==", + "dev": true, + "license": "MIT", + "dependencies": { + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/is-generator-fn": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-generator-fn/-/is-generator-fn-2.1.0.tgz", + "integrity": "sha512-cTIB4yPYL/Grw0EaSzASzg6bBy9gqCofvWN8okThAYIxKJZC+udlRAmGbM0XLeniEJSs8uEgHPGuHSe1XsOLSQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/is-number": { + "version": 
"7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/is-stream": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", + "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "dev": true, + "license": "ISC" + }, + "node_modules/istanbul-lib-coverage": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.2.tgz", + "integrity": "sha512-O8dpsF+r0WV/8MNRKfnmrtCWhuKjxrq2w+jpzBL5UZKTi2LeVWnWOmWRxFlesJONmc+wLAGvKQZEOanko0LFTg==", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=8" + } + }, + "node_modules/istanbul-lib-instrument": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/istanbul-lib-instrument/-/istanbul-lib-instrument-6.0.3.tgz", + "integrity": "sha512-Vtgk7L/R2JHyyGW07spoFlB8/lpjiOLTjMdms6AFMraYt3BaJauod/NGrfnVG/y4Ix1JEuMRPDPEj2ua+zz1/Q==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "@babel/core": "^7.23.9", + "@babel/parser": "^7.23.9", + "@istanbuljs/schema": "^0.1.3", + "istanbul-lib-coverage": "^3.2.0", + "semver": "^7.5.4" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/istanbul-lib-instrument/node_modules/semver": { + "version": "7.7.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.3.tgz", 
+ "integrity": "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/istanbul-lib-report": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/istanbul-lib-report/-/istanbul-lib-report-3.0.1.tgz", + "integrity": "sha512-GCfE1mtsHGOELCU8e/Z7YWzpmybrx/+dSTfLrvY8qRmaY6zXTKWn6WQIjaAFw069icm6GVMNkgu0NzI4iPZUNw==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "istanbul-lib-coverage": "^3.0.0", + "make-dir": "^4.0.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/istanbul-lib-source-maps": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/istanbul-lib-source-maps/-/istanbul-lib-source-maps-4.0.1.tgz", + "integrity": "sha512-n3s8EwkdFIJCG3BPKBYvskgXGoy88ARzvegkitk60NxRdwltLOTaH7CUiMRXvwYorl0Q712iEjcWB+fK/MrWVw==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "debug": "^4.1.1", + "istanbul-lib-coverage": "^3.0.0", + "source-map": "^0.6.1" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/istanbul-reports": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/istanbul-reports/-/istanbul-reports-3.2.0.tgz", + "integrity": "sha512-HGYWWS/ehqTV3xN10i23tkPkpH46MLCIMFNCaaKNavAXTF1RkqxawEPtnjnGZ6XKSInBKkiOA5BKS+aZiY3AvA==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "html-escaper": "^2.0.0", + "istanbul-lib-report": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/jest": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest/-/jest-29.7.0.tgz", + "integrity": "sha512-NIy3oAFp9shda19hy4HK0HRTWKtPJmGdnvywu01nOqNC2vZg+Z+fvJDxpMQA88eb2I9EcafcdjYgsDthnYTvGw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/core": "^29.7.0", + "@jest/types": "^29.6.3", + "import-local": "^3.0.2", + 
"jest-cli": "^29.7.0" + }, + "bin": { + "jest": "bin/jest.js" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "peerDependencies": { + "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" + }, + "peerDependenciesMeta": { + "node-notifier": { + "optional": true + } + } + }, + "node_modules/jest-changed-files": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-changed-files/-/jest-changed-files-29.7.0.tgz", + "integrity": "sha512-fEArFiwf1BpQ+4bXSprcDc3/x4HSzL4al2tozwVpDFpsxALjLYdyiIK4e5Vz66GQJIbXJ82+35PtysofptNX2w==", + "dev": true, + "license": "MIT", + "dependencies": { + "execa": "^5.0.0", + "jest-util": "^29.7.0", + "p-limit": "^3.1.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-circus": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-circus/-/jest-circus-29.7.0.tgz", + "integrity": "sha512-3E1nCMgipcTkCocFwM90XXQab9bS+GMsjdpmPrlelaxwD93Ad8iVEjX/vvHPdLPnFf+L40u+5+iutRdA1N9myw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/environment": "^29.7.0", + "@jest/expect": "^29.7.0", + "@jest/test-result": "^29.7.0", + "@jest/types": "^29.6.3", + "@types/node": "*", + "chalk": "^4.0.0", + "co": "^4.6.0", + "dedent": "^1.0.0", + "is-generator-fn": "^2.0.0", + "jest-each": "^29.7.0", + "jest-matcher-utils": "^29.7.0", + "jest-message-util": "^29.7.0", + "jest-runtime": "^29.7.0", + "jest-snapshot": "^29.7.0", + "jest-util": "^29.7.0", + "p-limit": "^3.1.0", + "pretty-format": "^29.7.0", + "pure-rand": "^6.0.0", + "slash": "^3.0.0", + "stack-utils": "^2.0.3" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-cli": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-cli/-/jest-cli-29.7.0.tgz", + "integrity": "sha512-OVVobw2IubN/GSYsxETi+gOe7Ka59EFMR/twOU3Jb2GnKKeMGJB5SGUUrEz3SFVmJASUdZUzy83sLNNQ2gZslg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/core": 
"^29.7.0", + "@jest/test-result": "^29.7.0", + "@jest/types": "^29.6.3", + "chalk": "^4.0.0", + "create-jest": "^29.7.0", + "exit": "^0.1.2", + "import-local": "^3.0.2", + "jest-config": "^29.7.0", + "jest-util": "^29.7.0", + "jest-validate": "^29.7.0", + "yargs": "^17.3.1" + }, + "bin": { + "jest": "bin/jest.js" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "peerDependencies": { + "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" + }, + "peerDependenciesMeta": { + "node-notifier": { + "optional": true + } + } + }, + "node_modules/jest-config": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-config/-/jest-config-29.7.0.tgz", + "integrity": "sha512-uXbpfeQ7R6TZBqI3/TxCU4q4ttk3u0PJeC+E0zbfSoSjq6bJ7buBPxzQPL0ifrkY4DNu4JUdk0ImlBUYi840eQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/core": "^7.11.6", + "@jest/test-sequencer": "^29.7.0", + "@jest/types": "^29.6.3", + "babel-jest": "^29.7.0", + "chalk": "^4.0.0", + "ci-info": "^3.2.0", + "deepmerge": "^4.2.2", + "glob": "^7.1.3", + "graceful-fs": "^4.2.9", + "jest-circus": "^29.7.0", + "jest-environment-node": "^29.7.0", + "jest-get-type": "^29.6.3", + "jest-regex-util": "^29.6.3", + "jest-resolve": "^29.7.0", + "jest-runner": "^29.7.0", + "jest-util": "^29.7.0", + "jest-validate": "^29.7.0", + "micromatch": "^4.0.4", + "parse-json": "^5.2.0", + "pretty-format": "^29.7.0", + "slash": "^3.0.0", + "strip-json-comments": "^3.1.1" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "peerDependencies": { + "@types/node": "*", + "ts-node": ">=9.0.0" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + }, + "ts-node": { + "optional": true + } + } + }, + "node_modules/jest-diff": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-diff/-/jest-diff-29.7.0.tgz", + "integrity": "sha512-LMIgiIrhigmPrs03JHpxUh2yISK3vLFPkAodPeo0+BuF7wA2FoQbkEg1u8gBYBThncu7e1oEDUfIXVuTqLRUjw==", + "dev": true, + "license": 
"MIT", + "dependencies": { + "chalk": "^4.0.0", + "diff-sequences": "^29.6.3", + "jest-get-type": "^29.6.3", + "pretty-format": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-docblock": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-docblock/-/jest-docblock-29.7.0.tgz", + "integrity": "sha512-q617Auw3A612guyaFgsbFeYpNP5t2aoUNLwBUbc/0kD1R4t9ixDbyFTHd1nok4epoVFpr7PmeWHrhvuV3XaJ4g==", + "dev": true, + "license": "MIT", + "dependencies": { + "detect-newline": "^3.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-each": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-each/-/jest-each-29.7.0.tgz", + "integrity": "sha512-gns+Er14+ZrEoC5fhOfYCY1LOHHr0TI+rQUHZS8Ttw2l7gl+80eHc/gFf2Ktkw0+SIACDTeWvpFcv3B04VembQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "^29.6.3", + "chalk": "^4.0.0", + "jest-get-type": "^29.6.3", + "jest-util": "^29.7.0", + "pretty-format": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-environment-node": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-environment-node/-/jest-environment-node-29.7.0.tgz", + "integrity": "sha512-DOSwCRqXirTOyheM+4d5YZOrWcdu0LNZ87ewUoywbcb2XR4wKgqiG8vNeYwhjFMbEkfju7wx2GYH0P2gevGvFw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/environment": "^29.7.0", + "@jest/fake-timers": "^29.7.0", + "@jest/types": "^29.6.3", + "@types/node": "*", + "jest-mock": "^29.7.0", + "jest-util": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-get-type": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/jest-get-type/-/jest-get-type-29.6.3.tgz", + "integrity": "sha512-zrteXnqYxfQh7l5FHyL38jL39di8H8rHoecLH3JNxH3BwOrBsNeabdap5e0I23lD4HHI8W5VFBZqG4Eaq5LNcw==", + "dev": true, + "license": 
"MIT", + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-haste-map": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-haste-map/-/jest-haste-map-29.7.0.tgz", + "integrity": "sha512-fP8u2pyfqx0K1rGn1R9pyE0/KTn+G7PxktWidOBTqFPLYX0b9ksaMFkhK5vrS3DVun09pckLdlx90QthlW7AmA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "^29.6.3", + "@types/graceful-fs": "^4.1.3", + "@types/node": "*", + "anymatch": "^3.0.3", + "fb-watchman": "^2.0.0", + "graceful-fs": "^4.2.9", + "jest-regex-util": "^29.6.3", + "jest-util": "^29.7.0", + "jest-worker": "^29.7.0", + "micromatch": "^4.0.4", + "walker": "^1.0.8" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "optionalDependencies": { + "fsevents": "^2.3.2" + } + }, + "node_modules/jest-leak-detector": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-leak-detector/-/jest-leak-detector-29.7.0.tgz", + "integrity": "sha512-kYA8IJcSYtST2BY9I+SMC32nDpBT3J2NvWJx8+JCuCdl/CR1I4EKUJROiP8XtCcxqgTTBGJNdbB1A8XRKbTetw==", + "dev": true, + "license": "MIT", + "dependencies": { + "jest-get-type": "^29.6.3", + "pretty-format": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-matcher-utils": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-matcher-utils/-/jest-matcher-utils-29.7.0.tgz", + "integrity": "sha512-sBkD+Xi9DtcChsI3L3u0+N0opgPYnCRPtGcQYrgXmR+hmt/fYfWAL0xRXYU8eWOdfuLgBe0YCW3AFtnRLagq/g==", + "dev": true, + "license": "MIT", + "dependencies": { + "chalk": "^4.0.0", + "jest-diff": "^29.7.0", + "jest-get-type": "^29.6.3", + "pretty-format": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-message-util": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-message-util/-/jest-message-util-29.7.0.tgz", + "integrity": 
"sha512-GBEV4GRADeP+qtB2+6u61stea8mGcOT4mCtrYISZwfu9/ISHFJ/5zOMXYbpBE9RsS5+Gb63DW4FgmnKJ79Kf6w==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.12.13", + "@jest/types": "^29.6.3", + "@types/stack-utils": "^2.0.0", + "chalk": "^4.0.0", + "graceful-fs": "^4.2.9", + "micromatch": "^4.0.4", + "pretty-format": "^29.7.0", + "slash": "^3.0.0", + "stack-utils": "^2.0.3" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-mock": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-mock/-/jest-mock-29.7.0.tgz", + "integrity": "sha512-ITOMZn+UkYS4ZFh83xYAOzWStloNzJFO2s8DWrE4lhtGD+AorgnbkiKERe4wQVBydIGPx059g6riW5Btp6Llnw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "^29.6.3", + "@types/node": "*", + "jest-util": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-pnp-resolver": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/jest-pnp-resolver/-/jest-pnp-resolver-1.2.3.tgz", + "integrity": "sha512-+3NpwQEnRoIBtx4fyhblQDPgJI0H1IEIkX7ShLUjPGA7TtUTvI1oiKi3SR4oBR0hQhQR80l4WAe5RrXBwWMA8w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + }, + "peerDependencies": { + "jest-resolve": "*" + }, + "peerDependenciesMeta": { + "jest-resolve": { + "optional": true + } + } + }, + "node_modules/jest-regex-util": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/jest-regex-util/-/jest-regex-util-29.6.3.tgz", + "integrity": "sha512-KJJBsRCyyLNWCNBOvZyRDnAIfUiRJ8v+hOBQYGn8gDyF3UegwiP4gwRR3/SDa42g1YbVycTidUF3rKjyLFDWbg==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-resolve": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-resolve/-/jest-resolve-29.7.0.tgz", + "integrity": 
"sha512-IOVhZSrg+UvVAshDSDtHyFCCBUl/Q3AAJv8iZ6ZjnZ74xzvwuzLXid9IIIPgTnY62SJjfuupMKZsZQRsCvxEgA==", + "dev": true, + "license": "MIT", + "dependencies": { + "chalk": "^4.0.0", + "graceful-fs": "^4.2.9", + "jest-haste-map": "^29.7.0", + "jest-pnp-resolver": "^1.2.2", + "jest-util": "^29.7.0", + "jest-validate": "^29.7.0", + "resolve": "^1.20.0", + "resolve.exports": "^2.0.0", + "slash": "^3.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-resolve-dependencies": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-resolve-dependencies/-/jest-resolve-dependencies-29.7.0.tgz", + "integrity": "sha512-un0zD/6qxJ+S0et7WxeI3H5XSe9lTBBR7bOHCHXkKR6luG5mwDDlIzVQ0V5cZCuoTgEdcdwzTghYkTWfubi+nA==", + "dev": true, + "license": "MIT", + "dependencies": { + "jest-regex-util": "^29.6.3", + "jest-snapshot": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-runner": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-runner/-/jest-runner-29.7.0.tgz", + "integrity": "sha512-fsc4N6cPCAahybGBfTRcq5wFR6fpLznMg47sY5aDpsoejOcVYFb07AHuSnR0liMcPTgBsA3ZJL6kFOjPdoNipQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/console": "^29.7.0", + "@jest/environment": "^29.7.0", + "@jest/test-result": "^29.7.0", + "@jest/transform": "^29.7.0", + "@jest/types": "^29.6.3", + "@types/node": "*", + "chalk": "^4.0.0", + "emittery": "^0.13.1", + "graceful-fs": "^4.2.9", + "jest-docblock": "^29.7.0", + "jest-environment-node": "^29.7.0", + "jest-haste-map": "^29.7.0", + "jest-leak-detector": "^29.7.0", + "jest-message-util": "^29.7.0", + "jest-resolve": "^29.7.0", + "jest-runtime": "^29.7.0", + "jest-util": "^29.7.0", + "jest-watcher": "^29.7.0", + "jest-worker": "^29.7.0", + "p-limit": "^3.1.0", + "source-map-support": "0.5.13" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-runtime": { + "version": 
"29.7.0", + "resolved": "https://registry.npmjs.org/jest-runtime/-/jest-runtime-29.7.0.tgz", + "integrity": "sha512-gUnLjgwdGqW7B4LvOIkbKs9WGbn+QLqRQQ9juC6HndeDiezIwhDP+mhMwHWCEcfQ5RUXa6OPnFF8BJh5xegwwQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/environment": "^29.7.0", + "@jest/fake-timers": "^29.7.0", + "@jest/globals": "^29.7.0", + "@jest/source-map": "^29.6.3", + "@jest/test-result": "^29.7.0", + "@jest/transform": "^29.7.0", + "@jest/types": "^29.6.3", + "@types/node": "*", + "chalk": "^4.0.0", + "cjs-module-lexer": "^1.0.0", + "collect-v8-coverage": "^1.0.0", + "glob": "^7.1.3", + "graceful-fs": "^4.2.9", + "jest-haste-map": "^29.7.0", + "jest-message-util": "^29.7.0", + "jest-mock": "^29.7.0", + "jest-regex-util": "^29.6.3", + "jest-resolve": "^29.7.0", + "jest-snapshot": "^29.7.0", + "jest-util": "^29.7.0", + "slash": "^3.0.0", + "strip-bom": "^4.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-snapshot": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-snapshot/-/jest-snapshot-29.7.0.tgz", + "integrity": "sha512-Rm0BMWtxBcioHr1/OX5YCP8Uov4riHvKPknOGs804Zg9JGZgmIBkbtlxJC/7Z4msKYVbIJtfU+tKb8xlYNfdkw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/core": "^7.11.6", + "@babel/generator": "^7.7.2", + "@babel/plugin-syntax-jsx": "^7.7.2", + "@babel/plugin-syntax-typescript": "^7.7.2", + "@babel/types": "^7.3.3", + "@jest/expect-utils": "^29.7.0", + "@jest/transform": "^29.7.0", + "@jest/types": "^29.6.3", + "babel-preset-current-node-syntax": "^1.0.0", + "chalk": "^4.0.0", + "expect": "^29.7.0", + "graceful-fs": "^4.2.9", + "jest-diff": "^29.7.0", + "jest-get-type": "^29.6.3", + "jest-matcher-utils": "^29.7.0", + "jest-message-util": "^29.7.0", + "jest-util": "^29.7.0", + "natural-compare": "^1.4.0", + "pretty-format": "^29.7.0", + "semver": "^7.5.3" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + 
"node_modules/jest-snapshot/node_modules/semver": { + "version": "7.7.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.3.tgz", + "integrity": "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/jest-util": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-29.7.0.tgz", + "integrity": "sha512-z6EbKajIpqGKU56y5KBUgy1dt1ihhQJgWzUlZHArA/+X2ad7Cb5iF+AK1EWVL/Bo7Rz9uurpqw6SiBCefUbCGA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "^29.6.3", + "@types/node": "*", + "chalk": "^4.0.0", + "ci-info": "^3.2.0", + "graceful-fs": "^4.2.9", + "picomatch": "^2.2.3" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-validate": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-validate/-/jest-validate-29.7.0.tgz", + "integrity": "sha512-ZB7wHqaRGVw/9hST/OuFUReG7M8vKeq0/J2egIGLdvjHCmYqGARhzXmtgi+gVeZ5uXFF219aOc3Ls2yLg27tkw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "^29.6.3", + "camelcase": "^6.2.0", + "chalk": "^4.0.0", + "jest-get-type": "^29.6.3", + "leven": "^3.1.0", + "pretty-format": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-validate/node_modules/camelcase": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz", + "integrity": "sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/jest-watcher": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-watcher/-/jest-watcher-29.7.0.tgz", + 
"integrity": "sha512-49Fg7WXkU3Vl2h6LbLtMQ/HyB6rXSIX7SqvBLQmssRBGN9I0PNvPmAmCWSOY6SOvrjhI/F7/bGAv9RtnsPA03g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/test-result": "^29.7.0", + "@jest/types": "^29.6.3", + "@types/node": "*", + "ansi-escapes": "^4.2.1", + "chalk": "^4.0.0", + "emittery": "^0.13.1", + "jest-util": "^29.7.0", + "string-length": "^4.0.1" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-worker": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-worker/-/jest-worker-29.7.0.tgz", + "integrity": "sha512-eIz2msL/EzL9UFTFFx7jBTkeZfku0yUAyZZZmJ93H2TYEiroIx2PQjEXcwYtYl8zXCxb+PAmA2hLIt/6ZEkPHw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*", + "jest-util": "^29.7.0", + "merge-stream": "^2.0.0", + "supports-color": "^8.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-worker/node_modules/supports-color": { + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", + "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/supports-color?sponsor=1" + } + }, + "node_modules/js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/js-yaml": { + "version": "3.14.2", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.2.tgz", + "integrity": "sha512-PMSmkqxr106Xa156c2M265Z+FTrPl+oxd/rgOQy2tijQeK5TxQ43psO1ZCwhVOSdnn+RzkzlRz/eY4BgJBYVpg==", + "dev": true, + "license": "MIT", + "dependencies": { + 
"argparse": "^1.0.7", + "esprima": "^4.0.0" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/jsesc": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-3.1.0.tgz", + "integrity": "sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==", + "dev": true, + "license": "MIT", + "bin": { + "jsesc": "bin/jsesc" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/json-parse-even-better-errors": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz", + "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==", + "dev": true, + "license": "MIT" + }, + "node_modules/json5": { + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz", + "integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==", + "dev": true, + "license": "MIT", + "bin": { + "json5": "lib/cli.js" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/kleur": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/kleur/-/kleur-3.0.3.tgz", + "integrity": "sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/leven": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/leven/-/leven-3.1.0.tgz", + "integrity": "sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/lines-and-columns": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz", + "integrity": 
"sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==", + "dev": true, + "license": "MIT" + }, + "node_modules/locate-path": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", + "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-locate": "^4.1.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/lodash.memoize": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/lodash.memoize/-/lodash.memoize-4.1.2.tgz", + "integrity": "sha512-t7j+NzmgnQzTAYXcsHYLgimltOV1MXHtlOWf6GjL9Kj8GK5FInw5JotxvbOs+IvV1/Dzo04/fCGfLVs7aXb4Ag==", + "dev": true, + "license": "MIT" + }, + "node_modules/lru-cache": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", + "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", + "dev": true, + "license": "ISC", + "dependencies": { + "yallist": "^3.0.2" + } + }, + "node_modules/make-dir": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-4.0.0.tgz", + "integrity": "sha512-hXdUTZYIVOt1Ex//jAQi+wTZZpUpwBj/0QsOzqegb3rGMMeJiSEu5xLHnYfBrRV4RH2+OCSOO95Is/7x1WJ4bw==", + "dev": true, + "license": "MIT", + "dependencies": { + "semver": "^7.5.3" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/make-dir/node_modules/semver": { + "version": "7.7.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.3.tgz", + "integrity": "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/make-error": { + "version": "1.3.6", + 
"resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz", + "integrity": "sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==", + "dev": true, + "license": "ISC" + }, + "node_modules/makeerror": { + "version": "1.0.12", + "resolved": "https://registry.npmjs.org/makeerror/-/makeerror-1.0.12.tgz", + "integrity": "sha512-JmqCvUhmt43madlpFzG4BQzG2Z3m6tvQDNKdClZnO3VbIudJYmxsT0FNJMeiB2+JTSlTQTSbU8QdesVmwJcmLg==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "tmpl": "1.0.5" + } + }, + "node_modules/merge-stream": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", + "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==", + "dev": true, + "license": "MIT" + }, + "node_modules/micromatch": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", + "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", + "dev": true, + "license": "MIT", + "dependencies": { + "braces": "^3.0.3", + "picomatch": "^2.3.1" + }, + "engines": { + "node": ">=8.6" + } + }, + "node_modules/mimic-fn": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", + "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/minimatch": { + "version": "3.1.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.5.tgz", + "integrity": "sha512-VgjWUsnnT6n+NUk6eZq77zeFdpW2LWDzP6zFGrCbHXiYNul5Dzqk2HHQ5uFH2DNW5Xbp8+jVzaeNt94ssEEl4w==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/minimist": { + "version": "1.2.8", + "resolved": 
"https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", + "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "dev": true, + "license": "MIT" + }, + "node_modules/natural-compare": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", + "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==", + "dev": true, + "license": "MIT" + }, + "node_modules/neo-async": { + "version": "2.6.2", + "resolved": "https://registry.npmjs.org/neo-async/-/neo-async-2.6.2.tgz", + "integrity": "sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==", + "dev": true, + "license": "MIT" + }, + "node_modules/node-int64": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/node-int64/-/node-int64-0.4.0.tgz", + "integrity": "sha512-O5lz91xSOeoXP6DulyHfllpq+Eg00MWitZIbtPfoSEvqIHdl5gfcY6hYzDWnj0qD5tz52PI08u9qUvSVeUBeHw==", + "dev": true, + "license": "MIT" + }, + "node_modules/node-releases": { + "version": "2.0.27", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.27.tgz", + "integrity": "sha512-nmh3lCkYZ3grZvqcCH+fjmQ7X+H0OeZgP40OierEaAptX4XofMh5kwNbWh7lBduUzCcV/8kZ+NDLCwm2iorIlA==", + "dev": true, + "license": "MIT" + }, + "node_modules/normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "dev": true, + "license": "MIT", + 
"engines": { + "node": ">=0.10.0" + } + }, + "node_modules/npm-run-path": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz", + "integrity": "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==", + "dev": true, + "license": "MIT", + "dependencies": { + "path-key": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "dev": true, + "license": "ISC", + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/onetime": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", + "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "mimic-fn": "^2.1.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-limit": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "yocto-queue": "^0.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-locate": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", + "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-limit": "^2.2.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/p-locate/node_modules/p-limit": 
{ + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-try": "^2.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-try": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/parse-json": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz", + "integrity": "sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.0.0", + "error-ex": "^1.3.1", + "json-parse-even-better-errors": "^2.3.0", + "lines-and-columns": "^1.1.6" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/path-key": { + "version": "3.1.1", + "resolved": 
"https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/path-parse": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", + "dev": true, + "license": "MIT" + }, + "node_modules/picocolors": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", + "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", + "dev": true, + "license": "ISC" + }, + "node_modules/picomatch": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.2.tgz", + "integrity": "sha512-V7+vQEJ06Z+c5tSye8S+nHUfI51xoXIXjHQ99cQtKUkQqqO1kO/KCJUfZXuB47h/YBlDhah2H3hdUGXn8ie0oA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/pirates": { + "version": "4.0.7", + "resolved": "https://registry.npmjs.org/pirates/-/pirates-4.0.7.tgz", + "integrity": "sha512-TfySrs/5nm8fQJDcBDuUng3VOUKsd7S+zqvbOTiGXHfxX4wK31ard+hoNuvkicM/2YFzlpDgABOevKSsB4G/FA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 6" + } + }, + "node_modules/pkg-dir": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-4.2.0.tgz", + "integrity": "sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "find-up": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/pretty-format": { + "version": "29.7.0", + "resolved": 
"https://registry.npmjs.org/pretty-format/-/pretty-format-29.7.0.tgz", + "integrity": "sha512-Pdlw/oPxN+aXdmM9R00JVC9WVFoCLTKJvDVLgmJ+qAffBMxsV85l/Lu7sNx4zSzPyoL2euImuEwHhOXdEgNFZQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/schemas": "^29.6.3", + "ansi-styles": "^5.0.0", + "react-is": "^18.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/pretty-format/node_modules/ansi-styles": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz", + "integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/prompts": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/prompts/-/prompts-2.4.2.tgz", + "integrity": "sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "kleur": "^3.0.3", + "sisteransi": "^1.0.5" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/pure-rand": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/pure-rand/-/pure-rand-6.1.0.tgz", + "integrity": "sha512-bVWawvoZoBYpp6yIoQtQXHZjmz35RSVHnUOTefl8Vcjr8snTPY1wnpSPMWekcFwbxI6gtmT7rSYPFvz71ldiOA==", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://github.com/sponsors/dubzzz" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fast-check" + } + ], + "license": "MIT" + }, + "node_modules/react-is": { + "version": "18.3.1", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-18.3.1.tgz", + "integrity": "sha512-/LLMVyas0ljjAtoYiPqYiL8VWXzUUdThrmU5+n20DZv+a+ClRoevUzw5JxU+Ieh5/c87ytoTBV9G1FiKfNJdmg==", + "dev": true, + "license": "MIT" + }, + "node_modules/require-directory": { + 
"version": "2.1.1", + "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", + "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/resolve": { + "version": "1.22.11", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.11.tgz", + "integrity": "sha512-RfqAvLnMl313r7c9oclB1HhUEAezcpLjz95wFH4LVuhk9JF/r22qmVP9AMmOU4vMX7Q8pN8jwNg/CSpdFnMjTQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-core-module": "^2.16.1", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/resolve-cwd": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/resolve-cwd/-/resolve-cwd-3.0.0.tgz", + "integrity": "sha512-OrZaX2Mb+rJCpH/6CpSqt9xFVpN++x01XnN2ie9g6P5/3xelLAkXWVADpdz1IHD/KFfEXyE6V0U01OQ3UO2rEg==", + "dev": true, + "license": "MIT", + "dependencies": { + "resolve-from": "^5.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/resolve-from": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz", + "integrity": "sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/resolve.exports": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/resolve.exports/-/resolve.exports-2.0.3.tgz", + "integrity": "sha512-OcXjMsGdhL4XnbShKpAcSqPMzQoYkYyhbEaeSko47MjRP9NfEQMhZkXL1DoFlt9LWQn4YttrdnV6X2OiyzBi+A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + } + }, + "node_modules/semver": { + "version": "6.3.1", + "resolved": 
"https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dev": true, + "license": "MIT", + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/signal-exit": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/sisteransi": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/sisteransi/-/sisteransi-1.0.5.tgz", + "integrity": "sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==", + "dev": true, + "license": "MIT" + }, + "node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": 
"sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/source-map-support": { + "version": "0.5.13", + "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.13.tgz", + "integrity": "sha512-SHSKFHadjVA5oR4PPqhtAVdcBWwRYVd6g6cAXnIbRiIwc2EhPrTuKUBdSLvlEKyIP3GCf89fltvcZiP9MMFA1w==", + "dev": true, + "license": "MIT", + "dependencies": { + "buffer-from": "^1.0.0", + "source-map": "^0.6.0" + } + }, + "node_modules/sprintf-js": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", + "integrity": "sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==", + "dev": true, + "license": "BSD-3-Clause" + }, + "node_modules/stack-utils": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/stack-utils/-/stack-utils-2.0.6.tgz", + "integrity": "sha512-XlkWvfIm6RmsWtNJx+uqtKLS8eqFbxUg0ZzLXqY0caEy9l7hruX8IpiDnjsLavoBgqCCR71TqWO8MaXYheJ3RQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "escape-string-regexp": "^2.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/string-length": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/string-length/-/string-length-4.0.2.tgz", + "integrity": "sha512-+l6rNN5fYHNhZZy41RXsYptCjA2Igmq4EG7kZAYFQI1E1VTXarr6ZPXBg6eq7Y6eK4FEhY6AJlyuFIb/v/S0VQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "char-regex": "^1.0.2", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "license": "MIT", + "dependencies": { + "emoji-regex": "^8.0.0", + 
"is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-bom": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-4.0.0.tgz", + "integrity": "sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-final-newline": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz", + "integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/strip-json-comments": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", + "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "license": "MIT", + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/supports-preserve-symlinks-flag": { + 
"version": "1.0.0", + "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/test-exclude": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/test-exclude/-/test-exclude-6.0.0.tgz", + "integrity": "sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w==", + "dev": true, + "license": "ISC", + "dependencies": { + "@istanbuljs/schema": "^0.1.2", + "glob": "^7.1.4", + "minimatch": "^3.0.4" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/tmpl": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/tmpl/-/tmpl-1.0.5.tgz", + "integrity": "sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw==", + "dev": true, + "license": "BSD-3-Clause" + }, + "node_modules/to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/ts-jest": { + "version": "29.4.6", + "resolved": "https://registry.npmjs.org/ts-jest/-/ts-jest-29.4.6.tgz", + "integrity": "sha512-fSpWtOO/1AjSNQguk43hb/JCo16oJDnMJf3CdEGNkqsEX3t0KX96xvyX1D7PfLCpVoKu4MfVrqUkFyblYoY4lA==", + "dev": true, + "license": "MIT", + "dependencies": { + "bs-logger": "^0.2.6", + "fast-json-stable-stringify": "^2.1.0", + "handlebars": "^4.7.8", + "json5": "^2.2.3", + "lodash.memoize": "^4.1.2", + "make-error": "^1.3.6", + "semver": "^7.7.3", + "type-fest": "^4.41.0", + 
"yargs-parser": "^21.1.1" + }, + "bin": { + "ts-jest": "cli.js" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || ^18.0.0 || >=20.0.0" + }, + "peerDependencies": { + "@babel/core": ">=7.0.0-beta.0 <8", + "@jest/transform": "^29.0.0 || ^30.0.0", + "@jest/types": "^29.0.0 || ^30.0.0", + "babel-jest": "^29.0.0 || ^30.0.0", + "jest": "^29.0.0 || ^30.0.0", + "jest-util": "^29.0.0 || ^30.0.0", + "typescript": ">=4.3 <6" + }, + "peerDependenciesMeta": { + "@babel/core": { + "optional": true + }, + "@jest/transform": { + "optional": true + }, + "@jest/types": { + "optional": true + }, + "babel-jest": { + "optional": true + }, + "esbuild": { + "optional": true + }, + "jest-util": { + "optional": true + } + } + }, + "node_modules/ts-jest/node_modules/semver": { + "version": "7.7.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.3.tgz", + "integrity": "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/ts-jest/node_modules/type-fest": { + "version": "4.41.0", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-4.41.0.tgz", + "integrity": "sha512-TeTSQ6H5YHvpqVwBRcnLDCBnDOHWYu7IvGbHT6N8AOymcr9PJGjc1GTtiWZTYg0NCgYwvnYWEkVChQAr9bjfwA==", + "dev": true, + "license": "(MIT OR CC0-1.0)", + "engines": { + "node": ">=16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/ts-node": { + "version": "10.9.2", + "resolved": "https://registry.npmjs.org/ts-node/-/ts-node-10.9.2.tgz", + "integrity": "sha512-f0FFpIdcHgn8zcPSbf1dRevwt047YMnaiJM3u2w2RewrB+fob/zePZcrOyQoLMMO7aBIddLcQIEK5dYjkLnGrQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@cspotcode/source-map-support": "^0.8.0", + "@tsconfig/node10": "^1.0.7", + "@tsconfig/node12": "^1.0.7", + "@tsconfig/node14": "^1.0.0", + "@tsconfig/node16": "^1.0.2", + "acorn": 
"^8.4.1", + "acorn-walk": "^8.1.1", + "arg": "^4.1.0", + "create-require": "^1.1.0", + "diff": "^4.0.1", + "make-error": "^1.1.1", + "v8-compile-cache-lib": "^3.0.1", + "yn": "3.1.1" + }, + "bin": { + "ts-node": "dist/bin.js", + "ts-node-cwd": "dist/bin-cwd.js", + "ts-node-esm": "dist/bin-esm.js", + "ts-node-script": "dist/bin-script.js", + "ts-node-transpile-only": "dist/bin-transpile.js", + "ts-script": "dist/bin-script-deprecated.js" + }, + "peerDependencies": { + "@swc/core": ">=1.2.50", + "@swc/wasm": ">=1.2.50", + "@types/node": "*", + "typescript": ">=2.7" + }, + "peerDependenciesMeta": { + "@swc/core": { + "optional": true + }, + "@swc/wasm": { + "optional": true + } + } + }, + "node_modules/type-detect": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz", + "integrity": "sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/type-fest": { + "version": "0.21.3", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.21.3.tgz", + "integrity": "sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==", + "dev": true, + "license": "(MIT OR CC0-1.0)", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/typescript": { + "version": "5.6.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.6.3.tgz", + "integrity": "sha512-hjcS1mhfuyi4WW8IWtjP7brDrG2cuDZukyrYrSauoXGNgx0S7zceP07adYkJycEr56BOUTNPzbInooiN3fn1qw==", + "dev": true, + "license": "Apache-2.0", + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/uglify-js": { + "version": "3.19.3", + "resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-3.19.3.tgz", + "integrity": 
"sha512-v3Xu+yuwBXisp6QYTcH4UbH+xYJXqnq2m/LtQVWKWzYc1iehYnLixoQDN9FH6/j9/oybfd6W9Ghwkl8+UMKTKQ==", + "dev": true, + "license": "BSD-2-Clause", + "optional": true, + "bin": { + "uglifyjs": "bin/uglifyjs" + }, + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/undici-types": { + "version": "6.19.8", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.19.8.tgz", + "integrity": "sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw==", + "dev": true, + "license": "MIT" + }, + "node_modules/update-browserslist-db": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.2.3.tgz", + "integrity": "sha512-Js0m9cx+qOgDxo0eMiFGEueWztz+d4+M3rGlmKPT+T4IS/jP4ylw3Nwpu6cpTTP8R1MAC1kF4VbdLt3ARf209w==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "escalade": "^3.2.0", + "picocolors": "^1.1.1" + }, + "bin": { + "update-browserslist-db": "cli.js" + }, + "peerDependencies": { + "browserslist": ">= 4.21.0" + } + }, + "node_modules/v8-compile-cache-lib": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz", + "integrity": "sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==", + "dev": true, + "license": "MIT" + }, + "node_modules/v8-to-istanbul": { + "version": "9.3.0", + "resolved": "https://registry.npmjs.org/v8-to-istanbul/-/v8-to-istanbul-9.3.0.tgz", + "integrity": "sha512-kiGUalWN+rgBJ/1OHZsBtU4rXZOfj/7rKQxULKlIzwzQSvMJUUNgPwJEEh7gU6xEVxC0ahoOBvN2YI8GH6FNgA==", + "dev": true, + "license": "ISC", + "dependencies": { + "@jridgewell/trace-mapping": "^0.3.12", + 
"@types/istanbul-lib-coverage": "^2.0.1", + "convert-source-map": "^2.0.0" + }, + "engines": { + "node": ">=10.12.0" + } + }, + "node_modules/walker": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/walker/-/walker-1.0.8.tgz", + "integrity": "sha512-ts/8E8l5b7kY0vlWLewOkDXMmPdLcVV4GmOQLyxuSswIJsweeFZtAsMF7k1Nszz+TYBQrlYRmzOnr398y1JemQ==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "makeerror": "1.0.12" + } + }, + "node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, + "license": "ISC", + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/wordwrap": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-1.0.0.tgz", + "integrity": "sha512-gvVzJFlPycKc5dZN4yPkP8w7Dc37BtP1yczEneOb4uq34pXZcvrtRTmWV8W+Ume+XCxKgbjM+nevkyFPMybd4Q==", + "dev": true, + "license": "MIT" + }, + "node_modules/wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/write-file-atomic": { + "version": "4.0.2", + "resolved": 
"https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-4.0.2.tgz", + "integrity": "sha512-7KxauUdBmSdWnmpaGFg+ppNjKF8uNLry8LyzjauQDOVONfFLNKrKvQOxZ/VuTIcS/gge/YNahf5RIIQWTSarlg==", + "dev": true, + "license": "ISC", + "dependencies": { + "imurmurhash": "^0.1.4", + "signal-exit": "^3.0.7" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + } + }, + "node_modules/y18n": { + "version": "5.0.8", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", + "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=10" + } + }, + "node_modules/yallist": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", + "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==", + "dev": true, + "license": "ISC" + }, + "node_modules/yargs": { + "version": "17.7.2", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz", + "integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==", + "dev": true, + "license": "MIT", + "dependencies": { + "cliui": "^8.0.1", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.3", + "y18n": "^5.0.5", + "yargs-parser": "^21.1.1" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/yargs-parser": { + "version": "21.1.1", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", + "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/yn": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz", + "integrity": 
"sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/yocto-queue": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", + "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + } + } +} diff --git a/agentic-atx-platform/cdk/package.json b/agentic-atx-platform/cdk/package.json new file mode 100644 index 0000000..9dac2fc --- /dev/null +++ b/agentic-atx-platform/cdk/package.json @@ -0,0 +1,33 @@ +{ + "name": "cdk", + "version": "0.1.0", + "bin": { + "cdk": "bin/cdk.js" + }, + "scripts": { + "build": "tsc", + "watch": "tsc -w", + "test": "jest", + "cdk": "cdk" + }, + "devDependencies": { + "@types/jest": "^29.5.14", + "@types/node": "22.7.9", + "aws-cdk": "^2.1100.3", + "cdk-nag": "^2.37.55", + "jest": "^29.7.0", + "ts-jest": "^29.2.5", + "ts-node": "^10.9.2", + "typescript": "~5.6.3" + }, + "dependencies": { + "@aws-cdk/aws-bedrock-agentcore-alpha": "^2.241.0-alpha.0", + "aws-cdk-lib": "^2.234.0", + "constructs": "^10.0.0" + }, + "overrides": { + "case": { + "license": "MIT" + } + } +} diff --git a/agentic-atx-platform/cdk/tsconfig.json b/agentic-atx-platform/cdk/tsconfig.json new file mode 100644 index 0000000..0060d95 --- /dev/null +++ b/agentic-atx-platform/cdk/tsconfig.json @@ -0,0 +1,32 @@ +{ + "compilerOptions": { + "target": "ES2020", + "module": "commonjs", + "lib": [ + "es2020", + "dom" + ], + "declaration": true, + "strict": true, + "noImplicitAny": true, + "strictNullChecks": true, + "noImplicitThis": true, + "alwaysStrict": true, + "noUnusedLocals": false, + "noUnusedParameters": false, + "noImplicitReturns": true, + "noFallthroughCasesInSwitch": false, + "inlineSourceMap": 
true, + "inlineSources": true, + "experimentalDecorators": true, + "strictPropertyInitialization": false, + "typeRoots": [ + "./node_modules/@types" + ], + "skipLibCheck": true + }, + "exclude": [ + "node_modules", + "cdk.out" + ] +} diff --git a/agentic-atx-platform/container/.dockerignore b/agentic-atx-platform/container/.dockerignore new file mode 100644 index 0000000..6d97faa --- /dev/null +++ b/agentic-atx-platform/container/.dockerignore @@ -0,0 +1,37 @@ +# Git +.git +.gitignore +.gitattributes + +# Documentation +README.md +*.md + +# IDE +.vscode +.idea +*.swp +*.swo +*~ + +# OS +.DS_Store +Thumbs.db + +# Credentials and secrets +*.pem +*.key +*.crt +*.p12 +*.pfx +.env +.env.* +credentials +secrets + +# Build artifacts +*.log +*.tmp +.cache + +# Only include what's needed: Dockerfile, entrypoint.sh, download-source.sh, upload-results.sh, helpers/ diff --git a/agentic-atx-platform/container/Dockerfile b/agentic-atx-platform/container/Dockerfile new file mode 100644 index 0000000..88d0e2c --- /dev/null +++ b/agentic-atx-platform/container/Dockerfile @@ -0,0 +1,167 @@ +# AWS Transform CLI Container - All Languages +# Production-ready container with Java, Python, Node.js, and all common tools + +# Amazon Linux 2023 base image (required for public ECR compliance) +FROM public.ecr.aws/amazonlinux/amazonlinux:2023 + +ENV TZ=UTC + +# Install base dependencies from dnf +RUN dnf install -y \ + git \ + tar \ + gzip \ + unzip \ + jq \ + ca-certificates \ + gcc \ + gcc-c++ \ + make \ + patch \ + openssl-devel \ + bzip2-devel \ + libffi-devel \ + zlib-devel \ + readline-devel \ + sqlite-devel \ + xz-devel \ + && dnf clean all + +# Install AWS CLI v2 +RUN curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip" && \ + unzip awscliv2.zip && \ + ./aws/install && \ + rm -rf aws awscliv2.zip + +# Create non-root user +RUN useradd -m -u 1000 -s /bin/bash atxuser + +# Create working directories +RUN mkdir -p /source /output /app /usr/local/bin && \ + 
chown -R atxuser:atxuser /source /output /app + +# ============================================================================ +# JAVA - Amazon Corretto (8, 11, 17, 21) + Maven + Gradle +# ============================================================================ +# Install Amazon Corretto versions from dnf +RUN dnf install -y \ + java-1.8.0-amazon-corretto-devel \ + java-11-amazon-corretto-devel \ + java-17-amazon-corretto-devel \ + java-21-amazon-corretto-devel \ + maven \ + && dnf clean all + +# Set default Java version to Corretto 17 +ENV JAVA_HOME=/usr/lib/jvm/java-17-amazon-corretto.x86_64 +ENV MAVEN_HOME=/usr/share/maven +ENV PATH="$JAVA_HOME/bin:$MAVEN_HOME/bin:$PATH" + +# ============================================================================ +# GRADLE - via SDKMAN +# ============================================================================ +USER atxuser +ENV SDKMAN_DIR=/home/atxuser/.sdkman + +# Install SDKMAN +RUN curl -s "https://get.sdkman.io" | bash + +# Install Gradle +RUN bash -c "source $SDKMAN_DIR/bin/sdkman-init.sh && sdk install gradle 8.5" + +ENV PATH="$SDKMAN_DIR/candidates/gradle/current/bin:$PATH" + +USER root + +# ============================================================================ +# PYTHON - dnf (3.11, 3.12, 3.13) + pyenv (3.8, 3.9, 3.10) +# ============================================================================ +USER root +RUN dnf install -y python3.11 python3.11-pip python3.12 python3.13 && dnf clean all + +# Set Python 3.11 as default python3 (required for MCP servers) +RUN update-alternatives --install /usr/bin/python3 python3 /usr/bin/python3.11 1 && \ + update-alternatives --set python3 /usr/bin/python3.11 + +# Install older versions via pyenv (3.8, 3.9, 3.10) - built from source +USER atxuser +ENV PYENV_ROOT=/home/atxuser/.pyenv +ENV PATH="$PYENV_ROOT/bin:$PATH" + +RUN git clone --depth 1 --branch v2.6.20 https://github.com/pyenv/pyenv.git $PYENV_ROOT && \ + eval "$(pyenv init -)" && \ + 
MAKE_OPTS="-j$(nproc)" pyenv install 3.8.18 && \ + MAKE_OPTS="-j$(nproc)" pyenv install 3.9.18 && \ + MAKE_OPTS="-j$(nproc)" pyenv install 3.10.13 + +# Install Python packages +COPY --chown=atxuser:atxuser requirements.txt /tmp/ +RUN python3.11 -m pip install --user --no-cache-dir -r /tmp/requirements.txt + +ENV PATH="/home/atxuser/.local/bin:$PATH" + +# ============================================================================ +# NODE.JS - Multiple versions via nvm (16, 18, 20, 22, 24) +# ============================================================================ +ENV NVM_DIR=/home/atxuser/.nvm + +# Install nvm +RUN git clone https://github.com/nvm-sh/nvm.git $NVM_DIR && \ + cd $NVM_DIR && git checkout v0.39.7 + +# Install Node.js versions and create symlink +RUN . $NVM_DIR/nvm.sh && \ + nvm install 16 && \ + nvm install 18 && \ + nvm install 20 && \ + nvm install 22 && \ + nvm install 24 && \ + nvm alias default 20 && \ + nvm use default && \ + cd $NVM_DIR/versions/node && \ + ln -sf $(ls -d v20.* | head -1) default + +# Add default Node.js to PATH (uses symlink) +ENV PATH="$NVM_DIR/versions/node/default/bin:$PATH" + +# Install npm packages from package.json +COPY --chown=atxuser:atxuser package.json /tmp/ +RUN . 
$NVM_DIR/nvm.sh && cd /tmp && npm install -g $(node -pe "Object.keys(require('./package.json').devDependencies).join(' ')") + +USER root + +# ============================================================================ +# CONTAINER SCRIPTS +# ============================================================================ +COPY --chown=atxuser:atxuser entrypoint.sh /app/ +COPY --chown=atxuser:atxuser download-source.sh /app/ +COPY --chown=atxuser:atxuser upload-results.sh /app/ + +RUN chmod +x /app/*.sh + +# ============================================================================ +# AWS TRANSFORM CLI +# ============================================================================ +USER atxuser + +# Install AWS Transform CLI (only curl-based install allowed for ATX CLI) +RUN curl -fsSL https://desktop-release.transform.us-east-1.api.aws/install.sh | bash + +# Add ATX to PATH +ENV PATH="/home/atxuser/.local/bin:$PATH" + +WORKDIR /source + +# Health check - verify ATX CLI binary exists +HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \ + CMD test -f /home/atxuser/.local/bin/atx || exit 1 + +ENTRYPOINT ["/app/entrypoint.sh"] + +# ============================================================================ +# METADATA +# ============================================================================ +LABEL maintainer="AWS Transform CLI" +LABEL description="Production-ready container with Amazon Corretto Java, Python, Node.js for AWS Transform CLI" +LABEL version="1.0" +LABEL languages="java-8,11,17,21-corretto python-3.8-3.13 nodejs-16,18,20,22,24" diff --git a/agentic-atx-platform/container/README.md b/agentic-atx-platform/container/README.md new file mode 100644 index 0000000..4d57382 --- /dev/null +++ b/agentic-atx-platform/container/README.md @@ -0,0 +1,414 @@ +# Container - All Languages Included + +Production-ready container with Java, Python, Node.js, and all common tools pre-installed. 
+ +## What's Included + +### Java +- **Versions:** Amazon Corretto 8, 11, 17, 21 +- **Build Tools:** Maven 3.8.4, Gradle 8.5 +- **Default:** Java 17 ✓ + +### Python +- **Versions:** Python 3.8, 3.9, 3.10, 3.11, 3.12, 3.13 +- **System Python:** 3.9 (used by AWS CLI) +- **Available:** `python3.11`, `python3.12`, `python3.13` (direct access) +- **Via pyenv:** 3.8, 3.9, 3.10 (use `pyenv shell 3.8.18`) +- **Tools:** pip, virtualenv, uv +- **Packages:** boto3, botocore, requests +- **Note:** Use `python3.11` or higher for MCP servers ✓ + +### Node.js +- **Versions:** Node.js 16, 18, 20, 22, 24 (via nvm) +- **Package Managers:** npm, yarn, pnpm +- **Tools:** TypeScript, ts-node +- **Default:** Node.js 20 ✓ +- **Switch versions:** `nvm use 22` + +### Other Tools +- **AWS CLI v2** (official installer) +- **AWS Transform CLI** +- **Git** +- **Build essentials** (gcc, make, etc.) +- **Base OS:** Amazon Linux 2023 + +--- + +## When to Customize + +**You DON'T need to customize if:** +- ✅ Using public repositories +- ✅ Using standard language versions (Java 17, Python 3.11, Node.js 20) +- ✅ Using public package registries (Maven Central, PyPI, npm) + +**You NEED to customize if:** +- ❌ Accessing private Git repositories +- ❌ Using private artifact registries (Maven, npm, PyPI) +- ❌ Need additional tools or languages +- ❌ Need specific language versions not in defaults + +--- + +## Private Repository Access + +For accessing private Git repositories or artifact registries during transformations: + +### Option 1: AWS Secrets Manager (RECOMMENDED) + +Store credentials in Secrets Manager and fetch at runtime. Credentials never stored in image. + +**1. Create secrets:** +```bash +# GitHub token +aws secretsmanager create-secret \ + --name atx/github-token \ + --secret-string "ghp_your_token_here" + +# npm token +aws secretsmanager create-secret \ + --name atx/npm-token \ + --secret-string "npm_your_token_here" +``` + +**2. 
Grant IAM role access:** +```bash +# Add to ATXBatchJobRole policy +aws iam put-role-policy --role-name ATXBatchJobRole --policy-name SecretsAccess --policy-document '{ + "Version": "2012-10-17", + "Statement": [{ + "Effect": "Allow", + "Action": "secretsmanager:GetSecretValue", + "Resource": "arn:aws:secretsmanager:*:*:secret:atx/*" + }] +}' +``` + +**3. Uncomment placeholder in `container/entrypoint.sh`:** +```bash +# Find the "PRIVATE REPOSITORY ACCESS" section +# Uncomment the fetch_private_credentials() function and call +``` + +**4. Redeploy container:** + +_If using CDK:_ +```bash +cd cdk && ./deploy.sh +``` + +_If using bash scripts:_ +```bash +cd deployment +./1-build-and-push.sh --rebuild +./2-deploy-infrastructure.sh +``` + +### Option 2: Hardcode in Dockerfile (NOT RECOMMENDED) + +⚠️ **Security Risk**: Tokens permanently stored in image layers + +Only use for testing or if you understand the risks. + +**1. Uncomment placeholder in `container/Dockerfile`:** +```bash +# Find the "PRIVATE REPOSITORY ACCESS" section +# Uncomment the credentials you need (GitHub, npm, Maven, etc.) +``` + +**2. Redeploy container:** + +_If using CDK:_ +```bash +cd cdk && ./deploy.sh +``` + +_If using bash scripts:_ +```bash +cd deployment +./1-build-and-push.sh --rebuild +./2-deploy-infrastructure.sh +``` + +See "Detailed Examples" section below for complete examples. + +--- + +## Redeploying After Customization + +After customizing `Dockerfile` or `entrypoint.sh`: + +**If using CDK:** +```bash +cd cdk && ./deploy.sh +``` + +CDK automatically detects Dockerfile changes and rebuilds. 
If changes aren't detected, force rebuild: +```bash +cd cdk && ./deploy.sh --force +``` + +**If using bash scripts:** +```bash +cd deployment +./1-build-and-push.sh --rebuild +./2-deploy-infrastructure.sh +``` + +--- + +## Files + +- **Dockerfile** - Complete container definition with all languages +- **entrypoint.sh** - Container entry point with credential management +- **download-source.sh** - Source code download logic +- **upload-results.sh** - S3 upload logic with security exclusions +- **helpers/** - Version switching scripts (optional) + +## Container Arguments + +```bash +docker run aws-transform-cli \ + [--source ] \ + [--output ] \ + --command +``` + +**Arguments:** +- `--source` (optional): Git repo or S3 bucket with source code +- `--output` (optional): S3 path for results (requires S3_BUCKET env var) +- `--command` (required): ATX CLI command to execute + +**Environment Variables:** +- `S3_BUCKET` - S3 bucket name for output (results storage) +- `SOURCE_BUCKET` - S3 bucket name for source code uploads and MCP config (optional) +- `AWS_ACCESS_KEY_ID` - AWS access key (or use IAM role) +- `AWS_SECRET_ACCESS_KEY` - AWS secret key (or use IAM role) +- `AWS_DEFAULT_REGION` - AWS region (default: us-east-1) +- `ATX_SHELL_TIMEOUT` - Timeout in seconds (default: 43200 = 12 hours) + +## Building Locally (Optional) + +```bash +cd container +docker build -t aws-transform-cli . 
+```
+
+**Build time:** ~15-18 minutes (one-time)
+**Image size:** ~3.5GB
+
+## Testing Locally (Optional)
+
+```bash
+# Test with explicit credentials
+docker run --rm \
+  -e AWS_ACCESS_KEY_ID \
+  -e AWS_SECRET_ACCESS_KEY \
+  -e AWS_SESSION_TOKEN \
+  aws-transform-cli \
+  --command "atx --version"
+
+# Test with source code
+docker run --rm \
+  -e AWS_ACCESS_KEY_ID \
+  -e AWS_SECRET_ACCESS_KEY \
+  -e S3_BUCKET=my-bucket \
+  aws-transform-cli \
+  --source "https://github.com/user/repo.git" \
+  --output "results/" \
+  --command "atx custom def exec -n MyDefinition -c noop -x -t"
+```
+
+## Supported Transformations
+
+This container supports all AWS-managed transformations:
+
+### Java
+- ✅ **AWS/java-aws-sdk-v1-to-v2** - Upgrade AWS SDK from v1 to v2
+- ✅ Java version upgrades (8→11, 11→17, 17→21)
+- ✅ Maven and Gradle project support
+
+### Python
+- ✅ **AWS/python-boto2-to-boto3** - Migrate from boto2 to boto3
+- ✅ Python version upgrades (3.8→3.11, 3.11→3.12, etc.)
+
+### Node.js
+- ✅ **AWS/nodejs-aws-sdk-v2-to-v3** - Upgrade AWS SDK from v2 to v3
+- ✅ Node.js version upgrades (16→18, 18→20, 20→22, etc.)
+- ✅ TypeScript support
+
+### Custom
+- ✅ Any custom transformation definitions
+- ✅ Comprehensive codebase analysis
+
+## Advanced: Version Switching (Optional)
+
+**Most users don't need this** - the defaults (Java 17, Python 3.11, Node.js 20) work for 90% of cases.
+
+If your codebase requires a specific version, set environment variables:
+
+```bash
+aws batch submit-job \
+  --container-overrides '{
+    "environment": [
+      {"name": "JAVA_VERSION", "value": "8"},
+      {"name": "PYTHON_VERSION", "value": "13"},
+      {"name": "NODE_VERSION", "value": "22"}
+    ]
+  }'
+```
+
+(JSON does not permit comments: `JAVA_VERSION=8` selects legacy Java 8, `PYTHON_VERSION=13` selects Python 3.13, and `NODE_VERSION=22` selects Node.js 22.)
+
+**Available versions:**
+- Java: 8, 11, 17, 21
+- Python: 8, 9, 10, 11, 12, 13 (refers to 3.8, 3.9, etc.)
+- Node.js: 16, 18, 20, 22, 24
+
+### Helper Commands (For Interactive Use)
+
+If running the container interactively:
+
+```bash
+# Show all installed versions
+show-versions
+
+# Switch versions
+use-java 21
+use-python 13
+use-node 22
+```
+
+## Security
+
+- **Non-root execution:** Container runs as `atxuser` (UID 1000)
+- **IAM role credentials:** Automatic retrieval from Batch job role
+- **Credential refresh:** Every 45 minutes for long-running jobs
+- **S3 security exclusions:** .git, .env, credentials files excluded from uploads
+- **Base image:** Amazon Linux 2023 from public.ecr.aws/amazonlinux/amazonlinux:2023
+
+## Next Steps
+
+After customizing the container (if needed):
+
+1. **Deploy:** See [../deployment/README.md](../deployment/README.md) or [../cdk/README.md](../cdk/README.md) for deployment instructions
+2. **Test:** Run `cd ../test && ./test-apis.sh` to validate all endpoints
+3. **Monitor:** View logs in CloudWatch Console or use `python3 ../utilities/tail-logs.py <job-id>`
+
+---
+
+## Detailed Examples
+
+**Note:** This section provides detailed examples for reference. For actual implementation, use the commented placeholders in `Dockerfile` and `entrypoint.sh` as described in the "Private Repository Access" section above.
+
+The examples below show the syntax for various package managers and registries. Simply uncomment the relevant sections in the source files rather than creating separate files.
+ +### Private Git Repositories + +**Use case:** Clone private repos during transformation + +**Create `Dockerfile.custom`:** +```dockerfile +FROM {account}.dkr.ecr.us-east-1.amazonaws.com/aws-transform-cli:latest + +# Configure Git credentials (use Personal Access Token) +RUN git config --global credential.helper store && \ + echo "https://USERNAME:TOKEN@github.com" > /home/atxuser/.git-credentials && \ + chown atxuser:atxuser /home/atxuser/.git-credentials && \ + chmod 600 /home/atxuser/.git-credentials +``` + +### Private npm Registry + +**Use case:** Install private npm packages during transformation (package.json dependencies) + +**Create `.npmrc`:** +``` +registry=https://npm.company.com/ +//npm.company.com/:_authToken=YOUR_NPM_TOKEN +``` + +**Create `Dockerfile.custom`:** +```dockerfile +FROM {account}.dkr.ecr.us-east-1.amazonaws.com/aws-transform-cli:latest + +# Copy npm config for runtime use +COPY .npmrc /home/atxuser/.npmrc +RUN chown atxuser:atxuser /home/atxuser/.npmrc + +# Optional: Pre-install global packages at build time +RUN npm install -g @company/cli-tool --registry https://npm.company.com/ +``` + +### Private Maven/Gradle Repository + +**Use case:** Download private artifacts during transformation (pom.xml dependencies) + +**Create `settings.xml`:** +```xml + + + + company-repo + USERNAME + PASSWORD + + + + + company-repo + https://artifactory.company.com/maven + * + + + +``` + +**Create `Dockerfile.custom`:** +```dockerfile +FROM {account}.dkr.ecr.us-east-1.amazonaws.com/aws-transform-cli:latest + +# Copy Maven settings for runtime use +COPY settings.xml /home/atxuser/.m2/settings.xml +RUN chown atxuser:atxuser /home/atxuser/.m2/settings.xml +``` + +### Private Python Package Index + +**Use case:** Install private Python packages during transformation (requirements.txt) + +**Create `Dockerfile.custom`:** +```dockerfile +FROM {account}.dkr.ecr.us-east-1.amazonaws.com/aws-transform-cli:latest + +# Configure pip for private index (runtime) +RUN 
pip config set global.index-url https://pypi.company.com/simple && \ + pip config set global.trusted-host pypi.company.com && \ + pip config set global.extra-index-url https://pypi.org/simple + +# Optional: Pre-install private packages (build time) +RUN pip install company-lib==1.0.0 +``` + +--- + +## Security Best Practices + +1. **Use tokens, not passwords** - GitHub PAT, npm tokens, Maven encrypted passwords +2. **Limit token scope** - Read-only access to specific repos +3. **Rotate credentials** - Rebuild container when tokens change +4. **Use build secrets** - Docker BuildKit secrets for sensitive data during build +5. **Scan images** - Use ECR image scanning for vulnerabilities + +**Example: Using Docker BuildKit Secrets** + +```dockerfile +# syntax=docker/dockerfile:1 +FROM {account}.dkr.ecr.us-east-1.amazonaws.com/aws-transform-cli:latest + +# Use build secret (not baked into image) +RUN --mount=type=secret,id=npm_token \ + echo "//npm.company.com/:_authToken=$(cat /run/secrets/npm_token)" > /home/atxuser/.npmrc +``` + +**Build with secret:** +```bash +docker build --secret id=npm_token,src=.npm_token -f Dockerfile.custom -t custom:latest . 
+``` diff --git a/agentic-atx-platform/container/download-source.sh b/agentic-atx-platform/container/download-source.sh new file mode 100644 index 0000000..ca881c2 --- /dev/null +++ b/agentic-atx-platform/container/download-source.sh @@ -0,0 +1,90 @@ +#!/bin/bash +set -e + +# Logging function with timestamps +log() { + echo "[$(date -u +"%Y-%m-%dT%H:%M:%SZ")] $1" +} + +SOURCE_URL="$1" + +if [[ -z "$SOURCE_URL" ]]; then + log "No source URL provided, skipping download" + # Create a default working directory for commands that don't need source + mkdir -p /source/workspace + cd /source/workspace + # Initialize empty git repo for ATX CLI compatibility + git init + git config user.email "container@aws-transform.local" + git config user.name "AWS Transform Container" + echo "workspace" > /tmp/repo_name.txt + log "Created workspace directory at /source/workspace" + exit 0 +fi + +# Clear /source directory +rm -rf /source/* + +# Determine if it's a git URL or S3 URL +if [[ "$SOURCE_URL" == s3://* ]]; then + log "Downloading from S3: $SOURCE_URL" + + # Check if it's a ZIP file + if [[ "$SOURCE_URL" == *.zip ]]; then + log "Detected ZIP file, downloading and extracting..." 
+ + # Extract ZIP filename without extension for folder name + ZIP_BASENAME=$(basename "$SOURCE_URL" .zip) + + # Download ZIP file + ZIP_FILE="/tmp/source.zip" + aws s3 cp "$SOURCE_URL" "$ZIP_FILE" --quiet + + # Extract ZIP file to /source + unzip -q "$ZIP_FILE" -d /source/ + rm "$ZIP_FILE" + + # Find the extracted directory (could be nested) + EXTRACTED_DIRS=$(find /source -mindepth 1 -maxdepth 1 -type d) + DIR_COUNT=$(echo "$EXTRACTED_DIRS" | wc -l) + + if [ "$DIR_COUNT" -eq 1 ]; then + # Single directory extracted, use it + DIR_NAME=$(basename "$EXTRACTED_DIRS") + log "Extracted to directory: $DIR_NAME" + echo "$DIR_NAME" > /tmp/repo_name.txt + else + # Multiple files/dirs or no directory, create wrapper with ZIP name + log "Multiple items extracted, creating '$ZIP_BASENAME' wrapper directory" + mkdir -p "/source/$ZIP_BASENAME" + # Move all items into wrapper directory + find /source -mindepth 1 -maxdepth 1 ! -name "$ZIP_BASENAME" -exec mv {} "/source/$ZIP_BASENAME/" \; + echo "$ZIP_BASENAME" > /tmp/repo_name.txt + log "All files moved to /source/$ZIP_BASENAME/" + fi + else + # Regular S3 directory sync + log "Syncing S3 directory..." 
+ mkdir -p /source/project + aws s3 sync "$SOURCE_URL" /source/project/ --quiet + echo "project" > /tmp/repo_name.txt + fi + +elif [[ "$SOURCE_URL" == *.git ]] || [[ "$SOURCE_URL" == *github.com* ]] || [[ "$SOURCE_URL" == *gitlab.com* ]] || [[ "$SOURCE_URL" == *bitbucket.org* ]]; then + log "Cloning git repository: $SOURCE_URL" + # Extract repo name for directory + REPO_NAME=$(basename "$SOURCE_URL" .git) + git clone "$SOURCE_URL" "/source/$REPO_NAME" + # Export repo name for use by entrypoint + echo "$REPO_NAME" > /tmp/repo_name.txt +else + log "Error: Unsupported source URL format: $SOURCE_URL" + log "Supported formats:" + log " - Git repositories: https://github.com/user/repo.git" + log " - S3 directories: s3://bucket-name/path/" + log " - S3 ZIP files: s3://bucket-name/path/file.zip" + exit 1 +fi + +log "Source code downloaded successfully to /source/" +ls -la /source/ \ No newline at end of file diff --git a/agentic-atx-platform/container/entrypoint.sh b/agentic-atx-platform/container/entrypoint.sh new file mode 100644 index 0000000..44673e3 --- /dev/null +++ b/agentic-atx-platform/container/entrypoint.sh @@ -0,0 +1,395 @@ +#!/bin/bash +set -e + +# Initialize nvm for Node.js +export NVM_DIR="/home/atxuser/.nvm" +[ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh" + +# Cleanup function +cleanup() { + rm -f /tmp/repo_name.txt +} +trap cleanup EXIT + +# Logging function with timestamps +log() { + echo "[$(date -u +"%Y-%m-%dT%H:%M:%SZ")] $1" +} + +# Retry function for network operations +retry() { + local max_attempts=3 + local timeout=5 + local attempt=1 + local exitCode=0 + + while [ $attempt -le $max_attempts ]; do + if "$@"; then + return 0 + else + exitCode=$? + fi + + if [ $attempt -lt $max_attempts ]; then + log "Command failed (attempt $attempt/$max_attempts). Retrying in $timeout seconds..." + sleep $timeout + timeout=$((timeout * 2)) + fi + attempt=$((attempt + 1)) + done + + log "Command failed after $max_attempts attempts." 
+ return $exitCode +} + +# Function to refresh IAM role credentials (for long-running jobs) +refresh_credentials() { + # Only refresh if we're using IAM role (not explicit credentials) + if [[ -z "${USING_EXPLICIT_CREDS:-}" ]]; then + log "Refreshing temporary credentials from IAM role..." + + TEMP_CREDS=$(aws configure export-credentials --format env 2>/dev/null) + + if [[ $? -eq 0 && -n "$TEMP_CREDS" ]]; then + # Source the credentials directly to export them + eval "$TEMP_CREDS" + + # Also configure AWS CLI with these credentials + aws configure set aws_access_key_id "$AWS_ACCESS_KEY_ID" + aws configure set aws_secret_access_key "$AWS_SECRET_ACCESS_KEY" + if [[ -n "$AWS_SESSION_TOKEN" ]]; then + aws configure set aws_session_token "$AWS_SESSION_TOKEN" + fi + + log "Credentials refreshed successfully" + else + log "Warning: Failed to refresh credentials, continuing with existing credentials" + fi + fi +} + +# Parse arguments +SOURCE="" +OUTPUT="" +COMMAND="" + +while [[ $# -gt 0 ]]; do + case $1 in + --source) + SOURCE="$2" + shift 2 + ;; + --output) + OUTPUT="$2" + shift 2 + ;; + --command) + COMMAND="$2" + shift 2 + ;; + *) + echo "Unknown argument: $1" + echo "Usage: [--source ] --output --command " + exit 1 + ;; + esac +done + +# Validate required arguments +if [[ -z "$COMMAND" ]]; then + echo "Error: Missing required arguments" + echo "Usage: [--source ] [--output ] --command " + echo "" + echo "Environment Variables:" + echo " S3_BUCKET - S3 bucket name for output (required if --output is specified)" + echo "" + echo "Examples:" + echo " --command \"atx custom def list\"" + echo " --source https://github.com/user/repo.git --output results/job1/ --command \"atx custom def exec\"" + echo " With S3_BUCKET=my-bucket, output goes to s3://my-bucket/results/job1/" + exit 1 +fi + +# Check if output is specified and S3_BUCKET is set +if [[ -n "$OUTPUT" && -z "$S3_BUCKET" ]]; then + echo "Error: S3_BUCKET environment variable must be set when using --output" + echo 
"Example: docker run -e S3_BUCKET=my-bucket ... --output results/job1/" + exit 1 +fi + +log "Starting AWS Transform CLI execution..." +log "Source: $SOURCE" +log "Output: $OUTPUT" +log "Command: $COMMAND" + +# Set global git configuration for ATX +log "Configuring git identity for ATX..." +git config --global user.email "${GIT_USER_EMAIL:-atx-container@aws-transform.local}" +git config --global user.name "${GIT_USER_NAME:-AWS Transform Container}" + +# Set ATX shell timeout for long-running jobs (default: 12 hours) +export ATX_SHELL_TIMEOUT="${ATX_SHELL_TIMEOUT:-43200}" +log "Set ATX_SHELL_TIMEOUT=$ATX_SHELL_TIMEOUT for long-running transformations" + +# Configure AWS credentials for ATX CLI +# ATX CLI requires credentials as environment variables +if [[ -n "$AWS_ACCESS_KEY_ID" && -n "$AWS_SECRET_ACCESS_KEY" ]]; then + log "Using explicit AWS credentials from environment variables" + export USING_EXPLICIT_CREDS=true + aws configure set aws_access_key_id "$AWS_ACCESS_KEY_ID" + aws configure set aws_secret_access_key "$AWS_SECRET_ACCESS_KEY" + aws configure set region "${AWS_DEFAULT_REGION:-us-east-1}" + + if [[ -n "$AWS_SESSION_TOKEN" ]]; then + aws configure set aws_session_token "$AWS_SESSION_TOKEN" + fi +else + log "No explicit credentials found, retrieving temporary credentials from IAM role..." + + # Verify IAM role is available + if ! aws sts get-caller-identity > /dev/null 2>&1; then + log "Error: No credentials available (neither environment variables nor IAM role)" + exit 1 + fi + + # Retrieve temporary credentials from IAM role (EC2 instance profile, ECS task role, or Batch job role) + log "Retrieving temporary credentials from IAM role..." + + # Use AWS CLI to export credentials from the credential chain + # The aws configure export-credentials command outputs in env format + TEMP_CREDS=$(aws configure export-credentials --format env 2>/dev/null) + + if [[ $? 
-eq 0 && -n "$TEMP_CREDS" ]]; then + # Source the credentials directly to export them + eval "$TEMP_CREDS" + + # Verify credentials were exported + if [[ -z "$AWS_ACCESS_KEY_ID" || -z "$AWS_SECRET_ACCESS_KEY" ]]; then + log "Error: Failed to export credentials from IAM role" + log "TEMP_CREDS output: $TEMP_CREDS" + exit 1 + fi + + # Also configure AWS CLI with these credentials for consistency + aws configure set aws_access_key_id "$AWS_ACCESS_KEY_ID" + aws configure set aws_secret_access_key "$AWS_SECRET_ACCESS_KEY" + aws configure set region "${AWS_DEFAULT_REGION:-us-east-1}" + if [[ -n "$AWS_SESSION_TOKEN" ]]; then + aws configure set aws_session_token "$AWS_SESSION_TOKEN" + fi + + log "Successfully retrieved and exported temporary credentials from IAM role" + + # Log the role ARN for debugging (without exposing credentials) + ROLE_ARN=$(aws sts get-caller-identity --query 'Arn' --output text 2>/dev/null || echo "Unable to retrieve role ARN") + log "Using IAM role: $ROLE_ARN" + else + log "Error: Failed to retrieve credentials from IAM role" + exit 1 + fi +fi + +# Verify AWS credentials are working with retry +log "Verifying AWS credentials..." 
+log "AWS_ACCESS_KEY_ID is set: $([ -n "$AWS_ACCESS_KEY_ID" ] && echo 'yes' || echo 'no')" +log "AWS_SECRET_ACCESS_KEY is set: $([ -n "$AWS_SECRET_ACCESS_KEY" ] && echo 'yes' || echo 'no')" +log "AWS_SESSION_TOKEN is set: $([ -n "$AWS_SESSION_TOKEN" ] && echo 'yes' || echo 'no')" + +retry aws sts get-caller-identity || { + log "Error: Unable to authenticate with AWS after multiple attempts" + exit 1 +} +log "AWS credentials verified successfully" + +# ============================================================================ +# PRIVATE REPOSITORY ACCESS (Optional - RECOMMENDED) +# ============================================================================ +# Fetch credentials from AWS Secrets Manager for private repositories +# This is the RECOMMENDED approach (credentials never stored in image) +# +# Uncomment and customize the secret names for your environment: +# +# fetch_private_credentials() { +# log "Fetching private repository credentials from Secrets Manager..." +# +# # GitHub token for private repos +# GITHUB_TOKEN=$(aws secretsmanager get-secret-value \ +# --secret-id "atx/github-token" --query SecretString --output text 2>/dev/null || true) +# if [[ -n "$GITHUB_TOKEN" ]]; then +# echo "https://${GITHUB_TOKEN}@github.com" > /home/atxuser/.git-credentials +# git config --global credential.helper store +# log "✓ GitHub credentials configured" +# fi +# +# # npm token for private packages +# NPM_TOKEN=$(aws secretsmanager get-secret-value \ +# --secret-id "atx/npm-token" --query SecretString --output text 2>/dev/null || true) +# if [[ -n "$NPM_TOKEN" ]]; then +# echo "//registry.npmjs.org/:_authToken=${NPM_TOKEN}" > /home/atxuser/.npmrc +# log "✓ npm credentials configured" +# fi +# +# # Maven credentials for private artifacts +# MAVEN_USER=$(aws secretsmanager get-secret-value \ +# --secret-id "atx/maven-user" --query SecretString --output text 2>/dev/null || true) +# MAVEN_PASS=$(aws secretsmanager get-secret-value \ +# --secret-id "atx/maven-pass" 
--query SecretString --output text 2>/dev/null || true) +# if [[ -n "$MAVEN_USER" && -n "$MAVEN_PASS" ]]; then +# cat > /home/atxuser/.m2/settings.xml < +# +# +# company-repo +# ${MAVEN_USER} +# ${MAVEN_PASS} +# +# +# +# EOF +# log "✓ Maven credentials configured" +# fi +# } +# +# fetch_private_credentials +# ============================================================================ + +# Download MCP configuration from S3 if available +log "Checking for MCP configuration..." +MCP_CONFIG_KEY="mcp-config/mcp.json" +MCP_CONFIG_PATH="/home/atxuser/.aws/atx/mcp.json" + +# SOURCE_BUCKET is set as environment variable in job definition +if [ -n "$SOURCE_BUCKET" ] && aws s3 ls "s3://$SOURCE_BUCKET/$MCP_CONFIG_KEY" &>/dev/null; then + log "MCP configuration found in S3, downloading..." + + # Create directory if it doesn't exist + mkdir -p "$(dirname "$MCP_CONFIG_PATH")" + + # Download MCP config + if aws s3 cp "s3://$SOURCE_BUCKET/$MCP_CONFIG_KEY" "$MCP_CONFIG_PATH" --quiet; then + # Set proper ownership + chown atxuser:atxuser "$MCP_CONFIG_PATH" + chmod 644 "$MCP_CONFIG_PATH" + log "MCP configuration downloaded successfully to $MCP_CONFIG_PATH" + else + log "Warning: Failed to download MCP configuration, continuing without it" + fi +else + log "No MCP configuration found in S3, using default ATX settings" +fi + +# Start background credential refresh for long-running jobs (every 45 minutes) +# Only if using IAM role credentials (not explicit credentials) +if [[ -z "${USING_EXPLICIT_CREDS:-}" ]]; then + log "Starting background credential refresh (every 45 minutes) for long-running transformations..." + ( + while true; do + sleep 2700 # 45 minutes + refresh_credentials + done + ) & + REFRESH_PID=$! 
+ log "Credential refresh background process started (PID: $REFRESH_PID)" +fi + +# ============================================================================ +# CUSTOM INITIALIZATION (Optional) +# ============================================================================ +# If you've extended this container with custom configurations, they're already +# set up from your Dockerfile (e.g., .npmrc, settings.xml, .git-credentials). +# +# Add any runtime-specific initialization here if needed: +# - Additional environment variables +# - Dynamic credential retrieval +# - Custom tool initialization +# +# See container/README.md for extending the base image with private repo access. +# ============================================================================ + + +# Download source code if provided +if [[ -n "$SOURCE" ]]; then + log "Downloading source code..." + retry /app/download-source.sh "$SOURCE" + + # Get the repo/project directory name + REPO_NAME=$(cat /tmp/repo_name.txt) + PROJECT_PATH="/source/$REPO_NAME" + + # Initialize git repo if not present + cd "$PROJECT_PATH" + if [ ! -d ".git" ]; then + log "Initializing git repository..." + git init + git config user.email "${GIT_USER_EMAIL:-container@aws-transform.local}" + git config user.name "${GIT_USER_NAME:-AWS Transform Container}" + git add . 
+ git commit -m "Initial commit" + fi + + # Smart -p flag handling + # Only replace -p if it exists in the original command + if [[ "$COMMAND" == *" -p "* ]] || [[ "$COMMAND" == *" --project-path "* ]]; then + log "Detected -p flag in command, replacing with container path" + # Remove existing -p and its value + COMMAND=$(echo "$COMMAND" | sed 's/-p [^ ]*//g' | sed 's/--project-path [^ ]*//g') + # Add correct -p flag with container path + COMMAND="$COMMAND -p $PROJECT_PATH" + log "Replaced with: -p $PROJECT_PATH" + else + log "No -p flag in command, ATX will use current directory" + fi + + # Execute the ATX command + # Note: Using eval here is intentional to support complex commands with pipes/redirects + # COMMAND should only come from trusted sources (AWS Batch job definition) + log "Executing command: $COMMAND" + eval "$COMMAND" +else + # Execute command without source (e.g., atx custom def list) + # Note: Using eval here is intentional to support complex commands with pipes/redirects + # COMMAND should only come from trusted sources (AWS Batch job definition) + log "Executing command (no source code): $COMMAND" + mkdir -p /source + cd /source + eval "$COMMAND" +fi + +# Upload results if output is specified +if [[ -n "$OUTPUT" ]]; then + log "Uploading results..." + retry /app/upload-results.sh "$OUTPUT" "$S3_BUCKET" +else + log "No output specified, skipping S3 upload" +fi + +# ============================================================================ +# CUSTOM POST-TRANSFORMATION ACTIONS (Optional) +# ============================================================================ +# After transformation and S3 upload, create PR and push changes to remote. +# +# Option 1: Script-based PR creation (add git credentials in Dockerfile) +# ---------------------------------------------------------------------------- +# if [[ -n "$SOURCE" ]] && [[ -n "$GIT_REMOTE_URL" ]]; then +# log "Creating PR and pushing changes to remote..." 
+# cd "$PROJECT_PATH" +# +# # AWS Transform CLI auto-creates a branch for changes +# CURRENT_BRANCH=$(git branch --show-current) +# log "Current branch: $CURRENT_BRANCH" +# +# git add . +# git commit -m "Automated transformation by AWS Transform CLI" || true +# git push "$GIT_REMOTE_URL" "$CURRENT_BRANCH" +# +# # Create PR using GitHub CLI (install gh in Dockerfile) +# # gh pr create --title "Automated transformation" --body "..." --base main +# fi +# +# Option 2: Use AWS Transform Custom Definition with MCP (Recommended) +# ---------------------------------------------------------------------------- +# Use a Custom Transformation definition along with PR creation using MCP +# connecting to your git repos for more sophisticated workflows. +# ============================================================================ + +log "AWS Transform CLI execution completed successfully!" \ No newline at end of file diff --git a/agentic-atx-platform/container/package.json b/agentic-atx-platform/container/package.json new file mode 100644 index 0000000..29ab30e --- /dev/null +++ b/agentic-atx-platform/container/package.json @@ -0,0 +1,12 @@ +{ + "name": "aws-transform-scaled-execution-container", + "version": "1.0.0", + "description": "Node.js dependencies for AWS Transform CLI container", + "dependencies": {}, + "devDependencies": { + "yarn": "1.22.21", + "pnpm": "8.15.1", + "typescript": "5.3.3", + "ts-node": "10.9.2" + } +} diff --git a/agentic-atx-platform/container/requirements.txt b/agentic-atx-platform/container/requirements.txt new file mode 100644 index 0000000..bce3442 --- /dev/null +++ b/agentic-atx-platform/container/requirements.txt @@ -0,0 +1,6 @@ +# Python dependencies for AWS Transform CLI container +boto3==1.35.0 +botocore==1.35.0 +requests==2.31.0 +virtualenv>=20.26.6 +uv==0.5.11 diff --git a/agentic-atx-platform/container/upload-results.sh b/agentic-atx-platform/container/upload-results.sh new file mode 100644 index 0000000..7a9345a --- /dev/null +++ 
b/agentic-atx-platform/container/upload-results.sh @@ -0,0 +1,90 @@ +#!/bin/bash +set -e + +# Logging function with timestamps +log() { + echo "[$(date -u +"%Y-%m-%dT%H:%M:%SZ")] $1" +} + +OUTPUT_PATH="$1" +S3_BUCKET="$2" + +if [[ -z "$OUTPUT_PATH" ]]; then + log "Error: Output path is required" + exit 1 +fi + +if [[ -z "$S3_BUCKET" ]]; then + log "Error: S3_BUCKET is required" + exit 1 +fi + +# Remove leading/trailing slashes from OUTPUT_PATH +OUTPUT_PATH="${OUTPUT_PATH#/}" +OUTPUT_PATH="${OUTPUT_PATH%/}" + +# Construct full S3 URL +S3_BASE_URL="s3://${S3_BUCKET}/${OUTPUT_PATH}" + +# Ensure it ends with / +[[ "$S3_BASE_URL" != */ ]] && S3_BASE_URL="${S3_BASE_URL}/" + +# Find the conversation ID from ATX logs +# ATX stores logs in ~/.aws/atx/custom/ not ~/.atx/ +CONVERSATION_ID="" +if [ -d "$HOME/.aws/atx/custom" ]; then + # Get the most recent conversation directory + CONVERSATION_ID=$(ls -t "$HOME/.aws/atx/custom" 2>/dev/null | head -n 1) +fi + +# Use conversation ID or generate timestamp-based ID +if [[ -z "$CONVERSATION_ID" ]]; then + CONVERSATION_ID="job_$(date +"%Y%m%d_%H%M%S")" + log "No conversation ID found, using: $CONVERSATION_ID" +else + log "Found conversation ID: $CONVERSATION_ID" +fi + +# Create S3 structure: s3://<bucket>/<output-path>/<conversation-id>/code/ and .../logs/ +# Note: OUTPUT_PATH should end with / if it's a prefix (e.g., "transformations/") +S3_BASE="${S3_BASE_URL}${CONVERSATION_ID}" +S3_CODE="${S3_BASE}/code/" +S3_LOGS="${S3_BASE}/logs/" + +log "Uploading results to S3 structure:" +log " Base: $S3_BASE" +log " Code: $S3_CODE" +log " Logs: $S3_LOGS" + +# Upload transformed source code +if [ -d "/source" ] && [ "$(ls -A /source 2>/dev/null)" ]; then + log "Uploading transformed source code..."
+ aws s3 sync /source/ "$S3_CODE" \ + --exclude ".git/*" \ + --exclude ".env*" \ + --exclude "*.pem" \ + --exclude "*.key" \ + --exclude "node_modules/*" \ + --exclude ".aws/*" \ + --exclude "*.log" \ + --quiet + log "Code uploaded to: $S3_CODE" +else + log "No source code to upload" +fi + +# Upload ATX artifacts and logs +if [ -d "$HOME/.aws/atx" ]; then + log "Uploading ATX artifacts and logs..." + aws s3 sync "$HOME/.aws/atx/" "$S3_LOGS" --quiet + log "Logs uploaded to: $S3_LOGS" +else + log "No ATX logs found" +fi + +log "" +log "Results uploaded successfully!" +log "Conversation ID: $CONVERSATION_ID" +log "S3 Location: $S3_BASE" +log " - Code: $S3_CODE" +log " - Logs: $S3_LOGS" \ No newline at end of file diff --git a/agentic-atx-platform/deployment/1-build-and-push.sh b/agentic-atx-platform/deployment/1-build-and-push.sh new file mode 100644 index 0000000..0117b34 --- /dev/null +++ b/agentic-atx-platform/deployment/1-build-and-push.sh @@ -0,0 +1,111 @@ +#!/bin/bash +# Usage: ./1-build-and-push.sh [--rebuild] +# --rebuild: Force rebuild with --no-cache (ignore Docker layer cache) +set -e + +# Build and push container to customer's ECR + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +PROJECT_ROOT="$(dirname "$SCRIPT_DIR")" +CONFIG_FILE="$SCRIPT_DIR/config.env" + +# Colors +GREEN='\033[0;32m' +BLUE='\033[0;34m' +YELLOW='\033[1;33m' +RED='\033[0;31m' +NC='\033[0m' + +log_info() { echo -e "${BLUE}ℹ${NC} $1"; } +log_success() { echo -e "${GREEN}✓${NC} $1"; } +log_warning() { echo -e "${YELLOW}⚠${NC} $1"; } +log_error() { echo -e "${RED}✗${NC} $1"; } + +# Check for --rebuild flag +FORCE_REBUILD=false +if [ "$1" = "--rebuild" ]; then + FORCE_REBUILD=true + log_info "Force rebuild requested" +fi + +echo "==========================================" +echo "Step 1: Build and Push Container to ECR" +echo "==========================================" +echo "" + +# Load configuration if exists +if [ -f "$CONFIG_FILE" ]; then + log_info "Loading configuration 
from config.env" + source "$CONFIG_FILE" +fi + +# Detect or use configured AWS account ID +if [ -z "$AWS_ACCOUNT_ID" ]; then + AWS_ACCOUNT_ID=$(aws sts get-caller-identity --query Account --output text 2>/dev/null) + if [ -z "$AWS_ACCOUNT_ID" ]; then + log_error "Failed to detect AWS account ID. Is AWS CLI configured?" + exit 1 + fi + log_info "Auto-detected AWS Account: $AWS_ACCOUNT_ID" +else + log_info "Using configured AWS Account: $AWS_ACCOUNT_ID" +fi + +AWS_REGION="${AWS_REGION:-us-east-1}" +ECR_REPO_NAME="${ECR_REPO_NAME:-aws-transform-cli}" +ECR_URI="${AWS_ACCOUNT_ID}.dkr.ecr.${AWS_REGION}.amazonaws.com/${ECR_REPO_NAME}" + +log_info "Region: $AWS_REGION" +log_info "ECR Repository: $ECR_REPO_NAME" +echo "" + +# Create ECR repository if it doesn't exist +log_info "Creating ECR repository..." +aws ecr describe-repositories --repository-names "$ECR_REPO_NAME" --region "$AWS_REGION" &>/dev/null || { + aws ecr create-repository \ + --repository-name "$ECR_REPO_NAME" \ + --region "$AWS_REGION" >/dev/null + log_success "ECR repository created" +} +log_success "ECR repository ready: $ECR_URI" +echo "" + +# Build container from Dockerfile +log_info "Building container from Dockerfile..." +if [ "$FORCE_REBUILD" = true ]; then + log_info "Using --no-cache for clean build" + cd "$PROJECT_ROOT/container" + docker build --no-cache -t "$ECR_REPO_NAME:latest" . +else + log_info "Building with Docker layer cache (use --rebuild to force clean build)" + cd "$PROJECT_ROOT/container" + docker build -t "$ECR_REPO_NAME:latest" . +fi +log_success "Container built" + +echo "" +log_info "Pushing to ECR..." 
+ +# Login to ECR +aws ecr get-login-password --region "$AWS_REGION" | \ + docker login --username AWS --password-stdin "${AWS_ACCOUNT_ID}.dkr.ecr.${AWS_REGION}.amazonaws.com" + +# Tag and push +docker tag "$ECR_REPO_NAME:latest" "$ECR_URI:latest" +docker push "$ECR_URI:latest" + +log_success "Container pushed to ECR" +echo "" + +# Save ECR URI for step 2 +echo "$ECR_URI:latest" > "$SCRIPT_DIR/.ecr-uri.txt" + +echo "==========================================" +echo "Step 1 Complete!" +echo "==========================================" +echo "" +log_success "Container available at: $ECR_URI:latest" +echo "" +echo "Next step:" +echo " ./2-deploy-infrastructure.sh" +echo "" diff --git a/agentic-atx-platform/deployment/2-deploy-infrastructure.sh b/agentic-atx-platform/deployment/2-deploy-infrastructure.sh new file mode 100644 index 0000000..18fbbc0 --- /dev/null +++ b/agentic-atx-platform/deployment/2-deploy-infrastructure.sh @@ -0,0 +1,627 @@ +#!/bin/bash +set -e + +# Automated AWS Transform CLI Deployment +# Reads configuration from config.env and deploys without user interaction + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +PROJECT_ROOT="$(dirname "$SCRIPT_DIR")" +CONFIG_FILE="$SCRIPT_DIR/config.env" + +# Colors +GREEN='\033[0;32m' +BLUE='\033[0;34m' +YELLOW='\033[1;33m' +RED='\033[0;31m' +NC='\033[0m' + +log_info() { echo -e "${BLUE}ℹ${NC} $1"; } +log_success() { echo -e "${GREEN}✓${NC} $1"; } +log_warning() { echo -e "${YELLOW}⚠${NC} $1"; } +log_error() { echo -e "${RED}✗${NC} $1"; } + +echo "==========================================" +echo "AWS Transform CLI - Automated Deployment" +echo "==========================================" +echo "" + +# Check prerequisites +log_info "Checking prerequisites..." + +# Check Docker +if !
 command -v docker &> /dev/null; then + log_error "Docker is not installed or not in PATH" + echo "" + echo "Please install Docker:" + echo " - Windows/Mac: https://www.docker.com/products/docker-desktop" + echo " - Linux: https://docs.docker.com/engine/install/" + exit 1 +fi + +if ! docker info &> /dev/null; then + log_error "Docker is not running" + echo "" + echo "Please start Docker Desktop or Docker daemon" + exit 1 +fi + +# Check AWS CLI +if ! command -v aws &> /dev/null; then + log_error "AWS CLI is not installed or not in PATH" + echo "" + echo "Please install AWS CLI v2:" + echo " https://docs.aws.amazon.com/cli/latest/userguide/getting-started-install.html" + exit 1 +fi + +log_success "Prerequisites check passed" +echo "" + +# Check for config file +if [ ! -f "$CONFIG_FILE" ]; then + log_error "Configuration file not found: $CONFIG_FILE" + echo "" + echo "Please create config.env from template:" + echo " cp config.env.template config.env" + echo " # Edit config.env with your settings" + echo " ./2-deploy-infrastructure.sh" + exit 1 +fi + +# Load configuration +log_info "Loading configuration from $CONFIG_FILE" +source "$CONFIG_FILE" + +# Auto-detect AWS account ID if not set +if [ -z "$AWS_ACCOUNT_ID" ]; then + AWS_ACCOUNT_ID=$(aws sts get-caller-identity --query Account --output text) + log_info "Auto-detected AWS Account: $AWS_ACCOUNT_ID" +fi + +# Set defaults +AWS_REGION="${AWS_REGION:-us-east-1}" +ECR_REPO_NAME="${ECR_REPO_NAME:-aws-transform-cli}" +S3_BUCKET_NAME="${S3_BUCKET_NAME:-atx-custom-output}" +COMPUTE_ENV_NAME="${COMPUTE_ENV_NAME:-atx-fargate-compute}" +JOB_QUEUE_NAME="${JOB_QUEUE_NAME:-atx-job-queue}" +JOB_DEFINITION_NAME="${JOB_DEFINITION_NAME:-atx-transform-job}" +FARGATE_VCPU="${FARGATE_VCPU:-2}" +FARGATE_MEMORY="${FARGATE_MEMORY:-4096}" +JOB_TIMEOUT="${JOB_TIMEOUT:-43200}" +JOB_RETRY_ATTEMPTS="${JOB_RETRY_ATTEMPTS:-3}" +ENABLE_ECR_SCANNING="${ENABLE_ECR_SCANNING:-true}" +LOG_RETENTION_DAYS="${LOG_RETENTION_DAYS:-30}" + +# Make S3 bucket name 
unique if needed +if [[ "$S3_BUCKET_NAME" != *"$AWS_ACCOUNT_ID"* ]]; then + S3_BUCKET_NAME="${S3_BUCKET_NAME}-${AWS_ACCOUNT_ID}" + log_info "S3 bucket name: $S3_BUCKET_NAME (added account ID for uniqueness)" +fi + +log_success "Configuration loaded" +echo "" + +# ============================================ +# STEP 1: Verify Container Dockerfile +# ============================================ +echo "==========================================" +echo "STEP 1: Verify Container Dockerfile" +echo "==========================================" +echo "" + +cd "$PROJECT_ROOT/container" + +if [ ! -f "Dockerfile" ]; then + log_error "Dockerfile not found in container/ directory" + exit 1 +fi + +log_success "Dockerfile found" +echo "" + +# ============================================ +# STEP 2: Network Setup +# ============================================ +echo "==========================================" +echo "STEP 2: Network Configuration" +echo "==========================================" +echo "" + +# Auto-detect or create network resources +if [ -z "$VPC_ID" ]; then + log_info "No VPC specified, detecting default VPC..." + VPC_ID=$(aws ec2 describe-vpcs --filters "Name=isDefault,Values=true" --query 'Vpcs[0].VpcId' --output text 2>/dev/null) + + if [[ "$VPC_ID" == "None" || -z "$VPC_ID" ]]; then + log_warning "No default VPC found" + log_error "Please specify VPC_ID in config.env or create a default VPC" + exit 1 + fi + + log_success "Using default VPC: $VPC_ID" +fi + +# Auto-detect public subnets if not specified +if [ -z "$SUBNET_IDS" ]; then + log_info "No subnets specified, auto-detecting public subnets..." 
+ + SUBNETS=$(aws ec2 describe-subnets \ + --filters "Name=vpc-id,Values=$VPC_ID" "Name=map-public-ip-on-launch,Values=true" \ + --query 'Subnets[*].SubnetId' --output text 2>/dev/null) + + if [ -z "$SUBNETS" ]; then + log_error "No public subnets found in VPC $VPC_ID" + log_error "Please specify SUBNET_IDS in config.env" + exit 1 + fi + + # Use first 2 subnets + SUBNET_IDS=$(echo "$SUBNETS" | tr '\t' ' ' | tr ' ' '\n' | head -2 | tr '\n' ',' | sed 's/,$//') + log_success "Auto-detected public subnets: $SUBNET_IDS" +fi + +# Create security group if not specified +if [ -z "$SECURITY_GROUP_ID" ]; then + log_info "No security group specified, checking for existing..." + + EXISTING_SG=$(aws ec2 describe-security-groups \ + --filters "Name=group-name,Values=atx-batch-sg" "Name=vpc-id,Values=$VPC_ID" \ + --query 'SecurityGroups[0].GroupId' --output text 2>/dev/null) + + if [[ "$EXISTING_SG" != "None" && -n "$EXISTING_SG" ]]; then + SECURITY_GROUP_ID="$EXISTING_SG" + log_success "Using existing security group: $SECURITY_GROUP_ID" + else + log_info "Creating security group..." 
+ SECURITY_GROUP_ID=$(aws ec2 create-security-group \ + --group-name atx-batch-sg \ + --description "AWS Transform Batch Security Group" \ + --vpc-id "$VPC_ID" \ + --query 'GroupId' --output text) + + # Allow HTTPS outbound traffic (required for AWS Transform, S3, ECR) + aws ec2 authorize-security-group-egress \ + --group-id "$SECURITY_GROUP_ID" \ + --protocol tcp \ + --port 443 \ + --cidr 0.0.0.0/0 2>/dev/null || true + + log_success "Created security group: $SECURITY_GROUP_ID" + fi +fi + +log_success "Network configuration complete" +echo " VPC: $VPC_ID" +echo " Subnets: $SUBNET_IDS (public)" +echo " Security Group: $SECURITY_GROUP_ID" +echo "" + +# ============================================ +# STEP 3: Get Container Image URI +# ============================================ +echo "==========================================" +echo "STEP 3: Get Container Image URI" +echo "==========================================" +echo "" + +# Check if ECR URI file exists from step 1 +ECR_URI_FILE="$SCRIPT_DIR/.ecr-uri.txt" +if [ ! -f "$ECR_URI_FILE" ]; then + log_error "ECR URI file not found. Did you run step 1?" 
+ echo "" + echo "Please run: ./1-build-and-push.sh" + exit 1 +fi + +ECR_URI=$(cat "$ECR_URI_FILE") +log_info "Using container image: $ECR_URI" +log_success "Container image ready" +echo "" + +# STEP 4: Create S3 Bucket +# ============================================ +echo "==========================================" +echo "STEP 4: Create S3 Bucket" +echo "==========================================" +echo "" + +aws s3 ls "s3://$S3_BUCKET_NAME" &>/dev/null || { + log_info "Creating S3 bucket: $S3_BUCKET_NAME" + aws s3 mb "s3://$S3_BUCKET_NAME" --region "$AWS_REGION" + + aws s3api put-bucket-versioning \ + --bucket "$S3_BUCKET_NAME" \ + --versioning-configuration Status=Enabled + + aws s3api put-bucket-encryption \ + --bucket "$S3_BUCKET_NAME" \ + --server-side-encryption-configuration '{ + "Rules": [{ + "ApplyServerSideEncryptionByDefault": { + "SSEAlgorithm": "AES256" + } + }] + }' + + # Block all public access + aws s3api put-public-access-block \ + --bucket "$S3_BUCKET_NAME" \ + --public-access-block-configuration \ + BlockPublicAcls=true,IgnorePublicAcls=true,BlockPublicPolicy=true,RestrictPublicBuckets=true + + log_success "S3 bucket created with versioning, encryption, and public access blocks" +} + +log_success "S3 bucket ready: s3://$S3_BUCKET_NAME" +echo "" + +# ============================================ +# STEP 5: Create IAM Roles +# ============================================ +echo "==========================================" +echo "STEP 5: Create IAM Roles" +echo "==========================================" +echo "" + +# Job Role +aws iam get-role --role-name ATXBatchJobRole &>/dev/null || { + log_info "Creating IAM job role..." 
+ + cat > /tmp/trust-policy.json << 'EOF' +{ + "Version": "2012-10-17", + "Statement": [{ + "Effect": "Allow", + "Principal": {"Service": "ecs-tasks.amazonaws.com"}, + "Action": "sts:AssumeRole" + }] +} +EOF + + aws iam create-role \ + --role-name ATXBatchJobRole \ + --assume-role-policy-document file:///tmp/trust-policy.json >/dev/null + + aws iam attach-role-policy \ + --role-name ATXBatchJobRole \ + --policy-arn arn:aws:iam::aws:policy/AWSTransformCustomFullAccess + + cat > /tmp/s3-policy.json << EOF +{ + "Version": "2012-10-17", + "Statement": [ + { + "Effect": "Allow", + "Action": ["s3:GetObject", "s3:ListBucket"], + "Resource": [ + "arn:aws:s3:::$S3_BUCKET_NAME/*", + "arn:aws:s3:::$S3_BUCKET_NAME", + "arn:aws:s3:::atx-source-code-${AWS_ACCOUNT_ID}/*", + "arn:aws:s3:::atx-source-code-${AWS_ACCOUNT_ID}" + ] + }, + { + "Effect": "Allow", + "Action": ["s3:PutObject"], + "Resource": [ + "arn:aws:s3:::$S3_BUCKET_NAME/*" + ] + } + ] +} +EOF + + aws iam put-role-policy \ + --role-name ATXBatchJobRole \ + --policy-name S3BucketAccess \ + --policy-document file:///tmp/s3-policy.json + + log_success "IAM job role created" +} + +# Execution Role +aws iam get-role --role-name ATXBatchExecutionRole &>/dev/null || { + log_info "Creating IAM execution role..." + + aws iam create-role \ + --role-name ATXBatchExecutionRole \ + --assume-role-policy-document file:///tmp/trust-policy.json >/dev/null + + aws iam attach-role-policy \ + --role-name ATXBatchExecutionRole \ + --policy-arn arn:aws:iam::aws:policy/service-role/AmazonECSTaskExecutionRolePolicy + + log_success "IAM execution role created" +} + +log_info "Waiting for IAM roles to propagate (this may take up to 30 seconds)..." 
+sleep 15 +log_success "IAM roles ready" +echo "" + +# ============================================ +# STEP 6: Create CloudWatch Log Group +# ============================================ +echo "==========================================" +echo "STEP 6: Create CloudWatch Log Group" +echo "==========================================" +echo "" + +aws logs describe-log-groups --log-group-name-prefix "/aws/batch/atx-transform" --region "$AWS_REGION" | grep -q "atx-transform" || { + log_info "Creating CloudWatch log group..." + aws logs create-log-group \ + --log-group-name /aws/batch/atx-transform \ + --region "$AWS_REGION" + + aws logs put-retention-policy \ + --log-group-name /aws/batch/atx-transform \ + --retention-in-days "$LOG_RETENTION_DAYS" \ + --region "$AWS_REGION" + + log_success "Log group created" +} + +log_success "CloudWatch log group ready" +echo "" + +# ============================================ +# STEP 7: Create Batch Resources +# ============================================ +echo "==========================================" +echo "STEP 7: Create AWS Batch Resources" +echo "==========================================" +echo "" + +# Convert subnet IDs to JSON array +SUBNET_ARRAY=$(echo "$SUBNET_IDS" | sed 's/,/","/g' | sed 's/^/"/' | sed 's/$/"/') +SG_ARRAY="\"$SECURITY_GROUP_ID\"" + +# Create Compute Environment +COMPUTE_EXISTS=$(aws batch describe-compute-environments --compute-environments "$COMPUTE_ENV_NAME" --region "$AWS_REGION" --query "length(computeEnvironments)" --output text 2>/dev/null) +if [ "$COMPUTE_EXISTS" = "0" ] || [ -z "$COMPUTE_EXISTS" ]; then + log_info "Creating compute environment..." 
+ + COMPUTE_RESOURCES="{ + \"type\": \"FARGATE\", + \"maxvCpus\": 256, + \"subnets\": [$SUBNET_ARRAY], + \"securityGroupIds\": [$SG_ARRAY] + }" + + aws batch create-compute-environment \ + --compute-environment-name "$COMPUTE_ENV_NAME" \ + --type MANAGED \ + --state ENABLED \ + --compute-resources "$COMPUTE_RESOURCES" \ + --region "$AWS_REGION" >/dev/null + + log_info "Waiting for compute environment to become VALID..." + while true; do + STATUS=$(aws batch describe-compute-environments \ + --compute-environments "$COMPUTE_ENV_NAME" \ + --region "$AWS_REGION" \ + --query 'computeEnvironments[0].status' \ + --output text) + + if [ "$STATUS" = "VALID" ]; then + break + fi + sleep 5 + done + + log_success "Compute environment created" +fi + +# Create Job Queue +QUEUE_EXISTS=$(aws batch describe-job-queues --job-queues "$JOB_QUEUE_NAME" --region "$AWS_REGION" --query "length(jobQueues)" --output text 2>/dev/null) +if [ "$QUEUE_EXISTS" = "0" ] || [ -z "$QUEUE_EXISTS" ]; then + log_info "Creating job queue..." + + aws batch create-job-queue \ + --job-queue-name "$JOB_QUEUE_NAME" \ + --state ENABLED \ + --priority 1 \ + --compute-environment-order "[{ + \"order\": 1, + \"computeEnvironment\": \"$COMPUTE_ENV_NAME\" + }]" \ + --region "$AWS_REGION" >/dev/null + + log_success "Job queue created" +fi + +# Register Job Definition +log_info "Registering job definition..." 
+ +cat > /tmp/job-definition.json << EOF +{ + "jobDefinitionName": "$JOB_DEFINITION_NAME", + "type": "container", + "platformCapabilities": ["FARGATE"], + "timeout": { + "attemptDurationSeconds": $JOB_TIMEOUT + }, + "retryStrategy": { + "attempts": $JOB_RETRY_ATTEMPTS, + "evaluateOnExit": [ + {"action": "RETRY", "onStatusReason": "Task failed to start"}, + {"action": "EXIT", "onExitCode": "0"} + ] + }, + "containerProperties": { + "image": "$ECR_URI", + "command": [], + "jobRoleArn": "arn:aws:iam::$AWS_ACCOUNT_ID:role/ATXBatchJobRole", + "executionRoleArn": "arn:aws:iam::$AWS_ACCOUNT_ID:role/ATXBatchExecutionRole", + "resourceRequirements": [ + {"type": "VCPU", "value": "$FARGATE_VCPU"}, + {"type": "MEMORY", "value": "$FARGATE_MEMORY"} + ], + "fargatePlatformConfiguration": { + "platformVersion": "LATEST" + }, + "networkConfiguration": { + "assignPublicIp": "ENABLED" + }, + "environment": [ + {"name": "S3_BUCKET", "value": "$S3_BUCKET_NAME"}, + {"name": "SOURCE_BUCKET", "value": "atx-source-code-${AWS_ACCOUNT_ID}"}, + {"name": "AWS_DEFAULT_REGION", "value": "$AWS_REGION"}, + {"name": "ATX_SHELL_TIMEOUT", "value": "$JOB_TIMEOUT"} + ], + "logConfiguration": { + "logDriver": "awslogs", + "options": { + "awslogs-group": "/aws/batch/atx-transform", + "awslogs-region": "$AWS_REGION", + "awslogs-stream-prefix": "atx" + } + } + } +} +EOF + +aws batch register-job-definition \ + --cli-input-json file:///tmp/job-definition.json \ + --region "$AWS_REGION" >/dev/null + +log_success "Job definition registered" +echo "" + +# ========================================== +# STEP 8: Create CloudWatch Dashboard +# ========================================== +echo "==========================================" +echo "STEP 8: Create CloudWatch Dashboard" +echo "==========================================" +echo "" + +log_info "Creating CloudWatch dashboard for monitoring..." 
+ +DASHBOARD_NAME="ATX-Transform-CLI-Dashboard" + +cat > /tmp/dashboard.json << DASHBOARD_EOF + +{ + "widgets": [ + { + "type": "log", + "x": 0, + "y": 0, + "width": 24, + "height": 6, + "properties": { + "query": "SOURCE '/aws/batch/atx-transform'\n| filter @message like /Results uploaded successfully/ or @message like /Command failed after/\n| stats sum(@message like /Results uploaded successfully/) as Completed, sum(@message like /Command failed after/) as Failed by bin(1h)", + "region": "us-east-1", + "title": "📊 Job Completion Rate (Hourly)", + "view": "bar" + } + }, + { + "type": "log", + "x": 0, + "y": 6, + "width": 24, + "height": 8, + "properties": { + "query": "SOURCE '/aws/batch/atx-transform'\n| parse @message 'Output: transformations/*/' as jobName\n| stats latest(jobName) as job, latest(@timestamp) as lastActivity, latest(@message) as lastMessage by @logStream\n| sort lastActivity desc\n| limit 25", + "region": "us-east-1", + "title": "📋 Recent Jobs (Job Name, Time, Last Message, Log Stream)" + } + }, + { + "type": "metric", + "x": 0, + "y": 14, + "width": 12, + "height": 6, + "properties": { + "metrics": [ + ["AWS/ApiGateway", "Count", {"stat": "Sum"}], + [".", "4XXError", {"stat": "Sum"}], + [".", "5XXError", {"stat": "Sum"}] + ], + "view": "timeSeries", + "region": "us-east-1", + "title": "🔌 API Gateway", + "period": 300 + } + }, + { + "type": "metric", + "x": 12, + "y": 14, + "width": 12, + "height": 6, + "properties": { + "metrics": [ + ["AWS/Lambda", "Invocations", "FunctionName", "atx-async-invoke-agent"] + ], + "view": "timeSeries", + "region": "us-east-1", + "title": "⚡ Lambda Invocations", + "period": 300, + "stat": "Sum" + } + }, + { + "type": "metric", + "x": 0, + "y": 20, + "width": 24, + "height": 6, + "properties": { + "metrics": [ + ["AWS/Lambda", "Duration", "FunctionName", "atx-async-invoke-agent", {"stat": "Average"}] + ], + "view": "timeSeries", + "region": "us-east-1", + "title": "⚡ Lambda Duration (ms)", + "period": 300 + } + } + 
] +} + +DASHBOARD_EOF + +aws cloudwatch put-dashboard \ + --dashboard-name "$DASHBOARD_NAME" \ + --dashboard-body file:///tmp/dashboard.json \ + --region "$AWS_REGION" >/dev/null + +rm -f /tmp/dashboard.json + +log_success "CloudWatch dashboard created: $DASHBOARD_NAME" +echo "" + +# Cleanup temp files +rm -f /tmp/trust-policy.json /tmp/s3-policy.json /tmp/job-definition.json + +# ============================================ +# Deployment Complete +# ============================================ +echo "==========================================" +echo "Deployment Complete!" +echo "==========================================" +echo "" +log_success "All resources created successfully" +echo "" +echo "Resources:" +echo " • ECR Repository: $ECR_URI" +echo " • S3 Bucket: s3://$S3_BUCKET_NAME" +echo " • Job Queue: $JOB_QUEUE_NAME" +echo " • Job Definition: $JOB_DEFINITION_NAME:1" +echo " • CloudWatch Logs: /aws/batch/atx-transform" +echo " • Dashboard: $DASHBOARD_NAME" +echo "" +echo "View Dashboard:" +echo " https://console.aws.amazon.com/cloudwatch/home?region=$AWS_REGION#dashboards:name=$DASHBOARD_NAME" +echo "" +echo "Test your deployment:" +echo " aws batch submit-job \\" +echo " --job-name test \\" +echo " --job-queue $JOB_QUEUE_NAME \\" +echo " --job-definition $JOB_DEFINITION_NAME:1 \\" +echo " --container-overrides '{\"command\":[\"--command\",\"atx custom def list\"]}'" +echo "" diff --git a/agentic-atx-platform/deployment/README.md b/agentic-atx-platform/deployment/README.md new file mode 100644 index 0000000..d7e9ceb --- /dev/null +++ b/agentic-atx-platform/deployment/README.md @@ -0,0 +1,52 @@ +# Deployment Guide + +2-step deployment for the base infrastructure (Batch + S3). The orchestrator and UI are deployed separately. 
+ +## Prerequisites + +- Docker installed and running +- AWS CLI v2.13+ configured +- Git, Bash + +## Configuration + +```bash +cd deployment +cp config.env.template config.env +# Edit config.env if you want custom resource names (optional) +``` + +## Quick Start + +```bash +# Step 0: Check prerequisites +./check-prereqs.sh + +# Step 1: Build and push ATX CLI container +./1-build-and-push.sh + +# Step 2: Deploy infrastructure (Batch, S3, VPC, IAM) +./2-deploy-infrastructure.sh +``` + +**Time:** 20-30 minutes total + +After this, deploy the orchestrator and UI following the main [README.md](../README.md). + +## Cleanup + +```bash +./cleanup.sh +``` + +## IAM Permissions + +Generate a least-privilege policy: +```bash +./generate-custom-policy.sh +aws iam create-policy --policy-name ATXCustomDeploymentPolicy --policy-document file://iam-custom-policy.json +``` + +## Troubleshooting + +See [../docs/TROUBLESHOOTING.md](../docs/TROUBLESHOOTING.md). diff --git a/agentic-atx-platform/deployment/check-prereqs.sh b/agentic-atx-platform/deployment/check-prereqs.sh new file mode 100644 index 0000000..5d5df42 --- /dev/null +++ b/agentic-atx-platform/deployment/check-prereqs.sh @@ -0,0 +1,170 @@ +#!/bin/bash +# Prerequisites Check Script for AWS Transform CLI Deployment + +echo "==========================================" +echo "Prerequisites Check" +echo "==========================================" +echo "" + +ALL_GOOD=true + +# Check Docker +echo "1. Checking Docker..." +if command -v docker &> /dev/null; then + DOCKER_VERSION=$(docker --version) + echo " ✅ Docker installed: $DOCKER_VERSION" + + if docker info &> /dev/null 2>&1; then + echo " ✅ Docker daemon is running" + else + echo " ❌ Docker daemon is NOT running" + echo " → Start Docker Desktop or run: sudo systemctl start docker" + ALL_GOOD=false + fi +else + echo " ❌ Docker is NOT installed" + echo " → Install from: https://www.docker.com/products/docker-desktop" + ALL_GOOD=false +fi +echo "" + +# Check AWS CLI +echo "2. 
Checking AWS CLI..." +if command -v aws &> /dev/null; then + AWS_VERSION=$(aws --version 2>&1) + echo " ✅ AWS CLI installed: $AWS_VERSION" + + # Check if it's v2 + if [[ "$AWS_VERSION" == *"aws-cli/2"* ]]; then + echo " ✅ AWS CLI v2 detected" + else + echo " ⚠️ AWS CLI v1 detected - v2 is recommended" + echo " → Upgrade: https://docs.aws.amazon.com/cli/latest/userguide/getting-started-install.html" + fi + + # Check credentials + if aws sts get-caller-identity &> /dev/null 2>&1; then + ACCOUNT_ID=$(aws sts get-caller-identity --query Account --output text 2>/dev/null) + USER_ARN=$(aws sts get-caller-identity --query Arn --output text 2>/dev/null) + echo " ✅ AWS credentials configured" + echo " Account: $ACCOUNT_ID" + echo " Identity: $USER_ARN" + else + echo " ❌ AWS credentials NOT configured" + echo " → Run: aws configure" + ALL_GOOD=false + fi +else + echo " ❌ AWS CLI is NOT installed" + echo " → Install from: https://docs.aws.amazon.com/cli/latest/userguide/getting-started-install.html" + ALL_GOOD=false +fi +echo "" + +# Check Git +echo "3. Checking Git..." +if command -v git &> /dev/null; then + GIT_VERSION=$(git --version) + echo " ✅ Git installed: $GIT_VERSION" +else + echo " ❌ Git is NOT installed" + echo " → Install from: https://git-scm.com/downloads" + ALL_GOOD=false +fi +echo "" + +# Check Bash +echo "4. Checking Bash..." +if command -v bash &> /dev/null; then + BASH_VERSION=$(bash --version | head -n1) + echo " ✅ Bash installed: $BASH_VERSION" +else + echo " ❌ Bash is NOT installed" + ALL_GOOD=false +fi +echo "" + +# Check Node.js (for CDK deployment) +echo "5. Checking Node.js (for CDK deployment)..." +if command -v node &> /dev/null; then + NODE_VERSION=$(node --version) + echo " ✅ Node.js installed: $NODE_VERSION" + + # Check if it's v18+ + NODE_MAJOR=$(node --version | cut -d'.' 
-f1 | sed 's/v//') + if [ "$NODE_MAJOR" -ge 18 ]; then + echo " ✅ Node.js version is 18+ (required for CDK)" + else + echo " ⚠️ Node.js version is below 18 - CDK requires 18+" + echo " → Upgrade: https://nodejs.org/" + fi +else + echo " ⚠️ Node.js is NOT installed (required for CDK deployment)" + echo " → Install from: https://nodejs.org/" + echo " → Or skip if using bash deployment only" +fi +echo "" + +# Check AWS CDK CLI (for CDK deployment) +echo "6. Checking AWS CDK CLI (for CDK deployment)..." +if command -v cdk &> /dev/null || npx cdk --version &> /dev/null 2>&1; then + if command -v cdk &> /dev/null; then + CDK_VERSION=$(cdk --version 2>&1) + else + CDK_VERSION=$(npx cdk --version 2>&1) + fi + echo " ✅ AWS CDK installed: $CDK_VERSION" +else + echo " ⚠️ AWS CDK is NOT installed (required for CDK deployment)" + echo " → Install: npm install -g aws-cdk" + echo " → Or skip if using bash deployment only" +fi +echo "" + +# Check VPC and Subnets (if AWS CLI is configured) +if command -v aws &> /dev/null && aws sts get-caller-identity &> /dev/null 2>&1; then + echo "7. Checking AWS Network Resources..." 
+ + # Check for VPC + VPC_COUNT=$(aws ec2 describe-vpcs --query 'length(Vpcs)' --output text 2>/dev/null || echo "0") + if [ "$VPC_COUNT" -gt 0 ]; then + echo " ✅ Found $VPC_COUNT VPC(s)" + + # Check for default VPC + DEFAULT_VPC=$(aws ec2 describe-vpcs --filters "Name=isDefault,Values=true" --query 'Vpcs[0].VpcId' --output text 2>/dev/null) + if [[ "$DEFAULT_VPC" != "None" && -n "$DEFAULT_VPC" ]]; then + echo " ✅ Default VPC exists: $DEFAULT_VPC" + fi + else + echo " ⚠️ No VPCs found" + echo " → Create a VPC or use default VPC" + fi + + # Check for public subnets + SUBNET_COUNT=$(aws ec2 describe-subnets --filters "Name=map-public-ip-on-launch,Values=true" --query 'length(Subnets)' --output text 2>/dev/null || echo "0") + if [ "$SUBNET_COUNT" -ge 2 ]; then + echo " ✅ Found $SUBNET_COUNT public subnet(s)" + elif [ "$SUBNET_COUNT" -eq 1 ]; then + echo " ⚠️ Found only 1 public subnet (2+ recommended for high availability)" + else + echo " ⚠️ No public subnets found" + echo " → Create public subnets or the deployment will fail" + fi + echo "" +fi + +# Final summary +echo "==========================================" +if [ "$ALL_GOOD" = true ]; then + echo "✅ All prerequisites met!" + echo "" + echo "Next steps:" + echo " 1. Configure: cp config.env.template config.env" + echo " 2. Setup IAM: ./generate-custom-policy.sh (see README.md)" + echo " 3. Deploy: ./1-build-and-push.sh && ./2-deploy-infrastructure.sh" +else + echo "❌ Some prerequisites are missing" + echo "" + echo "Please install missing components and try again." 
+fi +echo "==========================================" diff --git a/agentic-atx-platform/deployment/cleanup.sh b/agentic-atx-platform/deployment/cleanup.sh new file mode 100644 index 0000000..dc7ff33 --- /dev/null +++ b/agentic-atx-platform/deployment/cleanup.sh @@ -0,0 +1,102 @@ +#!/bin/bash +# Cleanup script - deletes all AWS Transform CLI resources + +REGION="${AWS_REGION:-us-east-1}" + +echo "==========================================" +echo "AWS Transform CLI - Cleanup" +echo "==========================================" +echo "" +echo "This will delete ALL resources created by the deployment." +echo "" +read -p "Are you sure? (yes/no): " CONFIRM + +if [ "$CONFIRM" != "yes" ]; then + echo "Cleanup cancelled" + exit 0 +fi + +echo "" +echo "Deleting resources..." + +# Disable and delete job queue +echo "1. Disabling job queue..." +aws batch update-job-queue --job-queue atx-job-queue --state DISABLED --region $REGION 2>/dev/null +sleep 10 + +# Disable and delete compute environment +echo "2. Disabling compute environment..." +aws batch update-compute-environment --compute-environment atx-fargate-compute --state DISABLED --region $REGION 2>/dev/null +sleep 15 + +echo "3. Deleting job queue..." +aws batch delete-job-queue --job-queue atx-job-queue --region $REGION 2>/dev/null +sleep 10 + +echo "4. Deleting compute environment..." +aws batch delete-compute-environment --compute-environment atx-fargate-compute --region $REGION 2>/dev/null +sleep 10 + +# Deregister job definitions +echo "5. Deregistering job definitions..." +aws batch describe-job-definitions --job-definition-name atx-transform-job --status ACTIVE --region $REGION --query 'jobDefinitions[*].revision' --output text 2>/dev/null | tr '\t' '\n' | while read rev; do + aws batch deregister-job-definition --job-definition atx-transform-job:$rev --region $REGION 2>/dev/null +done + +# Delete security group +echo "6. Deleting security group..." 
+aws ec2 delete-security-group --group-name atx-batch-sg --region $REGION 2>/dev/null + +# Delete IAM roles +echo "7. Deleting IAM roles..." +# ATXBatchJobRole +aws iam detach-role-policy --role-name ATXBatchJobRole --policy-arn arn:aws:iam::aws:policy/AWSTransformCustomFullAccess 2>/dev/null +aws iam delete-role-policy --role-name ATXBatchJobRole --policy-name S3Access 2>/dev/null +aws iam delete-role --role-name ATXBatchJobRole 2>/dev/null + +# ATXBatchExecutionRole +aws iam detach-role-policy --role-name ATXBatchExecutionRole --policy-arn arn:aws:iam::aws:policy/service-role/AmazonECSTaskExecutionRolePolicy 2>/dev/null +aws iam delete-role --role-name ATXBatchExecutionRole 2>/dev/null + +# ATXApiLambdaRole +aws iam detach-role-policy --role-name ATXApiLambdaRole --policy-arn arn:aws:iam::aws:policy/service-role/AWSLambdaBasicExecutionRole 2>/dev/null +aws iam delete-role-policy --role-name ATXApiLambdaRole --policy-name ATXApiPolicy 2>/dev/null +aws iam delete-role-policy --role-name ATXApiLambdaRole --policy-name LambdaSelfInvoke 2>/dev/null +aws iam delete-role-policy --role-name ATXApiLambdaRole --policy-name S3OutputBucketAccess 2>/dev/null +aws iam delete-role --role-name ATXApiLambdaRole 2>/dev/null + +# Delete Lambda functions +echo "8. Deleting Lambda functions..." +for func in atx-async-invoke-agent; do + aws lambda delete-function --function-name $func --region $REGION 2>/dev/null +done + +# Delete API Gateway +echo "9. Deleting API Gateway..." +API_ID=$(aws apigateway get-rest-apis --region $REGION --query "items[?name=='atx-ui-api-v2'].id" --output text 2>/dev/null) +if [ -n "$API_ID" ]; then + aws apigateway delete-rest-api --rest-api-id $API_ID --region $REGION 2>/dev/null +fi + +# Delete CloudWatch dashboard +echo "10. Deleting CloudWatch dashboard..." +aws cloudwatch delete-dashboards --dashboard-names ATX-Transform-CLI-Dashboard --region $REGION 2>/dev/null + +# Delete CloudWatch log group +echo "11. Deleting CloudWatch log group..."
+aws logs delete-log-group --log-group-name /aws/batch/atx-transform --region $REGION 2>/dev/null + +# Delete S3 buckets (empty first) +echo "12. Deleting S3 buckets..." +ACCOUNT_ID=$(aws sts get-caller-identity --query Account --output text 2>/dev/null) +aws s3 rm s3://atx-source-code-$ACCOUNT_ID --recursive 2>/dev/null +aws s3 rb s3://atx-source-code-$ACCOUNT_ID 2>/dev/null +aws s3 rm s3://atx-custom-output-$ACCOUNT_ID --recursive 2>/dev/null +aws s3 rb s3://atx-custom-output-$ACCOUNT_ID 2>/dev/null + +# Delete ECR repository +echo "13. Deleting ECR repository..." +aws ecr delete-repository --repository-name aws-transform-cli --force --region $REGION 2>/dev/null + +echo "" +echo "✓ Cleanup complete!" diff --git a/agentic-atx-platform/deployment/config.env.template b/agentic-atx-platform/deployment/config.env.template new file mode 100644 index 0000000..b96106b --- /dev/null +++ b/agentic-atx-platform/deployment/config.env.template @@ -0,0 +1,79 @@ +# ATX Transform Platform - Configuration +# Copy to config.env and customize as needed. All settings have defaults. 
+ +# ============================================ +# AWS Configuration +# ============================================ +AWS_REGION=us-east-1 +AWS_ACCOUNT_ID= # Leave empty to auto-detect + +# ============================================ +# AI Model +# ============================================ +# Bedrock model for orchestrator and sub-agents +# Options: us.anthropic.claude-sonnet-4-20250514-v1:0 (default, recommended) +# us.anthropic.claude-sonnet-4-5-20250929-v1:0 (newer, more capable) +BEDROCK_MODEL_ID=us.anthropic.claude-sonnet-4-20250514-v1:0 + +# ============================================ +# Container (Step 1) +# ============================================ +ECR_REPO_NAME=aws-transform-cli +ENABLE_ECR_SCANNING=true + +# ============================================ +# Infrastructure (Step 2) +# ============================================ + +# S3 Bucket Names (account ID appended automatically) +S3_BUCKET_NAME=atx-custom-output +SOURCE_BUCKET=atx-source-code + +# Batch Resources +COMPUTE_ENV_NAME=atx-fargate-compute +JOB_QUEUE_NAME=atx-job-queue +JOB_DEFINITION_NAME=atx-transform-job + +# Compute Resources +FARGATE_VCPU=2 +FARGATE_MEMORY=4096 + +# Job Settings +JOB_TIMEOUT=43200 # 12 hours +JOB_RETRY_ATTEMPTS=3 + +# ============================================ +# Network +# ============================================ +# Leave ALL empty to use the default VPC (recommended for getting started). +# The default VPC has public subnets with internet access out of the box. +# +# If your account has no default VPC, or you want to use a custom VPC: +# VPC_ID: Your VPC ID (e.g., vpc-0abc123def456) +# SUBNET_IDS: Comma-separated PUBLIC subnet IDs (must have internet access) +# Batch jobs need internet to clone repos, call AWS APIs, and run ATX CLI. +# Use public subnets with auto-assign public IP, or private subnets with NAT Gateway.
+# SECURITY_GROUP_ID: Security group that allows: +# - Outbound HTTPS (port 443) to 0.0.0.0/0 (required for AWS APIs, ECR, Git) +# - Outbound HTTP (port 80) to 0.0.0.0/0 (required for some Git repos) +# - No inbound rules needed (Batch jobs don't receive traffic) +# +# Example for custom VPC: +# VPC_ID=vpc-0abc123def456 +# SUBNET_IDS=subnet-0aaa111,subnet-0bbb222 +# SECURITY_GROUP_ID=sg-0ccc333 +VPC_ID= +SUBNET_IDS= +SECURITY_GROUP_ID= + +# ============================================ +# API +# ============================================ +API_NAME=atx-ui-api-v2 +API_STAGE=prod + +# ============================================ +# CloudWatch +# ============================================ +LOG_GROUP=/aws/batch/atx-transform +LOG_RETENTION_DAYS=30 diff --git a/agentic-atx-platform/deployment/generate-custom-policy.sh b/agentic-atx-platform/deployment/generate-custom-policy.sh new file mode 100755 index 0000000..ae01ee6 --- /dev/null +++ b/agentic-atx-platform/deployment/generate-custom-policy.sh @@ -0,0 +1,347 @@ +#!/bin/bash +set -e + +# Generate custom IAM policy based on config.env values +# This creates the most restrictive policy possible using actual resource names + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +CONFIG_FILE="$SCRIPT_DIR/config.env" +OUTPUT_FILE="$SCRIPT_DIR/iam-custom-policy.json" + +# Colors +GREEN='\033[0;32m' +BLUE='\033[0;34m' +YELLOW='\033[1;33m' +RED='\033[0;31m' +NC='\033[0m' + +log_info() { echo -e "${BLUE}ℹ${NC} $1"; } +log_success() { echo -e "${GREEN}✓${NC} $1"; } +log_warning() { echo -e "${YELLOW}⚠${NC} $1"; } +log_error() { echo -e "${RED}✗${NC} $1"; } + +echo "==========================================" +echo "Generate Custom IAM Policy" +echo "==========================================" +echo "" + +# Check for config file +if [ ! 
-f "$CONFIG_FILE" ]; then + log_error "Configuration file not found: $CONFIG_FILE" + echo "" + echo "Please create config.env from template:" + echo " cp config.env.template config.env" + echo " # Edit config.env with your settings" + echo " ./generate-custom-policy.sh" + exit 1 +fi + +# Load configuration +log_info "Loading configuration from $CONFIG_FILE" +source "$CONFIG_FILE" + +# Auto-detect AWS account ID if not set +if [ -z "$AWS_ACCOUNT_ID" ]; then + AWS_ACCOUNT_ID=$(aws sts get-caller-identity --query Account --output text 2>/dev/null) + if [ -z "$AWS_ACCOUNT_ID" ]; then + log_warning "Could not auto-detect AWS Account ID" + AWS_ACCOUNT_ID="REPLACE_WITH_ACCOUNT_ID" + else + log_info "Auto-detected AWS Account: $AWS_ACCOUNT_ID" + fi +fi + +# Set defaults from config or use standard defaults +AWS_REGION="${AWS_REGION:-us-east-1}" +ECR_REPO_NAME="${ECR_REPO_NAME:-aws-transform-cli}" +S3_BUCKET_NAME="${S3_BUCKET_NAME:-atx-custom-output}" +SOURCE_BUCKET="${SOURCE_BUCKET:-atx-source-code}" +COMPUTE_ENV_NAME="${COMPUTE_ENV_NAME:-atx-fargate-compute}" +JOB_QUEUE_NAME="${JOB_QUEUE_NAME:-atx-job-queue}" +JOB_DEFINITION_NAME="${JOB_DEFINITION_NAME:-atx-transform-job}" +API_NAME="${API_NAME:-atx-ui-api-v2}" +LOG_GROUP="${LOG_GROUP:-/aws/batch/atx-transform}" + +# Generate S3 bucket names with account ID +S3_OUTPUT_BUCKET="${S3_BUCKET_NAME}-${AWS_ACCOUNT_ID}" +S3_SOURCE_BUCKET="${SOURCE_BUCKET}-${AWS_ACCOUNT_ID}" + +log_info "Generating policy with these resources:" +echo " • ECR Repository: $ECR_REPO_NAME" +echo " • S3 Output Bucket: $S3_OUTPUT_BUCKET" +echo " • S3 Source Bucket: $S3_SOURCE_BUCKET" +echo " • Compute Environment: $COMPUTE_ENV_NAME" +echo " • Job Queue: $JOB_QUEUE_NAME" +echo " • Job Definition: $JOB_DEFINITION_NAME" +echo " • API Name: $API_NAME" +echo " • Log Group: $LOG_GROUP" +echo "" + +# Generate the custom policy +cat > "$OUTPUT_FILE" << EOF +{ + "Version": "2012-10-17", + "Statement": [ + { + "Sid": "ECRSpecificRepository", + "Effect": "Allow", 
+ "Action": [ + "ecr:CreateRepository", + "ecr:DescribeRepositories", + "ecr:DeleteRepository", + "ecr:BatchCheckLayerAvailability", + "ecr:GetDownloadUrlForLayer", + "ecr:BatchGetImage", + "ecr:InitiateLayerUpload", + "ecr:UploadLayerPart", + "ecr:CompleteLayerUpload", + "ecr:PutImage", + "ecr:PutImageScanningConfiguration" + ], + "Resource": [ + "arn:aws:ecr:$AWS_REGION:$AWS_ACCOUNT_ID:repository/$ECR_REPO_NAME" + ] + }, + { + "Sid": "ECRAuthToken", + "Effect": "Allow", + "Action": [ + "ecr:GetAuthorizationToken" + ], + "Resource": "*" + }, + { + "Sid": "S3SpecificBuckets", + "Effect": "Allow", + "Action": [ + "s3:CreateBucket", + "s3:ListBucket", + "s3:GetBucketLocation", + "s3:GetBucketVersioning", + "s3:PutBucketVersioning", + "s3:GetEncryptionConfiguration", + "s3:PutEncryptionConfiguration", + "s3:GetBucketLifecycleConfiguration", + "s3:PutBucketLifecycleConfiguration", + "s3:GetBucketPublicAccessBlock", + "s3:PutBucketPublicAccessBlock", + "s3:GetObject", + "s3:PutObject", + "s3:DeleteBucket", + "s3:DeleteObject" + ], + "Resource": [ + "arn:aws:s3:::$S3_OUTPUT_BUCKET", + "arn:aws:s3:::$S3_OUTPUT_BUCKET/*", + "arn:aws:s3:::$S3_SOURCE_BUCKET", + "arn:aws:s3:::$S3_SOURCE_BUCKET/*" + ] + }, + { + "Sid": "IAMSpecificRoles", + "Effect": "Allow", + "Action": [ + "iam:CreateRole", + "iam:GetRole", + "iam:AttachRolePolicy", + "iam:DetachRolePolicy", + "iam:PutRolePolicy", + "iam:GetRolePolicy", + "iam:DeleteRolePolicy", + "iam:ListAttachedRolePolicies", + "iam:ListRolePolicies", + "iam:PassRole" + ], + "Resource": [ + "arn:aws:iam::$AWS_ACCOUNT_ID:role/ATXBatchJobRole", + "arn:aws:iam::$AWS_ACCOUNT_ID:role/ATXBatchExecutionRole", + "arn:aws:iam::$AWS_ACCOUNT_ID:role/ATXApiLambdaRole", + "arn:aws:iam::$AWS_ACCOUNT_ID:role/ATXApiConsumerRole" + ] + }, + { + "Sid": "BatchDescribeAll", + "Effect": "Allow", + "Action": [ + "batch:DescribeComputeEnvironments", + "batch:DescribeJobQueues", + "batch:DescribeJobDefinitions", + "batch:DescribeJobs", + "batch:ListJobs" + ],
+ "Resource": "*" + }, + { + "Sid": "BatchSpecificResources", + "Effect": "Allow", + "Action": [ + "batch:CreateComputeEnvironment", + "batch:UpdateComputeEnvironment", + "batch:CreateJobQueue", + "batch:UpdateJobQueue", + "batch:RegisterJobDefinition", + "batch:SubmitJob", + "batch:TerminateJob" + ], + "Resource": [ + "arn:aws:batch:$AWS_REGION:$AWS_ACCOUNT_ID:compute-environment/$COMPUTE_ENV_NAME", + "arn:aws:batch:$AWS_REGION:$AWS_ACCOUNT_ID:job-queue/$JOB_QUEUE_NAME", + "arn:aws:batch:$AWS_REGION:$AWS_ACCOUNT_ID:job-definition/$JOB_DEFINITION_NAME", + "arn:aws:batch:$AWS_REGION:$AWS_ACCOUNT_ID:job-definition/$JOB_DEFINITION_NAME:*" + ] + }, + { + "Sid": "EC2NetworkOperations", + "Effect": "Allow", + "Action": [ + "ec2:DescribeVpcs", + "ec2:DescribeSubnets", + "ec2:DescribeSecurityGroups", + "ec2:CreateSecurityGroup", + "ec2:AuthorizeSecurityGroupEgress", + "ec2:AuthorizeSecurityGroupIngress", + "ec2:RevokeSecurityGroupEgress", + "ec2:RevokeSecurityGroupIngress" + ], + "Resource": "*" + }, + { + "Sid": "CloudWatchDescribeAll", + "Effect": "Allow", + "Action": [ + "logs:DescribeLogGroups" + ], + "Resource": "*" + }, + { + "Sid": "CloudWatchSpecificResources", + "Effect": "Allow", + "Action": [ + "logs:CreateLogGroup", + "logs:PutRetentionPolicy", + "logs:GetLogEvents", + "logs:FilterLogEvents" + ], + "Resource": [ + "arn:aws:logs:$AWS_REGION:$AWS_ACCOUNT_ID:log-group:$LOG_GROUP*", + "arn:aws:logs:$AWS_REGION:$AWS_ACCOUNT_ID:log-group:/aws/lambda/atx-*" + ] + }, + { + "Sid": "CloudWatchDashboardAll", + "Effect": "Allow", + "Action": [ + "cloudwatch:PutDashboard", + "cloudwatch:GetDashboard", + "cloudwatch:DeleteDashboard", + "cloudwatch:ListDashboards" + ], + "Resource": "*" + }, + { + "Sid": "LambdaSpecificFunctions", + "Effect": "Allow", + "Action": [ + "lambda:CreateFunction", + "lambda:GetFunction", + "lambda:UpdateFunctionCode", + "lambda:UpdateFunctionConfiguration", + "lambda:AddPermission", + "lambda:RemovePermission", + "lambda:InvokeFunction", + 
"lambda:ListFunctions" + ], + "Resource": [ + "arn:aws:lambda:$AWS_REGION:$AWS_ACCOUNT_ID:function:atx-async-invoke-agent" + ] + }, + { + "Sid": "APIGatewaySpecific", + "Effect": "Allow", + "Action": [ + "apigateway:GET", + "apigateway:POST", + "apigateway:PUT", + "apigateway:DELETE", + "apigateway:PATCH" + ], + "Resource": [ + "arn:aws:apigateway:$AWS_REGION::/restapis", + "arn:aws:apigateway:$AWS_REGION::/restapis/*" + ] + }, + { + "Sid": "ExecuteAPIPermissions", + "Effect": "Allow", + "Action": [ + "execute-api:Invoke" + ], + "Resource": [ + "arn:aws:execute-api:$AWS_REGION:$AWS_ACCOUNT_ID:*/prod/*/*" + ] + }, + { + "Sid": "RequiredReadOnlyPermissions", + "Effect": "Allow", + "Action": [ + "sts:GetCallerIdentity" + ], + "Resource": "*" + }, + { + "Sid": "ECSTaskDefinitionAccess", + "Effect": "Allow", + "Action": [ + "ecs:DescribeTaskDefinition", + "ecs:RegisterTaskDefinition" + ], + "Resource": "*" + } + ] +} +EOF + +log_success "Custom IAM policy generated: $OUTPUT_FILE" +echo "" + +# Display summary +echo "==========================================" +echo "Policy Summary" +echo "==========================================" +echo "" +echo "This policy is customized for your specific configuration:" +echo "" +echo "🔒 Restricted Resources:" +echo " • ECR: Only $ECR_REPO_NAME repository" +echo " • S3: Only $S3_OUTPUT_BUCKET and $S3_SOURCE_BUCKET buckets (with correct encryption permissions)" +echo " • Batch: Only $COMPUTE_ENV_NAME, $JOB_QUEUE_NAME, $JOB_DEFINITION_NAME" +echo " • Lambda: Only atx-* functions" +echo " • API Gateway: Only $API_NAME API (management + execution)" +echo " • Execute API: Can invoke deployed API endpoints" +echo " • CloudWatch: Only $LOG_GROUP log group" +echo " • IAM: Only ATX* roles" +echo "" +echo "🔄 Policy Updates Applied:" +echo " • Fixed S3 encryption permissions (s3:PutEncryptionConfiguration)" +echo " • Added execute-api:Invoke for API endpoint access" +echo " • Supports both deployment and API usage" +echo "" +echo "📋 Usage:" 
+echo " aws iam create-policy \\" +echo " --policy-name ATXCustomDeploymentPolicy \\" +echo " --policy-document file://$OUTPUT_FILE" +echo "" +echo " aws iam attach-user-policy \\" +echo " --user-name YOUR_USERNAME \\" +echo " --policy-arn arn:aws:iam::$AWS_ACCOUNT_ID:policy/ATXCustomDeploymentPolicy" +echo "" + +if [ "$AWS_ACCOUNT_ID" = "REPLACE_WITH_ACCOUNT_ID" ]; then + log_warning "Account ID could not be detected" + echo "Please replace 'REPLACE_WITH_ACCOUNT_ID' in $OUTPUT_FILE with your actual AWS account ID" + echo "" +fi + +echo "🔄 To regenerate after config changes:" +echo " ./generate-custom-policy.sh" +echo "" \ No newline at end of file diff --git a/agentic-atx-platform/docs/SECURITY.md b/agentic-atx-platform/docs/SECURITY.md new file mode 100644 index 0000000..2d65f2a --- /dev/null +++ b/agentic-atx-platform/docs/SECURITY.md @@ -0,0 +1,341 @@ +# Security Best Practices + +Security considerations and best practices for the AWS Transform CLI container. + +## Container Security + +### Image Security + +✅ **Implemented:** +- Non-root user (UID 1000) +- Minimal base image (Amazon Linux 2023) +- Official AWS base image from public ECR +- Regular security updates via dnf package manager +- Health checks +- Checksum verification for downloaded binaries (Maven, Gradle) +- Comprehensive .dockerignore + +⚠️ **Recommendations:** +- Scan images regularly with `docker scan` or AWS ECR scanning +- Update base image regularly for security patches (Amazon Linux 2023 updates via dnf) +- Review and update language versions quarterly + +### Runtime Security + +✅ **Implemented:** +- IAM role-based authentication (no long-lived credentials) +- Automatic credential refresh (every 45 minutes) +- Encrypted S3 storage (AES256) +- CloudWatch logging with 30-day retention +- HTTPS-only egress (port 443) +- Public subnet deployment with assignPublicIp + +⚠️ **Recommendations:** +- Enable VPC Flow Logs for network monitoring +- Implement least-privilege IAM policies +- Enable S3 
bucket versioning and MFA delete +- Use KMS for S3 encryption (instead of AES256) + +## AWS Batch Security + +### Job Definition Security + +✅ **Implemented:** +- Job timeout (12 hours default, configurable) +- Retry strategy (3 attempts with exponential backoff) +- Resource limits (2 vCPU, 4GB RAM default, configurable) +- Separate job role (ATXBatchJobRole) and execution role (ATXBatchExecutionRole) +- Fargate compute environment + +⚠️ **Recommendations:** +- Adjust timeout based on workload size +- Monitor job duration and set CloudWatch alarms +- Review IAM role permissions quarterly + +### Network Security + +✅ **Implemented:** +- Public subnets with assignPublicIp=ENABLED +- Security group with HTTPS-only egress (port 443) +- Auto-detected VPC and subnets + +⚠️ **Recommendations:** +- Restrict security group outbound to specific AWS service endpoints +- Enable VPC Flow Logs for audit +- Monitor network traffic patterns with CloudWatch + +## REST API Security + +### IAM Authentication + +✅ **Implemented:** +- AWS IAM authentication (AWS Signature V4) +- No API keys or shared secrets +- IAM user/role permissions with `execute-api:Invoke` +- Full CloudTrail audit trail + +⚠️ **Recommendations:** +- Grant users `execute-api:Invoke` permission on the API +- Use temporary credentials (AWS SSO or STS) +- Monitor API access via CloudTrail +- Set up CloudWatch Alarms for unusual activity + +**Grant API access:** +```bash +aws iam put-user-policy \ + --user-name YOUR_USERNAME \ + --policy-name InvokeATXApi \ + --policy-document '{ + "Version": "2012-10-17", + "Statement": [{ + "Effect": "Allow", + "Action": "execute-api:Invoke", + "Resource": "arn:aws:execute-api:*:*:*/prod/*" + }] + }' +``` + +**See:** The orchestrator handles API authentication via AgentCore IAM roles. + +## Secrets Management + +### Private Repository Access + +⚠️ **Critical:** Never hardcode credentials in Dockerfile or scripts + +**Recommended Approach:** +1. 
Store secrets in AWS Secrets Manager +2. Grant IAM job role `secretsmanager:GetSecretValue` permission +3. Retrieve secrets at runtime in entrypoint.sh + +**Example:** +```bash +# In entrypoint.sh +GITHUB_TOKEN=$(aws secretsmanager get-secret-value \ + --secret-id atx/github-token \ + --query SecretString \ + --output text | jq -r .token) +``` + +### AWS Credentials + +✅ **Implemented:** IAM role-based authentication (preferred) + +**Priority:** +1. ✅ IAM role (ECS task role, Batch job role) - **RECOMMENDED** +2. ⚠️ Environment variables (temporary credentials only) +3. ❌ Never use long-lived access keys + +## S3 Security + +### Bucket Configuration + +✅ **Implemented:** +- S3 encryption at rest (AES256) +- Block public access enabled +- Versioning enabled on output bucket +- Server access logging +- Enforce SSL (deny non-HTTPS requests) + +⚠️ **Recommendations:** +- Use KMS encryption instead of AES256 for compliance requirements +- Enable MFA delete for production buckets +- Implement S3 lifecycle policies to archive old results +- Enable S3 Object Lock for immutable results + +**Bucket policies:** +```bash +# Verify bucket encryption +aws s3api get-bucket-encryption --bucket atx-custom-output-{account} + +# Verify public access block +aws s3api get-public-access-block --bucket atx-custom-output-{account} +``` + +### Data Protection + +✅ **Implemented:** +- Sensitive file exclusions (.git, .env, credentials, private keys) +- Encryption in transit (HTTPS) +- 7-day lifecycle for source code uploads + +⚠️ **Recommendations:** +- Implement S3 Intelligent-Tiering for cost optimization +- Enable S3 Inventory for compliance reporting +- Use S3 Access Points for fine-grained access control + +## Input Validation + +✅ **Implemented:** +- Argument parsing with validation +- Path traversal prevention + +⚠️ **Note:** `eval` is used in entrypoint.sh for command execution. This is acceptable since commands come from trusted AWS Batch job definitions, not user input. 
+ +## Monitoring & Auditing + +### CloudWatch Logging + +✅ **Implemented:** +- Structured logging with ISO 8601 timestamps +- 30-day log retention +- Real-time log streaming + +⚠️ **Recommendations:** +- Set up CloudWatch Alarms for: + - Job failures + - Long-running jobs (> 6 hours) + - High error rates +- Export logs to S3 for long-term retention +- Use CloudWatch Insights for analysis + +### AWS CloudTrail + +⚠️ **Recommendations:** +- Enable CloudTrail for API audit logging +- Enable S3 access logging for result buckets +- Monitor for: + - Unauthorized API calls + - IAM policy changes + - S3 bucket policy changes + +## Compliance + +### Data Protection + +✅ **Implemented:** +- S3 encryption at rest (AES256) +- Encryption in transit (HTTPS) +- Sensitive file exclusions from S3 uploads + +⚠️ **Recommendations:** +- Use KMS for S3 encryption (instead of AES256) +- Enable S3 bucket versioning +- Implement S3 lifecycle policies +- Enable MFA delete for production buckets + +### Access Control + +✅ **Implemented:** +- IAM role-based access +- Least-privilege IAM policies +- Non-root container user + +⚠️ **Recommendations:** +- Implement IAM permission boundaries +- Use AWS Organizations SCPs for guardrails +- Regular IAM access reviews +- Enable AWS Config for compliance monitoring + +## Vulnerability Management + +### Container Scanning + +⚠️ **Critical:** Enable ECR image scanning + +```bash +# Enable scanning on push +aws ecr put-image-scanning-configuration \ + --repository-name aws-transform-cli \ + --image-scanning-configuration scanOnPush=true + +# Scan existing images +aws ecr start-image-scan \ + --repository-name aws-transform-cli \ + --image-id imageTag=latest +``` + +### Dependency Management + +⚠️ **Recommendations:** +- Update language versions quarterly +- Monitor security advisories for: + - Java (OpenJDK) + - Python + - Node.js + - npm packages (@aws/atx-cli) +- Use Dependabot or Renovate for automated updates + +## Incident Response + +### Logging + 
+✅ **Implemented:** +- CloudWatch Logs with structured logging +- IAM role ARN logging +- Command execution logging + +⚠️ **Recommendations:** +- Define incident response procedures +- Set up CloudWatch Alarms for anomalies +- Implement automated remediation with Lambda + +### Backup & Recovery + +⚠️ **Recommendations:** +- Enable S3 versioning for results +- Implement S3 cross-region replication +- Regular backup testing +- Document recovery procedures + +## Security Checklist + +### Before Deployment + +- [ ] Review and customize IAM policies +- [ ] Configure VPC and subnets +- [ ] Set up security groups +- [ ] Enable ECR image scanning +- [ ] Configure CloudWatch Alarms +- [ ] Enable CloudTrail +- [ ] Set up S3 bucket policies +- [ ] Configure secrets in Secrets Manager + +### After Deployment + +- [ ] Test IAM role permissions +- [ ] Verify network connectivity +- [ ] Test job submission and execution +- [ ] Verify CloudWatch logging +- [ ] Test credential refresh +- [ ] Review CloudTrail logs +- [ ] Scan container images +- [ ] Document configuration + +### Ongoing + +- [ ] Monthly: Update base image and rebuild container +- [ ] Quarterly: Update language versions +- [ ] Quarterly: Review IAM policies +- [ ] Quarterly: Review CloudWatch Alarms +- [ ] Annually: Security audit +- [ ] Annually: Penetration testing + +## Known Limitations + +1. **eval in entrypoint.sh:** Used for command execution. Acceptable since commands come from trusted AWS Batch job definitions. + +2. **Writable filesystem:** Required by ATX CLI for transformations. Cannot use read-only root filesystem. + +3. **Version switching:** Helper scripts use symlinks in user directory (no root required). + +4. **Network access:** AWS Transform Custom service requires internet access (no VPC endpoint available). + +## Reporting Security Issues + +If you discover a security vulnerability: + +1. **Do not** create a public GitHub issue +2. Email security contact (configure this) +3. 
Include: + - Description of vulnerability + - Steps to reproduce + - Potential impact + - Suggested fix (if any) + +## References + +- [AWS Batch Security Best Practices](https://docs.aws.amazon.com/batch/latest/userguide/security.html) +- [Container Security Best Practices](https://docs.aws.amazon.com/AmazonECS/latest/bestpracticesguide/security.html) +- [AWS Well-Architected Framework - Security Pillar](https://docs.aws.amazon.com/wellarchitected/latest/security-pillar/welcome.html) +- [CIS Docker Benchmark](https://www.cisecurity.org/benchmark/docker) diff --git a/agentic-atx-platform/docs/TROUBLESHOOTING.md b/agentic-atx-platform/docs/TROUBLESHOOTING.md new file mode 100644 index 0000000..6c6db8f --- /dev/null +++ b/agentic-atx-platform/docs/TROUBLESHOOTING.md @@ -0,0 +1,457 @@ +# Troubleshooting Guide + +Common issues customers may encounter and how to resolve them. + +--- + +## Deployment Issues + +### Deployment Script Fails + +**Symptom:** Deployment script exits with errors + +**Common Causes:** +1. **Missing AWS credentials** + ```bash + # Verify credentials + aws sts get-caller-identity + ``` + +2. **Insufficient IAM permissions** + - Need permissions for: ECR, S3, IAM, Batch, EC2, CloudWatch Logs + - Check error message for specific missing permission + + **Solution:** Generate least-privilege policy: + ```bash + cd deployment + ./generate-custom-policy.sh + # Follow instructions to create and attach the policy + ``` + +3. **VPC/Subnet not found** + ```bash + # List available VPCs + aws ec2 describe-vpcs --query 'Vpcs[*].[VpcId,IsDefault]' --output table + + # List public subnets + aws ec2 describe-subnets --filters "Name=vpc-id,Values=vpc-xxx" --query 'Subnets[*].[SubnetId,AvailabilityZone,MapPublicIpOnLaunch]' --output table + ``` + +**Solution:** Ensure AWS CLI is configured and you have necessary permissions. 
+ +--- + +### Docker Login to ECR Fails + +**Symptom:** `docker login` fails or Docker can't pull base images (403 Forbidden) + +**Solution:** +```bash +# Login to ECR Public (required before building container images) +aws ecr-public get-login-password --region us-east-1 | docker login --username AWS --password-stdin public.ecr.aws + +# If still failing, pull the base image manually first +docker pull public.ecr.aws/amazonlinux/amazonlinux:2023 + +# Verify Docker is running +docker info +``` + +### API Gateway CloudWatch Logs Role Missing + +**Symptom:** CDK deployment fails with "CloudWatch Logs role ARN must be set in account settings" + +**Cause:** First-time API Gateway deployment in this account/region + +**Solution (one-time per account/region):** +```bash +# Create role +aws iam create-role \ + --role-name APIGatewayCloudWatchLogsRole \ + --assume-role-policy-document '{ + "Version": "2012-10-17", + "Statement": [{ + "Effect": "Allow", + "Principal": {"Service": "apigateway.amazonaws.com"}, + "Action": "sts:AssumeRole" + }] + }' + +# Attach policy +aws iam attach-role-policy \ + --role-name APIGatewayCloudWatchLogsRole \ + --policy-arn arn:aws:iam::aws:policy/service-role/AmazonAPIGatewayPushToCloudWatchLogs + +# Set in API Gateway +aws apigateway update-account \ + --patch-operations op=replace,path=/cloudwatchRoleArn,value=arn:aws:iam::{account-id}:role/APIGatewayCloudWatchLogsRole \ + --region us-east-1 +``` + +Then redeploy. + +### cdk-nag Validation Errors + +**Symptom:** CDK deployment fails with "AwsSolutions-IAM5" or similar errors + +**Cause:** cdk-nag security validation is enabled + +**Solution:** + +The cdk-nag suppressions are already configured in the code. If you see errors for your account: +1. Check that `cdk/lib/agentcore-stack.ts` uses dynamic `${this.account}` (not hardcoded) +2. 
Redeploy after fixing + +Or disable cdk-nag temporarily in `cdk/bin/cdk.ts`: +```typescript +// Comment out this line: +// Aspects.of(app).add(new AwsSolutionsChecks({ verbose: true })); +``` + +--- + +## Job Execution Issues + +### Job Stays in RUNNABLE Status + +**Symptom:** Job never starts, stuck in RUNNABLE + +**Causes:** +- Compute environment is disabled or has no capacity +- Image pull fails (ECR permissions) + +**Solution:** +```bash +# Check compute environment +aws batch describe-compute-environments --compute-environments atx-fargate-compute + +# Check job queue +aws batch describe-job-queues --job-queues atx-job-queue + +# View job details +aws batch describe-jobs --jobs {job-id} +``` + +### Job Fails Immediately (FAILED status) + +**Symptom:** Job goes from SUBMITTED → FAILED quickly + +**Common Causes:** + +1. **Invalid Git URL** + ```bash + # Test Git URL manually + git ls-remote https://github.com/user/repo.git + ``` + +2. **Invalid command syntax** + - Check quotes in command + - Verify transformation name exists: `atx custom def list` + +3. 
**Network connectivity issues** + - Verify security group allows HTTPS outbound (port 443) + - Check subnet has internet access + +**Solution:** Check CloudWatch logs for specific error: +```bash +# Get log stream +JOB_ID="your-job-id" +LOG_STREAM=$(aws batch describe-jobs --jobs $JOB_ID --query 'jobs[0].container.logStreamName' --output text) + +# View logs +aws logs get-log-events \ + --log-group-name /aws/batch/atx-transform \ + --log-stream-name $LOG_STREAM \ + --query 'events[*].message' \ + --output text +``` + +### Job Times Out + +**Symptom:** Job runs for hours then fails with timeout + +**Causes:** +- Transformation is too complex for allocated resources +- Repository is very large + +**Solution:** +```bash +# Increase timeout and resources +curl -X POST "$API_ENDPOINT/jobs" \ + -H "Content-Type: application/json" \ + -d '{ + "source": "https://github.com/user/repo.git", + "command": "atx custom def exec ...", + "vcpu": "4", + "memory": "8192", + "timeout": 86400 + }' +``` + +### Transformation Compiles Language from Source (Very Slow) + +**Symptom:** Job takes 1-2 hours, logs show "Compiling Python/Java/Node.js" + +**Cause:** ATX doesn't know the language is pre-installed + +**Solution:** Specify installation path in `additionalPlanContext`: + +**Python:** +```bash +"additionalPlanContext": "Target Python 3.13. Python 3.13 is already installed at /usr/bin/python3.13" +``` + +**Java:** +```bash +"additionalPlanContext": "Target Java 21. Java 21 is already installed at /usr/lib/jvm/java-21-openjdk-amd64" +``` + +**Node.js:** +```bash +"additionalPlanContext": "Target Node.js 22. Node.js 22 is already installed at /home/atxuser/.nvm/versions/node/v22.12.0/bin/node" +``` + +See the container README for complete examples. + +--- + +## API Issues + +### UI API Calls Return HTML Instead of JSON + +**Symptom:** All API calls fail. Browser network tab shows the response is HTML (`<!DOCTYPE html>...`) instead of JSON. CSV batch shows "0 jobs submitted, N failed." 
+ +**Cause:** The UI was built without `VITE_API_ENDPOINT` set. Vite bakes environment variables at build time, so the app defaults to `/api` (relative path). On CloudFront, `/api/orchestrate` resolves back to CloudFront itself, returning `index.html`. + +**Solution:** Rebuild the UI with the correct API endpoint: +```bash +API_URL=$(aws cloudformation describe-stacks --stack-name AtxAgentCoreStack \ + --query 'Stacks[0].Outputs[?OutputKey==`ApiEndpoint`].OutputValue' --output text) + +cd ui +VITE_API_ENDPOINT=$API_URL npx vite build +./deploy-aws.sh +``` + +> This is the most common issue after Option B deployment. The API endpoint isn't known until the AgentCore stack deploys, so the UI must be rebuilt afterward. + +### Bedrock Model Marked as Legacy + +**Symptom:** API returns `ResourceNotFoundException: Access denied. This Model is marked by provider as Legacy and you have not been actively using the model in the last 15 days.` + +**Cause:** The configured Bedrock model has been deprecated or marked legacy. + +**Solution:** +1. Update `BEDROCK_MODEL_ID` in `deployment/config.env` to an active model: + ```bash + # Check available models + aws bedrock list-foundation-models --query "modelSummaries[?contains(modelId,'claude')].{id:modelId,status:modelLifecycle.status}" --output table + + # Recommended: Claude Sonnet 4 + BEDROCK_MODEL_ID=us.anthropic.claude-sonnet-4-20250514-v1:0 + ``` +2. 
Redeploy the AgentCore stack (Option B) or update the AgentCore runtime (Option A) + +### API Returns 403 Forbidden + +**Symptom:** All API calls return 403 + +**Cause:** Missing IAM permissions for API Gateway + +**Solution:** + +Grant your IAM user permission to invoke the API: +```bash +AWS_ACCOUNT_ID=$(aws sts get-caller-identity --query Account --output text) + +aws iam put-user-policy \ + --user-name YOUR_USERNAME \ + --policy-name InvokeATXApi \ + --policy-document "{ + \"Version\": \"2012-10-17\", + \"Statement\": [{ + \"Effect\": \"Allow\", + \"Action\": \"execute-api:Invoke\", + \"Resource\": \"arn:aws:execute-api:us-east-1:${AWS_ACCOUNT_ID}:*/prod/*\" + }] + }" +``` + +Then use the UI or AgentCore CLI to interact with the platform. + +### Cannot Find Conversation ID + +**Symptom:** Job succeeds but `atxConversationId` is null + +**Causes:** +- Job completed but results weren't uploaded to S3 +- Check CloudWatch logs for upload errors + +**Solution:** +```bash +# Check S3 bucket for results +aws s3 ls s3://atx-custom-output-{account}/transformations/ --recursive + +# Check CloudWatch logs for conversation ID +aws logs filter-log-events \ + --log-group-name /aws/batch/atx-transform \ + --filter-pattern "Conversation ID" \ + --query 'events[*].message' +``` + +--- + +## Results Issues + +### No Results in S3 + +**Symptom:** Job completes but logs show "Command failed after 3 attempts" when uploading to S3 + +**Cause:** IAM role lacks permissions to write to S3 bucket + +**Solution:** +```bash +# Update the IAM role policy to include S3 permissions +# See deployment/README.md for IAM policy configuration +``` + +This ensures ATXBatchJobRole has correct permissions for: +- `atx-custom-output-{ACCOUNT_ID}` (read/write) +- `atx-source-code-{ACCOUNT_ID}` (read) + +**Verify permissions:** +```bash +aws iam get-role-policy --role-name ATXBatchJobRole --policy-name S3BucketAccess +``` + +--- + +## Debugging Commands + +### View Real-Time Logs + +**Using AWS CLI:** 
+```bash +# Tail all jobs +aws logs tail /aws/batch/atx-transform --follow --region us-east-1 + +# Tail specific job +aws logs tail /aws/batch/atx-transform \ + --log-stream-names "atx/default/{stream-name}" \ + --follow +``` + +### Check Job Status + +```bash +# Via AWS CLI +aws batch describe-jobs --jobs {job-id} --region us-east-1 + +# Via AgentCore CLI +agentcore invoke '{"prompt": "Check status of job {job-id}"}' +``` + +### List Recent Jobs + +```bash +aws batch list-jobs \ + --job-queue atx-job-queue \ + --job-status SUCCEEDED \ + --max-results 10 +``` + +### Check IAM Permissions + +```bash +# Verify role exists +aws iam get-role --role-name ATXBatchJobRole + +# List attached policies +aws iam list-attached-role-policies --role-name ATXBatchJobRole + +# View inline policies +aws iam list-role-policies --role-name ATXBatchJobRole +aws iam get-role-policy --role-name ATXBatchJobRole --policy-name {policy-name} +``` + +### Verify Network Configuration + +```bash +# Check security group +aws ec2 describe-security-groups --group-ids {sg-id} + +# Check subnet internet access +aws ec2 describe-subnets --subnet-ids {subnet-id} \ + --query 'Subnets[*].[SubnetId,MapPublicIpOnLaunch]' + +# Check route table +aws ec2 describe-route-tables \ + --filters "Name=association.subnet-id,Values={subnet-id}" +``` + +--- + +## Common Error Messages + +### "Essential container in task exited" + +**Meaning:** Container crashed or exited with error + +**Action:** Check CloudWatch logs for actual error message + +### "CannotPullContainerError" + +**Meaning:** Cannot pull Docker image from ECR + +**Causes:** +- Image doesn't exist +- Network cannot reach ECR +- Missing ECR permissions + +**Solution:** +```bash +# Verify image exists +aws ecr describe-images \ + --repository-name aws-transform-cli \ + --region us-east-1 + +# Check execution role has ECR permissions +aws iam get-role --role-name ATXBatchExecutionRole +``` + +### "ResourceInitializationError" + +**Meaning:** Cannot 
initialize task resources + +**Causes:** +- Network configuration issues +- IAM permission issues + +**Solution:** +- Verify subnet has internet access +- Check security group allows outbound HTTPS +- Verify execution role permissions + +--- + +## Getting Help + +If you're still stuck: + +1. **Check CloudWatch Logs** - Most issues are explained in logs +2. **Review Configuration** - Verify all resources are created correctly +3. **Test Incrementally** - Start with simple commands like `atx custom def list` +4. **Check AWS Service Health** - Verify AWS Batch/ECR/S3 are operational in your region + +**Useful Log Patterns:** +```bash +# Search for errors +aws logs filter-log-events \ + --log-group-name /aws/batch/atx-transform \ + --filter-pattern "ERROR" + +# Search for specific job +aws logs filter-log-events \ + --log-group-name /aws/batch/atx-transform \ + --filter-pattern "{job-name}" +``` diff --git a/agentic-atx-platform/orchestrator/Dockerfile b/agentic-atx-platform/orchestrator/Dockerfile new file mode 100644 index 0000000..5e766cf --- /dev/null +++ b/agentic-atx-platform/orchestrator/Dockerfile @@ -0,0 +1,15 @@ +FROM public.ecr.aws/docker/library/python:3.11-slim + +WORKDIR /app +COPY . . + +RUN pip install --no-cache-dir -r requirements.txt + +RUN useradd -m -u 1000 bedrock_agentcore +USER bedrock_agentcore + +ENV AWS_REGION=us-east-1 +ENV BEDROCK_MODEL_ID=us.anthropic.claude-sonnet-4-20250514-v1:0 +EXPOSE 8080 + +CMD ["opentelemetry-instrument", "python", "agent.py"] diff --git a/agentic-atx-platform/orchestrator/README.md b/agentic-atx-platform/orchestrator/README.md new file mode 100644 index 0000000..33cd91a --- /dev/null +++ b/agentic-atx-platform/orchestrator/README.md @@ -0,0 +1,53 @@ +# ATX Transform Orchestrator + +Bedrock AgentCore agent that coordinates code transformations using AWS Transform CLI. 
+ +## Architecture + +The orchestrator is a Strands Agent with 3 specialized sub-agents: + +``` +Orchestrator (agent.py) +├── find_transform_agent → Search catalog + custom transforms +├── execute_transform_agent → Submit Batch jobs, check status, list results +└── create_transform_agent → Generate definitions, publish to ATX registry +``` + +Each sub-agent is itself a Strands Agent with its own system prompt and tools that call AWS services directly (Batch, S3, Bedrock). + +## Files + +| File | Purpose | +|------|---------| +| `agent.py` | Main orchestrator with system prompt and entrypoint | +| `tools/findtransform.py` | Catalog search (static + S3 custom) | +| `tools/executetransform.py` | Batch submit, status, results | +| `tools/createtransform.py` | Generate definition (Bedrock), publish (Batch) | +| `tools/memory_client.py` | AgentCore Memory client | +| `tools/memory_hooks.py` | Short-term memory hooks | +| `requirements.txt` | Python dependencies | + +## Deploy + +```bash +python3.11 -m venv .venv && source .venv/bin/activate +pip install bedrock-agentcore strands-agents boto3 pyyaml bedrock-agentcore-starter-toolkit + +agentcore configure -e agent.py -n atx_transform_orchestrator -r us-east-1 -ni \ + --deployment-type direct_code_deploy --runtime PYTHON_3_11 -rf requirements.txt +agentcore deploy --auto-update-on-conflict +``` + +## Test + +```bash +agentcore invoke '{"prompt": "List Python transformations"}' +agentcore invoke '{"prompt": "Execute AWS/python-version-upgrade on https://github.com/user/repo"}' +``` + +## Local Development + +```bash +source .venv/bin/activate +python3.11 agent.py # Runs on port 8080 +``` diff --git a/agentic-atx-platform/orchestrator/agent.py b/agentic-atx-platform/orchestrator/agent.py new file mode 100644 index 0000000..752b6d9 --- /dev/null +++ b/agentic-atx-platform/orchestrator/agent.py @@ -0,0 +1,190 @@ +#!/usr/bin/env python3 +""" +ATX Transform Orchestrator Agent + +A Strands agent that orchestrates code 
transformation using ATX CLI. +Coordinates three specialized sub-agents: +1. FindTransform Agent: Discovers existing transformations +2. ExecuteTransform Agent: Executes transformations and monitors jobs +3. CreateTransform Agent: Creates custom transformation definitions +""" + +import os +import json +import logging +from datetime import datetime + +# Monkey-patch Strands streaming to fix type concatenation bug +# (upstream issue: streaming.py line 216 does str += int when tool input has integer values) +try: + from strands.event_loop import streaming as _streaming + _original_handle = _streaming.handle_content_block_delta + def _patched_handle(content_block_delta, state): + if "toolUse" in content_block_delta.get("delta", {}): + delta = content_block_delta["delta"]["toolUse"] + if "input" in delta and isinstance(delta["input"], (int, float)): + delta["input"] = str(delta["input"]) + return _original_handle(content_block_delta, state) + _streaming.handle_content_block_delta = _patched_handle +except Exception: + pass # If patch fails, continue without it + +from strands import Agent +from strands.models import BedrockModel +from bedrock_agentcore.runtime import BedrockAgentCoreApp + +# Lazy imports for tools (loaded on first request, not at startup) +find_transform_agent = None +execute_transform_agent = None +create_transform_agent = None + +def _load_tools(): + global find_transform_agent, execute_transform_agent, create_transform_agent + if find_transform_agent is None: + from tools.findtransform import find_transform_agent as _find + from tools.executetransform import execute_transform_agent as _execute + from tools.createtransform import create_transform_agent as _create + find_transform_agent = _find + execute_transform_agent = _execute + create_transform_agent = _create +from tools.memory_hooks import ShortTermMemoryHook + +# Initialize the App +app = BedrockAgentCoreApp() + +# Lazy-initialize memory (don't block startup) +memory_client = None +memory_id = 
None + +def _init_memory(): + global memory_client, memory_id + if memory_client is None: + try: + from tools.memory_client import get_memory_client, initialize_memory as _init_mem + memory_client = get_memory_client() + memory_id = _init_mem() + except Exception as e: + logger.warning(f"Memory init failed (continuing without memory): {e}") + memory_id = None + +logger = logging.getLogger(__name__) +logger.setLevel(logging.INFO) + +ORCHESTRATOR_PROMPT = """You are the ATX Transform Orchestrator, responsible for coordinating code transformations. + +# Available Tools + +1. **find_transform_agent**: Finds the best matching transformation from the available catalog +2. **execute_transform_agent**: Executes transformations, checks job status, and retrieves job results +3. **create_transform_agent**: Creates and publishes custom transformation definitions to the ATX registry + +# Available Transformations (ONLY these exist) +- AWS/python-version-upgrade: Upgrade Python 3.8 → 3.13 +- AWS/java-version-upgrade: Upgrade Java 8 → 21 +- AWS/nodejs-version-upgrade: Upgrade Node.js 16 → 22 +- AWS/python-boto2-to-boto3: Migrate boto2 → boto3 AWS SDK +- AWS/java-aws-sdk-v1-to-v2: Migrate Java AWS SDK v1 → v2 +- AWS/nodejs-aws-sdk-v2-to-v3: Migrate Node.js AWS SDK v2 → v3 +- AWS/early-access-comprehensive-codebase-analysis: Analyze codebase +- AWS/early-access-java-x86-to-graviton: Java x86 to ARM64/Graviton +- AWS/early-access-angular-to-react-migration: Angular to React +- AWS/early-access-jfr-performance-optimization: Java JFR performance optimization + +IMPORTANT: You can execute any transformation from the AWS-managed list above, plus any custom transformations published to the registry. Before executing a non-AWS transformation, use find_transform_agent to verify it exists. Custom transformation names do NOT start with "AWS/". + +# Orchestration Protocol + +Follow this sequence when handling transformation requests: + +1. 
**If a specific transformation is provided** → Go directly to step 4 (execute) +2. **If no transformation is specified** → Use find_transform_agent to search for the best match +3. **If find_transform_agent does NOT find a suitable transformation** → Use create_transform_agent to generate and publish a new custom transformation based on the requirements +4. **Once you have the transformation name** → Use execute_transform_agent to execute it on the repository + +This find → create → execute chain ensures every request gets handled, even if no existing transformation matches. + +# How to Handle Different Requests + +**"Execute transformation X on repo Y"** → Skip to step 4, use execute_transform_agent with the EXACT transformation name (e.g., "AWS/python-version-upgrade" for AWS-managed, or "add-error-handling" for custom - do NOT add "AWS/" prefix to custom transforms) +**"Transform repo Y" (no transformation specified)** → Follow steps 2-4: find best match, create if needed, then execute +**"Check status of job "** → Use execute_transform_agent and ask it to check the job status +**"Show results for job "** → Use execute_transform_agent and ask it to list the job results +**"What transformations are available?"** → Use find_transform_agent +**"Find a transformation for "** → Use find_transform_agent +**"Create a custom transformation that..."** → Use create_transform_agent +**"Publish transformation "** → Use create_transform_agent + +# Response Format +Always provide clear status, details, and next steps. Do NOT ask follow-up questions like "Would you like me to..." or "Is there anything else..." - this is a one-shot API, not a chatbot. 
Just report what was done and the results.""" + + +def create_orchestrator(session_id: str = None, actor_id: str = None) -> Agent: + """Create the ATX Transform orchestrator agent.""" + _init_memory() + _load_tools() + + region = os.getenv("AWS_REGION", "us-east-1") + model_id = os.getenv("BEDROCK_MODEL_ID", "us.anthropic.claude-sonnet-4-20250514-v1:0") + + bedrock_model = BedrockModel( + model_id=model_id, + region_name=region, + temperature=0.5, + max_tokens=4096 + ) + + hooks = [] + if memory_id: + from tools.memory_hooks import ShortTermMemoryHook + hooks.append(ShortTermMemoryHook(memory_client, memory_id)) + + orchestrator = Agent( + model=bedrock_model, + system_prompt=ORCHESTRATOR_PROMPT, + tools=[find_transform_agent, execute_transform_agent, create_transform_agent], + hooks=hooks, + state={"actor_id": actor_id, "session_id": session_id} + ) + + return orchestrator + + +@app.entrypoint +def invoke(payload): + """Bedrock AgentCore entrypoint.""" + try: + user_message = payload.get("prompt", payload.get("message", "")) + + timestamp = datetime.now().strftime("%Y%m%d%H%M%S") + session_id = f"atx-transform-{timestamp}" + + orchestrator = create_orchestrator( + session_id=session_id, + actor_id="atx_user" + ) + + logger.info("Starting ATX Transform orchestration") + response = orchestrator(user_message) + logger.info("Orchestration completed") + + if hasattr(response, 'message'): + response_content = response.message + elif hasattr(response, 'content'): + response_content = response.content + else: + response_content = str(response) + + return {"result": response_content} + + except Exception as e: + logger.error(f"Orchestration failed: {e}", exc_info=True) + return { + "statusCode": 500, + "body": json.dumps({"error": str(e)}) + } + + +if __name__ == "__main__": + print("Starting ATX Transform Orchestrator...") + print("Server will be available at http://localhost:8080") + app.run() diff --git a/agentic-atx-platform/orchestrator/requirements.txt 
b/agentic-atx-platform/orchestrator/requirements.txt new file mode 100644 index 0000000..fd1520c --- /dev/null +++ b/agentic-atx-platform/orchestrator/requirements.txt @@ -0,0 +1,5 @@ +bedrock-agentcore +strands-agents>=0.3.0 +boto3>=1.34.0 +pyyaml>=6.0 +aws_opentelemetry_distro_genai_beta>=0.1.2 diff --git a/agentic-atx-platform/orchestrator/tools/__init__.py b/agentic-atx-platform/orchestrator/tools/__init__.py new file mode 100644 index 0000000..44029ef --- /dev/null +++ b/agentic-atx-platform/orchestrator/tools/__init__.py @@ -0,0 +1 @@ +# tools package diff --git a/agentic-atx-platform/orchestrator/tools/createtransform.py b/agentic-atx-platform/orchestrator/tools/createtransform.py new file mode 100644 index 0000000..42b528c --- /dev/null +++ b/agentic-atx-platform/orchestrator/tools/createtransform.py @@ -0,0 +1,493 @@ +""" +CreateTransform Sub-Agent + +Creates custom transformation definitions by: +1. Cloning repo to S3 via Batch job +2. AI-driven file selection: list files → pick relevant ones → read them +3. Generating transformation_definition.md using Bedrock with full source context +4. 
Publishing to ATX registry via Batch job +""" + +import os +import json +import time +import logging +import boto3 +from typing import Any, Dict + +from strands import Agent, tool +from strands.models import BedrockModel + +logger = logging.getLogger(__name__) +logger.setLevel(logging.INFO) + +region = os.getenv("AWS_REGION", "us-east-1") +bedrock_runtime = boto3.client('bedrock-runtime', region_name=region) +s3_client = boto3.client('s3', region_name=region) +batch_client = boto3.client('batch', region_name=region) + +_account_id = None +def _get_account(): + global _account_id + if not _account_id: + _account_id = boto3.client('sts').get_caller_identity()['Account'] + return _account_id + +def _get_source_bucket(): + return f"atx-source-code-{_get_account()}" + + +@tool +def upload_repo_to_s3(source_url: str, name: str) -> Dict[str, Any]: + """ + Submit a Batch job to clone a repository and upload all source files to S3. + Files are uploaded to s3://{bucket}/repo-snapshots/{name}/ for AI-driven browsing. + + Args: + source_url: Git repository URL (e.g., 'https://github.com/user/repo') + name: Transformation name (used as S3 prefix) + + Returns: + Dictionary with job ID and S3 prefix + """ + bucket = _get_source_bucket() + s3_prefix = f"repo-snapshots/{name}" + job_name = f"upload-repo-{name}-{int(time.time())}" + job_queue = os.environ.get('JOB_QUEUE_NAME', 'atx-job-queue') + job_definition = os.environ.get('JOB_DEFINITION_NAME', 'atx-transform-job') + + # Clone repo and sync all source files to S3 (exclude .git) + cmd = ( + f"git clone {source_url} /source/repo && " + f"cd /source/repo && " + f"aws s3 sync . 
s3://{bucket}/{s3_prefix}/ --exclude '.git/*'" + ) + + try: + response = batch_client.submit_job( + jobName=job_name, jobQueue=job_queue, jobDefinition=job_definition, + containerOverrides={'command': ['--command', cmd]} + ) + job_id = response['jobId'] + + logger.info(f"Waiting for repo upload job {job_id}...") + for _ in range(60): # Max 5 minutes + time.sleep(5) + status = batch_client.describe_jobs(jobs=[job_id]) + if not status['jobs']: + break + job_status = status['jobs'][0]['status'] + if job_status == 'SUCCEEDED': + return { + "status": "success", + "s3_prefix": f"s3://{bucket}/{s3_prefix}/", + "message": f"Repository uploaded to S3. Use list_repo_files and read_repo_file to browse.", + } + if job_status == 'FAILED': + reason = status['jobs'][0].get('statusReason', 'Unknown') + return {"status": "error", "error": f"Upload job failed: {reason}"} + + return {"status": "error", "error": "Upload job timed out after 5 minutes"} + except Exception as e: + return {"status": "error", "error": str(e)} + + +@tool +def list_repo_files(name: str) -> Dict[str, Any]: + """ + List all files in a repository snapshot uploaded to S3. 
+ + Args: + name: Transformation name (matches the S3 prefix from upload_repo_to_s3) + + Returns: + Dictionary with list of file paths and sizes + """ + bucket = _get_source_bucket() + prefix = f"repo-snapshots/{name}/" + try: + paginator = s3_client.get_paginator('list_objects_v2') + files = [] + for page in paginator.paginate(Bucket=bucket, Prefix=prefix): + for obj in page.get('Contents', []): + rel_path = obj['Key'][len(prefix):] + if rel_path and not rel_path.startswith('.git/'): + files.append({'path': rel_path, 'size': obj['Size']}) + return { + "status": "success", + "file_count": len(files), + "files": files, + } + except Exception as e: + return {"status": "error", "error": str(e)} + + +@tool +def read_repo_file(name: str, file_path: str) -> Dict[str, Any]: + """ + Read the content of a specific file from the repository snapshot in S3. + + Args: + name: Transformation name (matches the S3 prefix) + file_path: Relative file path within the repo (e.g., 'src/app.py') + + Returns: + Dictionary with file content (truncated to 50KB for context window safety) + """ + bucket = _get_source_bucket() + key = f"repo-snapshots/{name}/{file_path}" + try: + obj = s3_client.get_object(Bucket=bucket, Key=key) + content = obj['Body'].read().decode('utf-8', errors='replace') + truncated = False + if len(content) > 50000: + content = content[:50000] + truncated = True + return { + "status": "success", + "path": file_path, + "content": content, + "size": obj['ContentLength'], + "truncated": truncated, + } + except Exception as e: + return {"status": "error", "error": str(e)} + + +@tool +def generate_transformation_definition(name: str, description: str, requirements: str, + source_context: str = "") -> Dict[str, Any]: + """ + Generate a transformation_definition.md file using Bedrock AI and upload to S3. 
+ + Args: + name: Name for the transformation (e.g., 'add-structured-logging') + description: Short description of what the transformation does + requirements: Detailed requirements for the transformation + source_context: Source code context (file contents read from the repo) + + Returns: + Dictionary with the generated definition and S3 location + """ + bucket = _get_source_bucket() + + prompt = f"""Create a transformation_definition.md file for AWS Transform custom. + +Name: {name} +Description: {description} +Requirements: {requirements} +""" + if source_context: + prompt += f""" +The following is the actual source code from the target repository. +Use this to make the transformation definition specific and accurate for this codebase. +Reference actual file names, function names, class names, and patterns you see. + +{source_context} +""" + + prompt += """ +The file should contain clear, detailed instructions that an AI agent will follow to transform code. +Include: +- What changes to make (be specific based on the actual code patterns found) +- Specific files and functions to modify +- Patterns to look for in the source code +- How to validate the changes +- Edge cases to handle + +Output ONLY the markdown content, no code fences.""" + + try: + response = bedrock_runtime.invoke_model( + modelId=os.getenv("BEDROCK_MODEL_ID", "us.anthropic.claude-sonnet-4-20250514-v1:0"), + body=json.dumps({ + "anthropic_version": "bedrock-2023-05-31", + "max_tokens": 8192, + "temperature": 0.3, + "messages": [{"role": "user", "content": prompt}] + }) + ) + body = json.loads(response['body'].read()) + definition_md = body['content'][0]['text'].strip() + + s3_key = f"custom-definitions/{name}/transformation_definition.md" + s3_client.put_object( + Bucket=bucket, Key=s3_key, + Body=definition_md.encode('utf-8'), + ContentType='text/markdown' + ) + + return { + "status": "success", + "name": name, + "s3_uri": f"s3://{bucket}/{s3_key}", + "source_analyzed": bool(source_context), + 
"definition_preview": definition_md[:500] + "..." if len(definition_md) > 500 else definition_md, + } + except Exception as e: + return {"status": "error", "error": str(e)} + + +@tool +def publish_transformation(name: str, description: str) -> Dict[str, Any]: + """ + Publish a transformation definition to the ATX registry by submitting a Batch job. + + Args: + name: Name of the transformation to publish + description: Description for the registry + + Returns: + Dictionary with the Batch job ID for the publish operation + """ + bucket = _get_source_bucket() + s3_key = f"custom-definitions/{name}/transformation_definition.md" + + try: + s3_client.head_object(Bucket=bucket, Key=s3_key) + except Exception: + return {"status": "error", "error": f"Definition not found: s3://{bucket}/{s3_key}. Generate it first."} + + job_name = f"publish-{name}-{int(time.time())}" + job_queue = os.environ.get('JOB_QUEUE_NAME', 'atx-job-queue') + job_definition = os.environ.get('JOB_DEFINITION_NAME', 'atx-transform-job') + + cmd = ( + f"mkdir -p /tmp/{name} && " + f"aws s3 cp s3://{bucket}/custom-definitions/{name}/transformation_definition.md /tmp/{name}/transformation_definition.md && " + f"atx custom def publish -n {name} --description '{description}' --sd /tmp/{name}" + ) + + try: + response = batch_client.submit_job( + jobName=job_name, jobQueue=job_queue, jobDefinition=job_definition, + containerOverrides={'command': ['--command', cmd]} + ) + + status_data = { + "status": "publishing", + "job_id": response['jobId'], + "job_name": job_name, + "name": name, + "description": description, + "created_at": time.strftime('%Y-%m-%dT%H:%M:%SZ', time.gmtime()), + } + s3_client.put_object( + Bucket=bucket, Key=f"custom-definitions/{name}/status.json", + Body=json.dumps(status_data).encode(), ContentType='application/json' + ) + + return { + "status": "success", + "action": "publish", + "job_id": response['jobId'], + "transformation_name": name, + "message": f"Publish job submitted. 
'{name}' will be available once complete.", + } + except Exception as e: + return {"status": "error", "error": str(e)} + + +@tool +def list_registry_transformations() -> Dict[str, Any]: + """Submit a Batch job to list all transformations in the ATX registry.""" + job_name = f"list-transforms-{int(time.time())}" + job_queue = os.environ.get('JOB_QUEUE_NAME', 'atx-job-queue') + job_definition = os.environ.get('JOB_DEFINITION_NAME', 'atx-transform-job') + try: + response = batch_client.submit_job( + jobName=job_name, jobQueue=job_queue, jobDefinition=job_definition, + containerOverrides={'command': ['--command', 'atx custom def list --json']} + ) + return {"status": "success", "job_id": response['jobId'], "message": "List job submitted."} + except Exception as e: + return {"status": "error", "error": str(e)} + + +@tool +def create_transform_agent(query: str) -> Dict[str, Any]: + """ + Creates and publishes custom transformation definitions to the ATX registry. + When a source repository is provided, uses AI-driven file selection to read + relevant source files and generate a definition tailored to the actual codebase. + + Args: + query: Natural language request describing the custom transformation to create. + + Returns: + Dictionary with results + """ + logger.info("CREATE TRANSFORM AGENT INVOKED") + + try: + # Step 1: Extract parameters from natural language + extract_prompt = f"""Extract the following from this request. Return ONLY valid JSON, no other text. 
@tool
def create_transform_agent(query: str) -> Dict[str, Any]:
    """
    Creates and publishes custom transformation definitions to the ATX registry.
    When a source repository is provided, uses AI-driven file selection to read
    relevant source files and generate a definition tailored to the actual codebase.

    Args:
        query: Natural language request describing the custom transformation to create.

    Returns:
        Dictionary with results
    """
    logger.info("CREATE TRANSFORM AGENT INVOKED")

    def _strip_fences(text: str) -> str:
        # Model output may arrive wrapped in ```json ... ``` fences; unwrap it.
        # (This logic previously appeared twice in this function.)
        if '```' in text:
            text = text.split('```')[1]
        if text.startswith('json'):
            text = text[4:]
        return text.strip()

    def _invoke_claude(prompt: str, max_tokens: int) -> str:
        # Direct Bedrock invoke (shared boilerplate for both model calls below).
        response = bedrock_runtime.invoke_model(
            modelId=os.getenv("BEDROCK_MODEL_ID", "us.anthropic.claude-sonnet-4-20250514-v1:0"),
            body=json.dumps({
                "anthropic_version": "bedrock-2023-05-31",
                "max_tokens": max_tokens, "temperature": 0.1,
                "messages": [{"role": "user", "content": prompt}]
            })
        )
        return json.loads(response['body'].read())['content'][0]['text'].strip()

    try:
        # Step 1: Extract parameters from natural language
        extract_prompt = f"""Extract the following from this request. Return ONLY valid JSON, no other text.

Request: {query}

Return JSON with these fields:
- "action": one of "create", "publish", "list" (default: "create")
- "name": transformation name (lowercase, hyphenated, e.g., "add-logging")
- "description": short description
- "requirements": detailed requirements
- "source_url": repository URL if mentioned, or empty string

Example: {{"action": "create", "name": "add-logging", "description": "Add logging", "requirements": "Add structured logging to all functions", "source_url": "https://github.com/user/repo"}}"""

        params = json.loads(_strip_fences(_invoke_claude(extract_prompt, 2048)))

        action = params.get('action', 'create')
        name = params.get('name', '')
        description = params.get('description', name)
        requirements = params.get('requirements', '')
        source_url = params.get('source_url', '')

        if action == 'list':
            return list_registry_transformations()
        if action == 'publish' and name:
            return publish_transformation(name, description)
        if not name or not requirements:
            return {"status": "error", "error": "Could not extract transformation name and requirements."}

        results = []
        source_context = ""

        # Step 2: Upload repo to S3 if source URL provided
        if source_url:
            logger.info(f"Uploading repo to S3: {source_url}")
            upload_result = upload_repo_to_s3(source_url=source_url, name=name)
            results.append(f"Repo upload: {upload_result.get('status')}")

            if upload_result.get('status') == 'success':
                # Step 3: List files
                file_list = list_repo_files(name=name)
                if file_list.get('status') == 'success':
                    files = file_list['files']
                    results.append(f"Files found: {file_list['file_count']}")

                    max_context = 400000  # ~100K tokens
                    # Filter to source code files only (skip binaries, images, etc.)
                    SOURCE_EXTS = {'.py', '.java', '.js', '.ts', '.jsx', '.tsx', '.go', '.rb', '.rs',
                                   '.c', '.cpp', '.h', '.cs', '.kt', '.scala', '.swift',
                                   '.json', '.yaml', '.yml', '.toml', '.xml', '.properties',
                                   '.md', '.txt', '.html', '.css', '.scss', '.sql',
                                   '.gradle', '.cfg', '.ini', '.env', '.sh', '.bat'}
                    source_files = [f for f in files if any(f['path'].endswith(ext) for ext in SOURCE_EXTS)
                                    or '.' not in f['path'].split('/')[-1]  # files without extension (Makefile, Dockerfile, etc.)
                                    or f['path'].split('/')[-1] in ('Makefile', 'Dockerfile', 'Gemfile', 'Rakefile')]
                    total_source_size = sum(f['size'] for f in source_files)

                    if total_source_size <= max_context:
                        # Small repo: read ALL source files, skip AI selection
                        results.append(f"Small repo ({total_source_size} chars) — reading all {len(source_files)} source files")
                        selected_files = [f['path'] for f in source_files]
                    else:
                        # Large repo: AI selects files, budget-aware
                        avg_file_size = total_source_size // max(len(source_files), 1)
                        max_files = max(10, min(30, max_context // max(avg_file_size, 1)))
                        results.append(f"Large repo ({total_source_size} chars) — AI selecting up to {max_files} files")

                        file_paths = [f['path'] for f in source_files]
                        select_prompt = f"""Given these files in a repository and the transformation requirements below,
select the most relevant files to read (max {max_files} files). Prioritize:
1. Main source files related to the transformation requirements
2. Configuration/dependency files (requirements.txt, package.json, pom.xml)
3. README or documentation files
4. Test files if relevant

Return ONLY a JSON array of file paths.

Requirements: {requirements}

Files:
{json.dumps(file_paths, indent=2)}"""

                        selected_files = json.loads(_strip_fences(_invoke_claude(select_prompt, 4096)))
                        results.append(f"AI selected {len(selected_files)} files")

                    # Step 4: Read selected files (budget-capped on total characters)
                    context_parts = []
                    total_chars = 0
                    for fp in selected_files:
                        if total_chars >= max_context:
                            break
                        file_data = read_repo_file(name=name, file_path=fp)
                        if file_data.get('status') == 'success':
                            content = file_data['content']
                            context_parts.append(f"=== {fp} ===\n{content}")
                            total_chars += len(content)

                    source_context = "\n\n".join(context_parts)
                    results.append(f"Read {len(context_parts)} files ({total_chars} chars)")

        # Step 5: Generate definition
        logger.info(f"Generating definition for: {name}")
        gen_result = generate_transformation_definition(
            name=name, description=description,
            requirements=requirements, source_context=source_context
        )
        if gen_result.get('status') == 'error':
            return gen_result
        results.append(f"Definition generated: {gen_result.get('s3_uri')}")

        # Check if this is a generate-only request (preview mode).
        # Also accept the apostrophe-free spelling "dont publish".
        q_lower = query.lower()
        generate_only = ('do not publish' in q_lower or "don't publish" in q_lower
                         or 'dont publish' in q_lower)

        if generate_only:
            return {
                "status": "success",
                "result": f"Custom transformation '{name}' definition generated (preview mode, not published).\n"
                          + f"Definition location: {gen_result.get('s3_uri')}\n"
                          + f"Source analyzed: {bool(source_context)}\n"
                          + f"Definition preview: {gen_result.get('definition_preview', '')}\n\n"
                          + "\n".join(results),
            }

        # Step 6: Publish
        logger.info(f"Publishing: {name}")
        pub_result = publish_transformation(name=name, description=description)
        if pub_result.get('status') == 'error':
            return pub_result
        results.append(f"Publish job: {pub_result.get('job_id')}")

        return {
            "status": "success",
            "result": f"Custom transformation '{name}' created and publish job submitted.\n"
                      + f"Publish job ID: {pub_result.get('job_id')}\n"
                      + f"Source analyzed: {bool(source_context)}\n"
                      + f"Definition preview: {gen_result.get('definition_preview', '')}\n\n"
                      + "\n".join(results),
        }

    except json.JSONDecodeError as e:
        logger.error(f"Failed to parse response: {e}")
        return {"status": "error", "error": f"Failed to parse parameters: {e}"}
    except Exception as e:
        logger.error(f"Create transform agent failed: {e}", exc_info=True)
        return {"status": "error", "error": str(e)}
+""" + +import os +import time +import json +import logging +import boto3 +from typing import Any, Dict +from datetime import datetime + +from strands import Agent, tool + +logger = logging.getLogger(__name__) +logger.setLevel(logging.INFO) + +region = os.getenv("AWS_REGION", "us-east-1") +batch_client = boto3.client('batch', region_name=region) +s3_client = boto3.client('s3', region_name=region) + + +def _get_output_bucket() -> str: + account = boto3.client('sts').get_caller_identity()['Account'] + return f"atx-custom-output-{account}" + + +def _extract_repo_name(source: str) -> str: + if not source: + return 'unknown' + if 'github.com' in source: + return source.rstrip('/').rstrip('.git').split('/')[-1] + if source.startswith('s3://'): + return source.split('/')[-1].replace('.zip', '').replace('.tar.gz', '') + return 'unknown' + + +@tool +def execute_transformation(transformation: str, source: str, configuration: str = "") -> Dict[str, Any]: + """ + Execute a transformation on a source code repository. 
@tool
def execute_transformation(transformation: str, source: str, configuration: str = "") -> Dict[str, Any]:
    """
    Execute a transformation on a source code repository.

    Args:
        transformation: Transformation name (e.g., 'AWS/python-version-upgrade')
        source: Source code URL (GitHub URL or S3 path)
        configuration: Optional comma-separated config (e.g., 'validationCommands=pytest,additionalPlanContext=Target Python 3.13')

    Returns:
        Dictionary with job ID and submission details
    """
    # Parse "k1=v1,k2=v2" into a dict; pairs without '=' are silently ignored.
    config: Dict[str, str] = {}
    if configuration:
        for pair in configuration.split(","):
            if "=" in pair:
                k, v = pair.split("=", 1)
                config[k.strip()] = v.strip()

    repo_name = _extract_repo_name(source)
    job_name = f"{repo_name}-{transformation.split('/')[-1]}-{int(time.time())}"

    # Validate transformation name - strip incorrect AWS/ prefix for custom transforms
    AWS_MANAGED = [
        'AWS/python-version-upgrade', 'AWS/java-version-upgrade', 'AWS/nodejs-version-upgrade',
        'AWS/python-boto2-to-boto3', 'AWS/java-aws-sdk-v1-to-v2', 'AWS/nodejs-aws-sdk-v2-to-v3',
        'AWS/early-access-comprehensive-codebase-analysis', 'AWS/early-access-java-x86-to-graviton',
        'AWS/early-access-angular-to-react-migration', 'AWS/early-access-jfr-performance-optimization',
    ]
    if transformation.startswith('AWS/') and transformation not in AWS_MANAGED:
        transformation = transformation.replace('AWS/', '', 1)

    cmd = f"atx custom def exec -n {transformation} -p /source/repo"

    # Version upgrades REQUIRE additionalPlanContext in non-interactive mode;
    # inject a sensible default when the caller did not supply one. This table
    # and the injection logic were previously duplicated across two branches.
    if 'version-upgrade' in transformation and 'additionalPlanContext' not in config:
        version_map = {
            'python-version-upgrade': 'The target Python version to upgrade to is Python 3.13. Python 3.13 is already installed at /usr/bin/python3.13',
            'java-version-upgrade': 'The target Java version to upgrade to is Java 21. Java 21 is already installed at /usr/lib/jvm/java-21-amazon-corretto',
            'nodejs-version-upgrade': 'The target nodejs version to upgrade to is 22. Node.js 22 is already installed at /home/atxuser/.nvm/versions/node/v22.12.0/bin/node',
        }
        for key, default_ctx in version_map.items():
            if key in transformation:
                config['additionalPlanContext'] = default_ctx
                break

    if config:
        config_str = ','.join(f"{k}={v}" for k, v in config.items())
        cmd += f" --configuration '{config_str}'"
    cmd += " -x -t"

    job_queue = os.environ.get('JOB_QUEUE_NAME', 'atx-job-queue')
    job_definition = os.environ.get('JOB_DEFINITION_NAME', 'atx-transform-job')

    # With a source URL the container downloads and re-uploads the repo;
    # without one, just run the bare command.
    if source:
        container_overrides = {'command': ['--source', source, '--output', f'transformations/{job_name}/', '--command', cmd]}
    else:
        container_overrides = {'command': ['--command', cmd]}

    try:
        response = batch_client.submit_job(jobName=job_name, jobQueue=job_queue, jobDefinition=job_definition, containerOverrides=container_overrides)
        return {
            "status": "success", "action": "execute",
            "job_id": response['jobId'], "job_name": job_name,
            "transformation": transformation, "source": source,
            "command": cmd, "submitted_at": datetime.utcnow().isoformat() + 'Z',
        }
    except Exception as e:
        return {"status": "error", "error": str(e)}
@tool
def get_job_status(job_id: str) -> Dict[str, Any]:
    """
    Check the status of a transformation job.

    Args:
        job_id: The job ID returned from execute_transformation

    Returns:
        Dictionary with job status, timestamps, and log stream info
    """
    try:
        described = batch_client.describe_jobs(jobs=[job_id])
        jobs = described['jobs']
        if not jobs:
            return {"status": "error", "error": f"Job not found: {job_id}"}

        job = jobs[0]
        state = job['status']
        info = {
            "status": "success", "action": "status",
            "job_id": job_id, "job_name": job['jobName'],
            "job_status": state,
            "created_at": job.get('createdAt'),
            "started_at": job.get('startedAt'),
            "stopped_at": job.get('stoppedAt'),
            "log_stream": job.get('container', {}).get('logStreamName'),
        }
        # Only completed jobs have output in S3; only failed jobs carry a reason.
        if state == 'SUCCEEDED':
            bucket = _get_output_bucket()
            info['results_location'] = f"s3://{bucket}/transformations/{job['jobName']}/"
        if state == 'FAILED' and 'statusReason' in job:
            info['failure_reason'] = job['statusReason']
        return info
    except Exception as exc:
        return {"status": "error", "error": str(exc)}
@tool
def list_job_results(job_id: str) -> Dict[str, Any]:
    """
    List the output files from a completed transformation job.

    Args:
        job_id: The job ID to get results for

    Returns:
        Dictionary with list of output files in S3
    """
    try:
        described = batch_client.describe_jobs(jobs=[job_id])
        if not described['jobs']:
            return {"status": "error", "error": f"Job not found: {job_id}"}

        job_name = described['jobs'][0]['jobName']
        bucket = _get_output_bucket()
        prefix = f"transformations/{job_name}/"

        # Results are keyed by job name; cap the listing at 100 objects.
        listing = s3_client.list_objects_v2(Bucket=bucket, Prefix=prefix, MaxKeys=100)
        files = []
        for obj in listing.get('Contents', []):
            files.append({
                "key": obj['Key'],
                "size": obj['Size'],
                "last_modified": obj['LastModified'].isoformat(),
                "s3_uri": f"s3://{bucket}/{obj['Key']}",
            })

        return {
            "status": "success", "action": "list_results",
            "job_id": job_id, "job_name": job_name,
            "results_location": f"s3://{bucket}/{prefix}",
            "file_count": len(files), "files": files,
        }
    except Exception as exc:
        return {"status": "error", "error": str(exc)}
@tool
def execute_transform_agent(query: str) -> Dict[str, Any]:
    """
    Handles transformation execution, job status checks, and result retrieval.

    Args:
        query: Natural language request. Examples:
            - "Execute AWS/python-version-upgrade on https://github.com/user/repo with configuration validationCommands=pytest"
            - "Check status of job abc-123-def-456"
            - "List results for job abc-123-def-456"

    Returns:
        Dictionary with results
    """
    logger.info("EXECUTE TRANSFORM AGENT INVOKED")

    # Use direct Bedrock call to extract parameters (avoids Strands streaming bug)
    try:
        bedrock_rt = boto3.client('bedrock-runtime', region_name=os.getenv("AWS_REGION", "us-east-1"))
        extract_prompt = f"""Extract the following from this request. Return ONLY valid JSON, no other text.

Request: {query}

Return JSON with these fields:
- "action": one of "execute", "status", "results" (default: "execute")
- "transformation": transformation name (e.g., "AWS/python-version-upgrade" or "add-logging")
- "source": repository URL or S3 path
- "configuration": comma-separated config string (e.g., "validationCommands=pytest,additionalPlanContext=Target Python 3.13")
- "job_id": job ID if checking status or results

Example: {{"action": "execute", "transformation": "AWS/python-version-upgrade", "source": "https://github.com/user/repo", "configuration": "validationCommands=pytest", "job_id": ""}}"""

        reply = bedrock_rt.invoke_model(
            modelId=os.getenv("BEDROCK_MODEL_ID", "us.anthropic.claude-sonnet-4-20250514-v1:0"),
            body=json.dumps({
                "anthropic_version": "bedrock-2023-05-31",
                "max_tokens": 2048, "temperature": 0.1,
                "messages": [{"role": "user", "content": extract_prompt}]
            })
        )
        model_text = json.loads(reply['body'].read())['content'][0]['text'].strip()
        # Unwrap optional ```json fences around the model's JSON answer.
        if '```' in model_text:
            model_text = model_text.split('```')[1]
        if model_text.startswith('json'):
            model_text = model_text[4:]
        params = json.loads(model_text.strip())

        action = params.get('action', 'execute')
        job_id = params.get('job_id', '')

        # Status / results lookups only need the job id.
        if job_id and action == 'status':
            return {"status": "success", "result": json.dumps(get_job_status(job_id=job_id))}
        if job_id and action == 'results':
            return {"status": "success", "result": json.dumps(list_job_results(job_id=job_id))}

        # Otherwise this is an execution request.
        transformation = params.get('transformation', '')
        source = params.get('source', '')
        if not transformation or not source:
            return {"status": "error", "error": "Could not extract transformation name and source from the request."}

        outcome = execute_transformation(
            transformation=transformation,
            source=source,
            configuration=params.get('configuration', '')
        )
        return {"status": "success", "result": json.dumps(outcome)}

    except json.JSONDecodeError as e:
        logger.error(f"Failed to parse response: {e}")
        return {"status": "error", "error": f"Failed to parse parameters: {e}"}
    except Exception as e:
        logger.error(f"Execute transform agent failed: {e}", exc_info=True)
        return {"status": "error", "error": str(e)}
# Static catalog of AWS-managed transformations known to this agent.
MANAGED_TRANSFORMATIONS = [
    {'name': 'AWS/python-version-upgrade', 'language': 'python', 'description': 'Upgrade Python applications from 3.8/3.9 to 3.11/3.12/3.13', 'tags': ['python', 'upgrade', 'migration']},
    {'name': 'AWS/java-version-upgrade', 'language': 'java', 'description': 'Upgrade Java applications from any source JDK to any target JDK', 'tags': ['java', 'upgrade', 'migration']},
    {'name': 'AWS/nodejs-version-upgrade', 'language': 'nodejs', 'description': 'Upgrade Node.js applications from any source to any target version', 'tags': ['nodejs', 'javascript', 'upgrade', 'migration']},
    {'name': 'AWS/python-boto2-to-boto3', 'language': 'python', 'description': 'Migrate Python applications from boto2 to boto3', 'tags': ['python', 'aws', 'sdk', 'migration', 'boto']},
    {'name': 'AWS/java-aws-sdk-v1-to-v2', 'language': 'java', 'description': 'Upgrade AWS SDK from v1 to v2 for Java (Maven or Gradle)', 'tags': ['java', 'aws', 'sdk', 'migration']},
    {'name': 'AWS/nodejs-aws-sdk-v2-to-v3', 'language': 'nodejs', 'description': 'Upgrade Node.js from AWS SDK v2 to v3 modular architecture', 'tags': ['nodejs', 'javascript', 'aws', 'sdk', 'migration']},
    {'name': 'AWS/early-access-comprehensive-codebase-analysis', 'language': 'all', 'description': 'Deep static analysis with technical debt, security, and modernization insights', 'tags': ['analysis', 'security', 'modernization']},
    {'name': 'AWS/early-access-java-x86-to-graviton', 'language': 'java', 'description': 'Validate and migrate Java applications to ARM64 for AWS Graviton', 'tags': ['java', 'graviton', 'arm64', 'migration']},
    {'name': 'AWS/early-access-angular-to-react-migration', 'language': 'nodejs', 'description': 'Transform Angular applications to React', 'tags': ['angular', 'react', 'javascript', 'migration']},
    {'name': 'AWS/early-access-jfr-performance-optimization', 'language': 'java', 'description': 'Optimize Java performance using JFR profiling data', 'tags': ['java', 'performance', 'jfr', 'optimization']},
]


@tool
def search_transformations(query: str, language: str = "") -> Dict[str, Any]:
    """Search transformations by keyword or language.

    Args:
        query: Search keyword (e.g., 'sdk', 'upgrade', 'migration')
        language: Optional language filter (python, java, nodejs)
    """
    needle = query.lower()
    lang = language.lower() if language else ""

    def _matches(entry):
        # Keyword hit on name, description, or any tag (substring match).
        text_hit = (needle in entry['name'].lower()
                    or needle in entry['description'].lower()
                    or any(needle in tag for tag in entry['tags']))
        # Language filter: substring match, with 'all' entries always included.
        lang_hit = not lang or lang in entry['language'] or entry['language'] == 'all'
        return text_hit and lang_hit

    hits = [entry for entry in MANAGED_TRANSFORMATIONS if _matches(entry)]
    return {"status": "success", "result_count": len(hits), "results": hits}
@tool
def list_transformations(language: str = "all") -> Dict[str, Any]:
    """List all available code transformations.

    Args:
        language: Filter by language (python, java, nodejs, or all)
    """
    if language == "all":
        filtered = MANAGED_TRANSFORMATIONS
    else:
        wanted = language.lower()
        # Language-agnostic ('all') entries are included in every filter.
        filtered = [t for t in MANAGED_TRANSFORMATIONS
                    if t['language'] == wanted or t['language'] == 'all']
    return {"status": "success", "transformation_count": len(filtered), "transformations": filtered}


@tool
def list_published_custom() -> Dict[str, Any]:
    """List custom transformations that have been published to the ATX registry.

    Returns:
        Dictionary with published custom transformation names
    """
    try:
        account = boto3.client('sts').get_caller_identity()['Account']
        bucket = f"atx-source-code-{account}"
        s3 = boto3.client('s3', region_name=os.getenv("AWS_REGION", "us-east-1"))

        published = []
        pages = s3.get_paginator('list_objects_v2').paginate(
            Bucket=bucket, Prefix='custom-definitions/', Delimiter='/')
        for page in pages:
            for common in page.get('CommonPrefixes', []):
                name = common['Prefix'].replace('custom-definitions/', '').rstrip('/')
                if not name:
                    continue
                # A transform is "published" only when its status.json says so;
                # missing/unreadable status files are treated as unpublished.
                try:
                    obj = s3.get_object(Bucket=bucket, Key=f'custom-definitions/{name}/status.json')
                    meta = json.loads(obj['Body'].read().decode('utf-8'))
                except Exception:
                    continue
                if meta.get('status') == 'published':
                    published.append({'name': name, 'description': meta.get('description', '')})

        return {"status": "success", "published_custom": published, "count": len(published)}
    except Exception as e:
        return {"status": "error", "error": str(e)}
@tool
def find_transform_agent(query: str, language: str = "") -> Dict[str, Any]:
    """Finds the best matching AWS-managed transformation for an application's needs.

    Args:
        query: Description of the application and requirements
        language: Optional language filter (python, java, nodejs)
    """
    logger.info("FIND TRANSFORM AGENT INVOKED")

    # Use direct Bedrock call to reason about the best match (avoids Strands streaming bug)
    try:
        # Build one flat catalog of managed + published custom transformations.
        managed = list_transformations(language=language or "all")
        custom = list_published_custom()

        catalog = [f"- {t['name']}: {t['description']} (language: {t['language']})"
                   for t in managed.get('transformations', [])]
        catalog += [f"- {c['name']}: {c.get('description', 'Custom transformation')} (custom)"
                    for c in custom.get('published_custom', [])]

        if not catalog:
            return {"status": "success", "result": "No transformations available."}

        bedrock_rt = boto3.client('bedrock-runtime', region_name=os.getenv("AWS_REGION", "us-east-1"))
        select_prompt = f"""Given these available transformations and the user's requirements, select the BEST matching transformation.
Return ONLY valid JSON with: {{"name": "transformation-name", "reason": "why this is the best match"}}
If no transformation matches, return: {{"name": "", "reason": "No matching transformation found"}}

User requirements: {query}
Language: {language or 'Not specified'}

Available transformations:
{chr(10).join(catalog)}"""

        reply = bedrock_rt.invoke_model(
            modelId=os.getenv("BEDROCK_MODEL_ID", "us.anthropic.claude-sonnet-4-20250514-v1:0"),
            body=json.dumps({
                "anthropic_version": "bedrock-2023-05-31",
                "max_tokens": 2048, "temperature": 0.1,
                "messages": [{"role": "user", "content": select_prompt}]
            })
        )
        model_text = json.loads(reply['body'].read())['content'][0]['text'].strip()
        # Unwrap optional ```json fences around the model's JSON answer.
        if '```' in model_text:
            model_text = model_text.split('```')[1]
        if model_text.startswith('json'):
            model_text = model_text[4:]
        choice = json.loads(model_text.strip())

        name = choice.get('name', '')
        reason = choice.get('reason', '')
        if name:
            return {"status": "success", "result": f"Best match: {name}. {reason}"}
        return {"status": "success", "result": f"No matching transformation found. {reason}"}

    except Exception as e:
        logger.error(f"Find transform agent failed: {e}", exc_info=True)
        return {"status": "error", "error": str(e)}
# Module-level singletons: one MemoryClient and one resolved memory id per process.
_memory_client = None
_memory_id = None

MEMORY_NAME = "ATX_Transform_Memory"


def get_memory_client() -> MemoryClient:
    """Get or create memory client singleton."""
    global _memory_client
    if _memory_client is None:
        _memory_client = MemoryClient(region_name=os.getenv("AWS_REGION", "us-east-1"))
    return _memory_client


def _find_existing_memory(client):
    """Return the id of an existing memory whose id embeds MEMORY_NAME, else None."""
    for memory in client.list_memories():
        # The list API has returned either 'id' or 'memoryId' across versions.
        mid = memory.get('id') or memory.get('memoryId')
        if mid and MEMORY_NAME in mid:
            return mid
    return None


def initialize_memory() -> str:
    """Initialize or get existing memory resource."""
    global _memory_id
    if _memory_id:
        return _memory_id

    client = get_memory_client()

    try:
        existing = _find_existing_memory(client)
        if existing:
            _memory_id = existing
            print(f"Using existing memory: {_memory_id}")
            return _memory_id

        memory = client.create_memory_and_wait(
            name=MEMORY_NAME,
            strategies=[],
            description="Short-term memory for ATX Transform orchestrator",
            event_expiry_days=30
        )
        _memory_id = memory['id']
        print(f"Created new memory: {_memory_id}")
        return _memory_id

    except Exception as e:
        # Creation can race another worker; fall back to looking up the winner.
        if "already exists" in str(e):
            try:
                existing = _find_existing_memory(client)
                if existing:
                    _memory_id = existing
                    return _memory_id
            except Exception:
                pass
        print(f"Warning: Memory init failed: {e}")
        return None
class ShortTermMemoryHook(HookProvider):
    """Hook to store and retrieve short-term conversation memory."""

    def __init__(self, memory_client: MemoryClient, memory_id: str):
        self.memory_client = memory_client
        self.memory_id = memory_id
        # Tool outcomes accumulated since the last assistant message was stored.
        self.tool_results = []

    def on_agent_initialized(self, event: AgentInitializedEvent):
        """Load conversation history when agent starts."""
        try:
            state = event.agent.state
            actor_id = state.get("actor_id")
            session_id = state.get("session_id")
            if not (actor_id and session_id and self.memory_id):
                return

            turns = self.memory_client.get_last_k_turns(
                memory_id=self.memory_id,
                actor_id=actor_id,
                session_id=session_id,
                k=10,
                branch_name="main"
            )
            if not turns:
                return

            lines = []
            for turn in turns:
                for msg in turn:
                    lines.append(f"{msg['role'].title()}: {msg['content']['text']}")

            # Prepend prior turns to the system prompt so the agent has context.
            context = "\n".join(lines)
            event.agent.system_prompt += f"\n\n## Previous Conversation Context:\n{context}\n"
            logger.info(f"Loaded {len(turns)} turns from memory")

        except Exception as e:
            logger.error(f"Failed to load memory: {e}")

    def on_tool_executed(self, event: AfterToolCallEvent):
        """Track tool execution results."""
        try:
            # Results are truncated to keep stored events small.
            self.tool_results.append({
                "tool": getattr(event.tool_use, 'name', "unknown"),
                "result": str(event.result)[:500]
            })
        except Exception as e:
            logger.error(f"Failed to track tool: {e}")

    def on_message_added(self, event: MessageAddedEvent):
        """Store new messages to memory."""
        try:
            state = event.agent.state
            actor_id = state.get("actor_id")
            session_id = state.get("session_id")
            if not (actor_id and session_id and self.memory_id):
                return

            if not event.agent.messages:
                return
            last_message = event.agent.messages[-1]

            content = last_message.get("content", "")
            if isinstance(content, list):
                content = content[0].get("text", "") if content else ""
            if not content:
                return

            # Append tool context to assistant messages, then reset the buffer.
            if last_message['role'] == 'assistant' and self.tool_results:
                tool_lines = "\n".join(
                    f"- {t['tool']}: {t['result'][:200]}" for t in self.tool_results
                )
                content += "\n\n[Tool Context]\n" + tool_lines
                self.tool_results = []

            self.memory_client.create_event(
                memory_id=self.memory_id,
                actor_id=actor_id,
                session_id=session_id,
                messages=[(content, last_message["role"])]
            )

        except Exception as e:
            logger.error(f"Failed to store message: {e}")

    def register_hooks(self, registry: HookRegistry) -> None:
        """Register hook callbacks."""
        registry.add_callback(AgentInitializedEvent, self.on_agent_initialized)
        registry.add_callback(MessageAddedEvent, self.on_message_added)
        registry.add_callback(AfterToolCallEvent, self.on_tool_executed)
FROM public.ecr.aws/lambda/python:3.11

# Minimal dependency set: the boto3 bedrock-agentcore-control SDK only.
# No starter toolkit, no CLI subprocess tooling.
COPY requirements-deploy.txt ${LAMBDA_TASK_ROOT}/requirements-deploy.txt
RUN pip install --no-cache-dir -r ${LAMBDA_TASK_ROOT}/requirements-deploy.txt

# Lambda handler that drives create_agent_runtime / update_agent_runtime.
COPY deploy_agentcore.py ${LAMBDA_TASK_ROOT}/

# Orchestrator agent sources; the handler packages these into a ZIP
# and stages them in S3 at deploy time.
COPY orchestrator-bundle/agent.py ${LAMBDA_TASK_ROOT}/
COPY orchestrator-bundle/tools/ ${LAMBDA_TASK_ROOT}/tools/
COPY orchestrator-bundle/requirements.txt ${LAMBDA_TASK_ROOT}/

CMD ["deploy_agentcore.lambda_handler"]
if ! aws s3 ls "s3://${OUTPUT_BUCKET}" &>/dev/null; then
  echo "❌ Output bucket not found: ${OUTPUT_BUCKET}"
  echo "   Deploy CDK infrastructure first: cd cdk && ./deploy.sh (skip AgentCore stack)"
  exit 1
fi
echo "   ✅ Infrastructure found"
echo ""

# Copy orchestrator files into bundle for Docker build
echo "2. Bundling orchestrator code..."
rm -rf orchestrator-bundle
mkdir -p orchestrator-bundle/tools
cp ../orchestrator/agent.py orchestrator-bundle/
cp ../orchestrator/requirements.txt orchestrator-bundle/
cp ../orchestrator/tools/*.py orchestrator-bundle/tools/
echo "   ✅ Orchestrator bundled"
echo ""

# Build and push orchestrator container to ECR (create the repo on first run)
echo "3. Building orchestrator container..."
ORCH_ECR_REPO="atx-orchestrator"
aws ecr describe-repositories --repository-names ${ORCH_ECR_REPO} --region ${REGION} &>/dev/null || \
  aws ecr create-repository --repository-name ${ORCH_ECR_REPO} --region ${REGION} --image-scanning-configuration scanOnPush=true &>/dev/null
ORCH_ECR_URI="${ACCOUNT_ID}.dkr.ecr.${REGION}.amazonaws.com/${ORCH_ECR_REPO}"
aws ecr get-login-password --region ${REGION} | docker login --username AWS --password-stdin ${ACCOUNT_ID}.dkr.ecr.${REGION}.amazonaws.com 2>/dev/null
docker build -t ${ORCH_ECR_REPO}:latest ../orchestrator/ 2>&1 | tail -3
docker tag ${ORCH_ECR_REPO}:latest ${ORCH_ECR_URI}:latest
docker push ${ORCH_ECR_URI}:latest 2>&1 | tail -3
echo "   ✅ Orchestrator container pushed: ${ORCH_ECR_URI}:latest"
echo ""

echo "4. Building SAM application..."
# FIX: do not bake a developer's machine-specific Python bin directory into
# PATH (previously included /Users/batchus/...). pip --user installs land in
# $HOME/.local/bin, which covers sam installed via pip on Linux/macOS.
export PATH="$PATH:$HOME/.local/bin"
SAM_CLI_CONTAINER_TOOL=docker sam build 2>&1
echo ""

echo "5. Deploying SAM stack..."
sam deploy \
  --stack-name AtxAgentCoreSAM \
  --capabilities CAPABILITY_IAM CAPABILITY_NAMED_IAM \
  --parameter-overrides \
    OutputBucketName="${OUTPUT_BUCKET}" \
    SourceBucketName="${SOURCE_BUCKET}" \
    AwsRegion="${REGION}" \
    OrchestratorContainerUri="${ORCH_ECR_URI}:latest" \
  --no-confirm-changeset \
  --no-fail-on-empty-changeset \
  --region "${REGION}" \
  --resolve-s3 \
  2>&1
echo ""

# Get outputs
API_ENDPOINT=$(aws cloudformation describe-stacks --stack-name AtxAgentCoreSAM --region "${REGION}" \
  --query 'Stacks[0].Outputs[?OutputKey==`ApiEndpoint`].OutputValue' --output text)

echo "=== SAM Deployment Complete ==="
echo ""
echo "API Endpoint: ${API_ENDPOINT}"
echo ""
echo "Next steps:"
echo "  1. Deploy orchestrator to AgentCore (direct Lambda invocation):"
echo "     aws lambda invoke --function-name atx-deploy-agentcore \\"
echo "       --payload '{\"action\": \"deploy\"}' \\"
echo "       --cli-read-timeout 900 /tmp/deploy-output.json"
echo "     cat /tmp/deploy-output.json"
echo ""
echo "  2. Get the Agent Runtime ARN from the deploy output, then update the Lambda:"
echo "     aws lambda update-function-configuration --function-name atx-async-invoke-agent \\"
echo "       --environment 'Variables={AGENT_RUNTIME_ARN=,RESULT_BUCKET=${OUTPUT_BUCKET}}'"
echo ""
echo "  3. Build and deploy UI:"
echo "     cd ui && npm install"
echo "     VITE_API_ENDPOINT=${API_ENDPOINT} npx vite build"
echo "     ./deploy-aws.sh"
echo ""
Call create_agent_runtime or update_agent_runtime via SDK
+   (the runtime artifact is the container image from ORCHESTRATOR_CONTAINER_URI;
+   the staged ZIP is retained in S3 only as a deployment record)
+4. Poll until READY
+5. Return the runtime ARN
if not os.path.exists(bundle_dir): + bundle_dir = agent_dir # Fallback: code is in the same directory + + with zipfile.ZipFile(zip_buffer, 'w', zipfile.ZIP_DEFLATED) as zf: + for root, dirs, files in os.walk(bundle_dir): + dirs[:] = [d for d in dirs if d not in ('__pycache__', '.venv', '.git', 'node_modules')] + for file in files: + if file.endswith('.pyc') or file == 'deploy_agentcore.py': + continue + full_path = os.path.join(root, file) + arcname = os.path.relpath(full_path, bundle_dir) + zf.write(full_path, arcname) + + zip_buffer.seek(0) + zip_size = zip_buffer.getbuffer().nbytes + logger.info(f"ZIP size: {zip_size / 1024:.1f} KB") + + # Step 2: Upload ZIP to S3 + s3_key = f'agentcore-deployments/{RUNTIME_NAME}/agent-{int(time.time())}.zip' + logger.info(f"Uploading ZIP to s3://{RESULT_BUCKET}/{s3_key}") + s3_client.put_object( + Bucket=RESULT_BUCKET, + Key=s3_key, + Body=zip_buffer.getvalue(), + ContentType='application/zip' + ) + + # Step 3: Check if runtime already exists + existing_runtime_id = _find_existing_runtime() + + # Build and push orchestrator container to ECR, then use containerConfiguration + # First, check if we have a pre-built container URI + container_uri = os.environ.get('ORCHESTRATOR_CONTAINER_URI', '') + + if not container_uri: + # Use code configuration (direct deploy) if available, fall back to error + artifact = { + 'containerConfiguration': { + 'containerUri': 'PLACEHOLDER' # Must be set via ORCHESTRATOR_CONTAINER_URI env var + } + } + return _response(400, { + 'error': 'ORCHESTRATOR_CONTAINER_URI environment variable not set. ' + 'Build and push the orchestrator Docker image first, then set this env var.', + 'hint': 'cd orchestrator && docker build -t orchestrator . 
&& docker tag orchestrator:latest :latest && docker push :latest' + }) + + artifact = { + 'containerConfiguration': { + 'containerUri': container_uri + } + } + + network_config = { + 'networkMode': 'PUBLIC' + } + + env_vars = { + 'AWS_REGION': REGION, + 'BEDROCK_MODEL_ID': os.environ.get('BEDROCK_MODEL_ID', 'us.anthropic.claude-sonnet-4-20250514-v1:0'), + } + + if existing_runtime_id: + # Update existing runtime + logger.info(f"Updating existing runtime: {existing_runtime_id}") + response = agentcore_client.update_agent_runtime( + agentRuntimeId=existing_runtime_id, + agentRuntimeArtifact=artifact, + roleArn=EXECUTION_ROLE_ARN, + networkConfiguration=network_config, + environmentVariables=env_vars, + clientToken=str(uuid.uuid4()), + ) + runtime_id = response['agentRuntimeId'] + runtime_arn = response['agentRuntimeArn'] + operation = 'updated' + else: + # Create new runtime + logger.info(f"Creating new runtime: {RUNTIME_NAME}") + response = agentcore_client.create_agent_runtime( + agentRuntimeName=RUNTIME_NAME, + agentRuntimeArtifact=artifact, + roleArn=EXECUTION_ROLE_ARN, + networkConfiguration=network_config, + environmentVariables=env_vars, + clientToken=str(uuid.uuid4()), + ) + runtime_id = response['agentRuntimeId'] + runtime_arn = response['agentRuntimeArn'] + operation = 'created' + + logger.info(f"Runtime {operation}: {runtime_arn}") + + # Step 4: Poll until READY + logger.info("Waiting for runtime to become READY...") + for attempt in range(60): # Max 10 minutes + time.sleep(10) + status_response = agentcore_client.get_agent_runtime(agentRuntimeId=runtime_id) + status = status_response.get('status', 'UNKNOWN') + logger.info(f"Runtime status: {status} (attempt {attempt + 1})") + + if status == 'READY': + return _response(200, { + 'status': 'deployed', + 'operation': operation, + 'runtime_id': runtime_id, + 'runtime_arn': runtime_arn, + 'message': f'AgentCore runtime {operation} and READY. 
Update the async Lambda AGENT_RUNTIME_ARN env var.', + }) + elif status in ('CREATE_FAILED', 'UPDATE_FAILED'): + return _response(500, { + 'status': 'failed', + 'runtime_id': runtime_id, + 'runtime_status': status, + 'error': f'Runtime {status}. Check CloudWatch logs for details.', + }) + + return _response(500, {'error': 'Timed out waiting for runtime to become READY (10 min)'}) + + +def _find_existing_runtime(): + """Find an existing runtime by name. Returns runtime ID or None.""" + try: + paginator = agentcore_client.get_paginator('list_agent_runtimes') + for page in paginator.paginate(): + for runtime in page.get('agentRuntimes', []): + if runtime.get('agentRuntimeName') == RUNTIME_NAME: + return runtime['agentRuntimeId'] + except Exception as e: + logger.warning(f"Could not list runtimes: {e}") + return None + + +def _get_status(): + """Get current runtime status.""" + runtime_id = _find_existing_runtime() + if not runtime_id: + return _response(200, {'status': 'not_deployed', 'message': f'No runtime named {RUNTIME_NAME} found.'}) + + try: + response = agentcore_client.get_agent_runtime(agentRuntimeId=runtime_id) + return _response(200, { + 'status': response.get('status'), + 'runtime_id': runtime_id, + 'runtime_arn': response.get('agentRuntimeArn'), + 'created_at': str(response.get('createdAt', '')), + 'updated_at': str(response.get('lastUpdatedAt', '')), + }) + except Exception as e: + return _response(500, {'error': str(e)}) + + +def _response(status_code, body): + return { + 'statusCode': status_code, + 'headers': { + 'Content-Type': 'application/json', + 'Access-Control-Allow-Origin': '*', + }, + 'body': json.dumps(body), + } diff --git a/agentic-atx-platform/sam/requirements-deploy.txt b/agentic-atx-platform/sam/requirements-deploy.txt new file mode 100644 index 0000000..7efdbd6 --- /dev/null +++ b/agentic-atx-platform/sam/requirements-deploy.txt @@ -0,0 +1,2 @@ +# Dependencies for the deploy Lambda (SDK-based, no CLI) +boto3>=1.34.0 diff --git 
a/agentic-atx-platform/sam/template.yaml b/agentic-atx-platform/sam/template.yaml new file mode 100644 index 0000000..802c8ec --- /dev/null +++ b/agentic-atx-platform/sam/template.yaml @@ -0,0 +1,262 @@ +AWSTemplateFormatVersion: '2010-09-09' +Transform: AWS::Serverless-2016-10-31 +Description: > + ATX Transform Platform - AgentCore Orchestrator + API Layer. + Deploys the orchestrator to Bedrock AgentCore via Starter Toolkit, + plus the async Lambda bridge and HTTP API for the UI. + +Parameters: + OutputBucketName: + Type: String + Description: S3 bucket for transformation outputs (from CDK InfrastructureStack) + SourceBucketName: + Type: String + Description: S3 bucket for source code and custom definitions (from CDK InfrastructureStack) + BedrockModelId: + Type: String + Default: us.anthropic.claude-sonnet-4-20250514-v1:0 + Description: Bedrock model ID for the orchestrator + AwsRegion: + Type: String + Default: us-east-1 + OrchestratorContainerUri: + Type: String + Default: "" + Description: ECR URI for the orchestrator container (built by deploy.sh) + +Globals: + Function: + Timeout: 300 + MemorySize: 256 + +Resources: + # ======================================== + # 1. AgentCore Deploy Lambda + # ======================================== + # One-time Lambda that deploys the orchestrator to AgentCore Runtime + # using the boto3 bedrock-agentcore-control SDK directly (no CLI). + DeployAgentCoreFunction: + Type: AWS::Serverless::Function + Properties: + FunctionName: atx-deploy-agentcore + Runtime: python3.11 + Handler: deploy_agentcore.lambda_handler + CodeUri: . 
+ Timeout: 900 + MemorySize: 512 + Environment: + Variables: + ATX_REGION: !Ref AwsRegion + BEDROCK_MODEL_ID: !Ref BedrockModelId + AGENT_RUNTIME_NAME: atxTransformOrchestrator + AGENT_EXECUTION_ROLE_ARN: !GetAtt AgentCoreExecutionRole.Arn + RESULT_BUCKET: !Ref OutputBucketName + ORCHESTRATOR_CONTAINER_URI: !Ref OrchestratorContainerUri + Policies: + - Statement: + - Sid: AgentCoreControlAccess + Effect: Allow + Action: + - bedrock-agentcore:* + - bedrock-agentcore-control:* + Resource: "*" + - Sid: IAMPassRole + Effect: Allow + Action: + - iam:PassRole + Resource: !GetAtt AgentCoreExecutionRole.Arn + - Sid: IAMServiceLinkedRole + Effect: Allow + Action: + - iam:CreateServiceLinkedRole + Resource: !Sub "arn:aws:iam::${AWS::AccountId}:role/aws-service-role/*" + - Sid: S3ZipAccess + Effect: Allow + Action: + - s3:PutObject + - s3:GetObject + Resource: + - !Sub "arn:aws:s3:::${OutputBucketName}/agentcore-deployments/*" + - Sid: LogsAccess + Effect: Allow + Action: + - logs:CreateLogGroup + - logs:CreateLogStream + - logs:PutLogEvents + Resource: + - !Sub "arn:aws:logs:${AwsRegion}:${AWS::AccountId}:log-group:/aws/lambda/atx-deploy-agentcore:*" + + # ======================================== + # AgentCore Execution Role + # ======================================== + # IAM role that the AgentCore runtime assumes when running the orchestrator + AgentCoreExecutionRole: + Type: AWS::IAM::Role + Properties: + RoleName: atx-agentcore-execution-role + AssumeRolePolicyDocument: + Version: '2012-10-17' + Statement: + - Effect: Allow + Principal: + Service: bedrock-agentcore.amazonaws.com + Action: sts:AssumeRole + Policies: + - PolicyName: AtxAgentCorePolicy + PolicyDocument: + Version: '2012-10-17' + Statement: + - Effect: Allow + Action: + - bedrock:InvokeModel + - bedrock:InvokeModelWithResponseStream + Resource: "*" + - Effect: Allow + Action: + - batch:SubmitJob + - batch:DescribeJobs + Resource: "*" + - Effect: Allow + Action: + - s3:ListBucket + - s3:GetObject + - 
s3:PutObject + Resource: + - !Sub "arn:aws:s3:::${OutputBucketName}" + - !Sub "arn:aws:s3:::${OutputBucketName}/*" + - !Sub "arn:aws:s3:::${SourceBucketName}" + - !Sub "arn:aws:s3:::${SourceBucketName}/*" + - Effect: Allow + Action: + - sts:GetCallerIdentity + Resource: "*" + - Effect: Allow + Action: + - xray:PutTraceSegments + - xray:PutTelemetryRecords + - xray:GetSamplingRules + - xray:GetSamplingTargets + Resource: "*" + - Effect: Allow + Action: + - ecr:GetAuthorizationToken + - ecr:BatchGetImage + - ecr:GetDownloadUrlForLayer + - ecr:BatchCheckLayerAvailability + Resource: "*" + - Effect: Allow + Action: + - logs:CreateLogGroup + - logs:CreateLogStream + - logs:PutLogEvents + Resource: "*" + + # ======================================== + # 2. Async Invoke Lambda (API Bridge) + # ======================================== + AsyncInvokeRole: + Type: AWS::IAM::Role + Properties: + RoleName: atx-async-invoke-role + AssumeRolePolicyDocument: + Version: '2012-10-17' + Statement: + - Effect: Allow + Principal: + Service: lambda.amazonaws.com + Action: sts:AssumeRole + ManagedPolicyArns: + - arn:aws:iam::aws:policy/service-role/AWSLambdaBasicExecutionRole + Policies: + - PolicyName: AtxAsyncInvokePolicy + PolicyDocument: + Version: '2012-10-17' + Statement: + - Effect: Allow + Action: bedrock-agentcore:InvokeAgentRuntime + Resource: !Sub "arn:aws:bedrock-agentcore:${AwsRegion}:${AWS::AccountId}:runtime/*" + - Effect: Allow + Action: + - s3:GetObject + - s3:PutObject + - s3:ListBucket + Resource: + - !Sub "arn:aws:s3:::${OutputBucketName}" + - !Sub "arn:aws:s3:::${OutputBucketName}/*" + - !Sub "arn:aws:s3:::${SourceBucketName}" + - !Sub "arn:aws:s3:::${SourceBucketName}/*" + - Effect: Allow + Action: batch:DescribeJobs + Resource: "*" + - Effect: Allow + Action: sts:GetCallerIdentity + Resource: "*" + - Effect: Allow + Action: lambda:InvokeFunction + Resource: !Sub "arn:aws:lambda:${AwsRegion}:${AWS::AccountId}:function:atx-async-invoke-agent" + - Effect: Allow + 
Action: + - xray:PutTraceSegments + - xray:PutTelemetryRecords + Resource: "*" + - Effect: Allow + Action: + - dynamodb:GetItem + - dynamodb:PutItem + - dynamodb:UpdateItem + - dynamodb:DeleteItem + - dynamodb:Scan + Resource: !Sub "arn:aws:dynamodb:${AwsRegion}:${AWS::AccountId}:table/atx-transform-jobs" + + AsyncInvokeFunction: + Type: AWS::Serverless::Function + Properties: + FunctionName: atx-async-invoke-agent + Runtime: python3.11 + Handler: async_invoke_agent.lambda_handler + CodeUri: ../api/lambda/ + Role: !GetAtt AsyncInvokeRole.Arn + Timeout: 900 + MemorySize: 1024 + Tracing: Active + Environment: + Variables: + AGENT_RUNTIME_ARN: "" # Updated after deploy-agent is invoked + RESULT_BUCKET: !Ref OutputBucketName + Events: + Orchestrate: + Type: HttpApi + Properties: + Path: /orchestrate + Method: POST + ApiId: !Ref HttpApi + + # ======================================== + # 3. HTTP API Gateway + # ======================================== + HttpApi: + Type: AWS::Serverless::HttpApi + Properties: + StageName: prod + CorsConfiguration: + AllowOrigins: + - "*" + AllowMethods: + - POST + AllowHeaders: + - content-type + MaxAge: 86400 + +Outputs: + ApiEndpoint: + Description: HTTP API endpoint for UI + Value: !Sub "https://${HttpApi}.execute-api.${AwsRegion}.amazonaws.com/prod" + DeployLambdaName: + Description: Deploy Lambda function name (invoke directly via AWS CLI) + Value: !Ref DeployAgentCoreFunction + AsyncLambdaArn: + Description: Async invoke Lambda ARN + Value: !GetAtt AsyncInvokeFunction.Arn + AsyncLambdaName: + Description: Async invoke Lambda function name + Value: !Ref AsyncInvokeFunction diff --git a/agentic-atx-platform/ui/deploy-aws.sh b/agentic-atx-platform/ui/deploy-aws.sh new file mode 100755 index 0000000..4254b97 --- /dev/null +++ b/agentic-atx-platform/ui/deploy-aws.sh @@ -0,0 +1,199 @@ +#!/bin/bash +# Deploy ATX Transform UI to S3 + CloudFront +# Works with both Option A (standalone) and Option B (CDK-managed AtxUiStack) +set -e + 
+ACCOUNT_ID=$(aws sts get-caller-identity --query Account --output text) +REGION="us-east-1" +BUCKET_NAME="atx-transform-ui-${ACCOUNT_ID}" + +echo "=== ATX Transform UI Deployment ===" +echo "Account: ${ACCOUNT_ID}" +echo "Region: ${REGION}" +echo "" + +cd "$(dirname "$0")" + +# Step 1: Build the UI (if dist/ doesn't exist or --build flag passed) +if [ ! -d "dist" ] || [ "$1" = "--build" ]; then + echo "1. Building UI..." + npx vite build + echo " Done." +else + echo "1. Using existing dist/ build (pass --build to rebuild)" +fi + +# Step 2: Detect existing CDK-managed stack or create standalone +echo "" +echo "2. Checking for existing infrastructure..." + +CDK_STACK="AtxUiStack" +STANDALONE_STACK="atx-ui-standalone" + +if aws cloudformation describe-stacks --stack-name "${CDK_STACK}" --region "${REGION}" &>/dev/null; then + echo " ✅ Found CDK-managed stack: ${CDK_STACK}" + STACK_NAME="${CDK_STACK}" +elif aws cloudformation describe-stacks --stack-name "${STANDALONE_STACK}" --region "${REGION}" &>/dev/null; then + echo " ✅ Found standalone stack: ${STANDALONE_STACK}" + STACK_NAME="${STANDALONE_STACK}" +else + echo " No existing UI stack found. Creating standalone CloudFormation stack..." 
+ STACK_NAME="${STANDALONE_STACK}" + + cat > /tmp/atx-ui-cfn.yaml << 'TEMPLATE' +AWSTemplateFormatVersion: '2010-09-09' +Description: ATX Transform UI - S3 Static Site + CloudFront (Standalone) + +Resources: + WebsiteBucket: + Type: AWS::S3::Bucket + Properties: + BucketName: !Sub 'atx-transform-ui-${AWS::AccountId}' + PublicAccessBlockConfiguration: + BlockPublicAcls: true + BlockPublicPolicy: true + IgnorePublicAcls: true + RestrictPublicBuckets: true + BucketEncryption: + ServerSideEncryptionConfiguration: + - ServerSideEncryptionByDefault: + SSEAlgorithm: AES256 + + CloudFrontOAI: + Type: AWS::CloudFront::CloudFrontOriginAccessIdentity + Properties: + CloudFrontOriginAccessIdentityConfig: + Comment: ATX Transform UI OAI + + BucketPolicy: + Type: AWS::S3::BucketPolicy + Properties: + Bucket: !Ref WebsiteBucket + PolicyDocument: + Statement: + - Effect: Allow + Principal: + CanonicalUser: !GetAtt CloudFrontOAI.S3CanonicalUserId + Action: s3:GetObject + Resource: !Sub '${WebsiteBucket.Arn}/*' + + Distribution: + Type: AWS::CloudFront::Distribution + Properties: + DistributionConfig: + Comment: ATX Transform UI + Enabled: true + DefaultRootObject: index.html + Origins: + - Id: S3Origin + DomainName: !GetAtt WebsiteBucket.RegionalDomainName + S3OriginConfig: + OriginAccessIdentity: !Sub 'origin-access-identity/cloudfront/${CloudFrontOAI}' + DefaultCacheBehavior: + TargetOriginId: S3Origin + ViewerProtocolPolicy: redirect-to-https + AllowedMethods: [GET, HEAD] + CachedMethods: [GET, HEAD] + ForwardedValues: + QueryString: false + Compress: true + CustomErrorResponses: + - ErrorCode: 403 + ResponseCode: 200 + ResponsePagePath: /index.html + ErrorCachingMinTTL: 300 + - ErrorCode: 404 + ResponseCode: 200 + ResponsePagePath: /index.html + ErrorCachingMinTTL: 300 + ViewerCertificate: + CloudFrontDefaultCertificate: true + MinimumProtocolVersion: TLSv1.2_2021 + +Outputs: + WebsiteUrl: + Value: !Sub 'https://${Distribution.DomainName}' + Description: Website URL + 
DistributionId: + Value: !Ref Distribution + Description: CloudFront Distribution ID + BucketName: + Value: !Ref WebsiteBucket + Description: S3 Bucket Name +TEMPLATE + + aws cloudformation deploy \ + --template-file /tmp/atx-ui-cfn.yaml \ + --stack-name "${STACK_NAME}" \ + --region "${REGION}" \ + --no-fail-on-empty-changeset + + echo " Stack created." +fi + +# Step 3: Get outputs from whichever stack exists +echo "" +echo "3. Getting stack outputs..." + +DISTRIBUTION_ID=$(aws cloudformation describe-stacks \ + --stack-name "${STACK_NAME}" \ + --region "${REGION}" \ + --query 'Stacks[0].Outputs[?OutputKey==`DistributionId`].OutputValue' \ + --output text) + +WEBSITE_URL=$(aws cloudformation describe-stacks \ + --stack-name "${STACK_NAME}" \ + --region "${REGION}" \ + --query 'Stacks[0].Outputs[?contains(OutputKey,`WebsiteUrl`) || contains(OutputKey,`WebsiteURL`)].OutputValue' \ + --output text) + +# Fallback: get domain from distribution +if [ -z "$WEBSITE_URL" ] || [ "$WEBSITE_URL" = "None" ]; then + DOMAIN=$(aws cloudformation describe-stacks \ + --stack-name "${STACK_NAME}" \ + --region "${REGION}" \ + --query 'Stacks[0].Outputs[?contains(OutputKey,`Domain`)].OutputValue' \ + --output text) + if [ -n "$DOMAIN" ] && [ "$DOMAIN" != "None" ]; then + WEBSITE_URL="https://${DOMAIN}" + fi +fi + +echo " Distribution ID: ${DISTRIBUTION_ID}" +echo " Website URL: ${WEBSITE_URL}" + +# Step 4: Upload files to S3 +echo "" +echo "4. Uploading UI assets to S3..." +aws s3 sync dist/ "s3://${BUCKET_NAME}/" \ + --delete \ + --region "${REGION}" \ + --cache-control "public, max-age=31536000, immutable" \ + --exclude "index.html" + +aws s3 cp dist/index.html "s3://${BUCKET_NAME}/index.html" \ + --region "${REGION}" \ + --cache-control "no-cache, no-store, must-revalidate" \ + --content-type "text/html" + +echo " Upload complete." + +# Step 5: Invalidate CloudFront cache +echo "" +echo "5. Invalidating CloudFront cache..." 
+if [ -n "$DISTRIBUTION_ID" ] && [ "$DISTRIBUTION_ID" != "None" ]; then + aws cloudfront create-invalidation \ + --distribution-id "${DISTRIBUTION_ID}" \ + --paths "/*" \ + --query 'Invalidation.Id' \ + --output text +else + echo " ⚠️ No distribution ID found, skipping invalidation" +fi + +echo "" +echo "=== Deployment Complete ===" +echo "" +echo "Website URL: ${WEBSITE_URL}" +echo "" +echo "Note: CloudFront may take 1-2 minutes to propagate the invalidation." diff --git a/agentic-atx-platform/ui/index.html b/agentic-atx-platform/ui/index.html new file mode 100644 index 0000000..22f1960 --- /dev/null +++ b/agentic-atx-platform/ui/index.html @@ -0,0 +1,12 @@ + + + + + + ATX Transform + + +
+ + + diff --git a/agentic-atx-platform/ui/package-lock.json b/agentic-atx-platform/ui/package-lock.json new file mode 100644 index 0000000..7e39736 --- /dev/null +++ b/agentic-atx-platform/ui/package-lock.json @@ -0,0 +1,1684 @@ +{ + "name": "atx-transform-ui", + "version": "1.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "atx-transform-ui", + "version": "1.0.0", + "dependencies": { + "papaparse": "^5.4.1", + "react": "^18.2.0", + "react-dom": "^18.2.0" + }, + "devDependencies": { + "@vitejs/plugin-react": "^4.2.1", + "vite": "^5.1.0" + } + }, + "node_modules/@babel/code-frame": { + "version": "7.29.0", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.29.0.tgz", + "integrity": "sha512-9NhCeYjq9+3uxgdtp20LSiJXJvN0FeCtNGpJxuMFZ1Kv3cWUNb6DOhJwUvcVCzKGR66cw4njwM6hrJLqgOwbcw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-validator-identifier": "^7.28.5", + "js-tokens": "^4.0.0", + "picocolors": "^1.1.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/compat-data": { + "version": "7.29.0", + "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.29.0.tgz", + "integrity": "sha512-T1NCJqT/j9+cn8fvkt7jtwbLBfLC/1y1c7NtCeXFRgzGTsafi68MRv8yzkYSapBnFA6L3U2VSc02ciDzoAJhJg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/core": { + "version": "7.29.0", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.29.0.tgz", + "integrity": "sha512-CGOfOJqWjg2qW/Mb6zNsDm+u5vFQ8DxXfbM09z69p5Z6+mE1ikP2jUXw+j42Pf1XTYED2Rni5f95npYeuwMDQA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.29.0", + "@babel/generator": "^7.29.0", + "@babel/helper-compilation-targets": "^7.28.6", + "@babel/helper-module-transforms": "^7.28.6", + "@babel/helpers": "^7.28.6", + "@babel/parser": "^7.29.0", + "@babel/template": "^7.28.6", + "@babel/traverse": "^7.29.0", + 
"@babel/types": "^7.29.0", + "@jridgewell/remapping": "^2.3.5", + "convert-source-map": "^2.0.0", + "debug": "^4.1.0", + "gensync": "^1.0.0-beta.2", + "json5": "^2.2.3", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/babel" + } + }, + "node_modules/@babel/generator": { + "version": "7.29.1", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.29.1.tgz", + "integrity": "sha512-qsaF+9Qcm2Qv8SRIMMscAvG4O3lJ0F1GuMo5HR/Bp02LopNgnZBC/EkbevHFeGs4ls/oPz9v+Bsmzbkbe+0dUw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.29.0", + "@babel/types": "^7.29.0", + "@jridgewell/gen-mapping": "^0.3.12", + "@jridgewell/trace-mapping": "^0.3.28", + "jsesc": "^3.0.2" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-compilation-targets": { + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.28.6.tgz", + "integrity": "sha512-JYtls3hqi15fcx5GaSNL7SCTJ2MNmjrkHXg4FSpOA/grxK8KwyZ5bubHsCq8FXCkua6xhuaaBit+3b7+VZRfcA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/compat-data": "^7.28.6", + "@babel/helper-validator-option": "^7.27.1", + "browserslist": "^4.24.0", + "lru-cache": "^5.1.1", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-globals": { + "version": "7.28.0", + "resolved": "https://registry.npmjs.org/@babel/helper-globals/-/helper-globals-7.28.0.tgz", + "integrity": "sha512-+W6cISkXFa1jXsDEdYA8HeevQT/FULhxzR99pxphltZcVaugps53THCeiWA8SguxxpSp3gKPiuYfSWopkLQ4hw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-module-imports": { + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.28.6.tgz", + "integrity": 
"sha512-l5XkZK7r7wa9LucGw9LwZyyCUscb4x37JWTPz7swwFE/0FMQAGpiWUZn8u9DzkSBWEcK25jmvubfpw2dnAMdbw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/traverse": "^7.28.6", + "@babel/types": "^7.28.6" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-module-transforms": { + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.28.6.tgz", + "integrity": "sha512-67oXFAYr2cDLDVGLXTEABjdBJZ6drElUSI7WKp70NrpyISso3plG9SAGEF6y7zbha/wOzUByWWTJvEDVNIUGcA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-module-imports": "^7.28.6", + "@babel/helper-validator-identifier": "^7.28.5", + "@babel/traverse": "^7.28.6" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/helper-plugin-utils": { + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.28.6.tgz", + "integrity": "sha512-S9gzZ/bz83GRysI7gAD4wPT/AI3uCnY+9xn+Mx/KPs2JwHJIz1W8PZkg2cqyt3RNOBM8ejcXhV6y8Og7ly/Dug==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-string-parser": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.27.1.tgz", + "integrity": "sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-identifier": { + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.28.5.tgz", + "integrity": "sha512-qSs4ifwzKJSV39ucNjsvc6WVHs6b7S03sOh2OcHF9UHfVPqWWALUsNUVzhSBiItjRZoLHx7nIarVjqKVusUZ1Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + 
"node_modules/@babel/helper-validator-option": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.27.1.tgz", + "integrity": "sha512-YvjJow9FxbhFFKDSuFnVCe2WxXk1zWc22fFePVNEaWJEu8IrZVlda6N0uHwzZrUM1il7NC9Mlp4MaJYbYd9JSg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helpers": { + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.28.6.tgz", + "integrity": "sha512-xOBvwq86HHdB7WUDTfKfT/Vuxh7gElQ+Sfti2Cy6yIWNW05P8iUslOVcZ4/sKbE+/jQaukQAdz/gf3724kYdqw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/template": "^7.28.6", + "@babel/types": "^7.28.6" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/parser": { + "version": "7.29.0", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.29.0.tgz", + "integrity": "sha512-IyDgFV5GeDUVX4YdF/3CPULtVGSXXMLh1xVIgdCgxApktqnQV0r7/8Nqthg+8YLGaAtdyIlo2qIdZrbCv4+7ww==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.29.0" + }, + "bin": { + "parser": "bin/babel-parser.js" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@babel/plugin-transform-react-jsx-self": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx-self/-/plugin-transform-react-jsx-self-7.27.1.tgz", + "integrity": "sha512-6UzkCs+ejGdZ5mFFC/OCUrv028ab2fp1znZmCZjAOBKiBK2jXD1O+BPSfX8X2qjJ75fZBMSnQn3Rq2mrBJK2mw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-react-jsx-source": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx-source/-/plugin-transform-react-jsx-source-7.27.1.tgz", + "integrity": 
"sha512-zbwoTsBruTeKB9hSq73ha66iFeJHuaFkUbwvqElnygoNbj/jHRsSeokowZFN3CZ64IvEqcmmkVe89OPXc7ldAw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/template": { + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.28.6.tgz", + "integrity": "sha512-YA6Ma2KsCdGb+WC6UpBVFJGXL58MDA6oyONbjyF/+5sBgxY/dwkhLogbMT2GXXyU84/IhRw/2D1Os1B/giz+BQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.28.6", + "@babel/parser": "^7.28.6", + "@babel/types": "^7.28.6" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/traverse": { + "version": "7.29.0", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.29.0.tgz", + "integrity": "sha512-4HPiQr0X7+waHfyXPZpWPfWL/J7dcN1mx9gL6WdQVMbPnF3+ZhSMs8tCxN7oHddJE9fhNE7+lxdnlyemKfJRuA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.29.0", + "@babel/generator": "^7.29.0", + "@babel/helper-globals": "^7.28.0", + "@babel/parser": "^7.29.0", + "@babel/template": "^7.28.6", + "@babel/types": "^7.29.0", + "debug": "^4.3.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/types": { + "version": "7.29.0", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.29.0.tgz", + "integrity": "sha512-LwdZHpScM4Qz8Xw2iKSzS+cfglZzJGvofQICy7W7v4caru4EaAmyUuO6BGrbyQ2mYV11W0U8j5mBhd14dd3B0A==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-string-parser": "^7.27.1", + "@babel/helper-validator-identifier": "^7.28.5" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@esbuild/aix-ppc64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.21.5.tgz", + "integrity": 
"sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "aix" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/android-arm": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.21.5.tgz", + "integrity": "sha512-vCPvzSjpPHEi1siZdlvAlsPxXl7WbOVUBBAowWug4rJHb68Ox8KualB+1ocNvT5fjv6wpkX6o/iEpbDrf68zcg==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/android-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.21.5.tgz", + "integrity": "sha512-c0uX9VAUBQ7dTDCjq+wdyGLowMdtR/GoC2U5IYk/7D1H1JYC0qseD7+11iMP2mRLN9RcCMRcjC4YMclCzGwS/A==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/android-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.21.5.tgz", + "integrity": "sha512-D7aPRUUNHRBwHxzxRvp856rjUHRFW1SdQATKXH2hqA0kAZb1hKmi02OpYRacl0TxIGz/ZmXWlbZgjwWYaCakTA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/darwin-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.21.5.tgz", + "integrity": "sha512-DwqXqZyuk5AiWWf3UfLiRDJ5EDd49zg6O9wclZ7kUMv2WRFr4HKjXp/5t8JZ11QbQfUS6/cRCKGwYhtNAY88kQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/darwin-x64": { + "version": "0.21.5", + "resolved": 
"https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.21.5.tgz", + "integrity": "sha512-se/JjF8NlmKVG4kNIuyWMV/22ZaerB+qaSi5MdrXtd6R08kvs2qCN4C09miupktDitvh8jRFflwGFBQcxZRjbw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/freebsd-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.21.5.tgz", + "integrity": "sha512-5JcRxxRDUJLX8JXp/wcBCy3pENnCgBR9bN6JsY4OmhfUtIHe3ZW0mawA7+RDAcMLrMIZaf03NlQiX9DGyB8h4g==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/freebsd-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.21.5.tgz", + "integrity": "sha512-J95kNBj1zkbMXtHVH29bBriQygMXqoVQOQYA+ISs0/2l3T9/kj42ow2mpqerRBxDJnmkUDCaQT/dfNXWX/ZZCQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-arm": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.21.5.tgz", + "integrity": "sha512-bPb5AHZtbeNGjCKVZ9UGqGwo8EUu4cLq68E95A53KlxAPRmUyYv2D6F0uUI65XisGOL1hBP5mTronbgo+0bFcA==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.21.5.tgz", + "integrity": "sha512-ibKvmyYzKsBeX8d8I7MH/TMfWDXBF3db4qM6sy+7re0YXya+K1cem3on9XgdT2EQGMu4hQyZhan7TeQ8XkGp4Q==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + 
"node_modules/@esbuild/linux-ia32": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.21.5.tgz", + "integrity": "sha512-YvjXDqLRqPDl2dvRODYmmhz4rPeVKYvppfGYKSNGdyZkA01046pLWyRKKI3ax8fbJoK5QbxblURkwK/MWY18Tg==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-loong64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.21.5.tgz", + "integrity": "sha512-uHf1BmMG8qEvzdrzAqg2SIG/02+4/DHB6a9Kbya0XDvwDEKCoC8ZRWI5JJvNdUjtciBGFQ5PuBlpEOXQj+JQSg==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-mips64el": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.21.5.tgz", + "integrity": "sha512-IajOmO+KJK23bj52dFSNCMsz1QP1DqM6cwLUv3W1QwyxkyIWecfafnI555fvSGqEKwjMXVLokcV5ygHW5b3Jbg==", + "cpu": [ + "mips64el" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-ppc64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.21.5.tgz", + "integrity": "sha512-1hHV/Z4OEfMwpLO8rp7CvlhBDnjsC3CttJXIhBi+5Aj5r+MBvy4egg7wCbe//hSsT+RvDAG7s81tAvpL2XAE4w==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-riscv64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.21.5.tgz", + "integrity": "sha512-2HdXDMd9GMgTGrPWnJzP2ALSokE/0O5HhTUvWIbD3YdjME8JwvSCnNGBnTThKGEB91OZhzrJ4qIIxk/SBmyDDA==", + "cpu": [ + "riscv64" + ], + "dev": true, + 
"license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-s390x": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.21.5.tgz", + "integrity": "sha512-zus5sxzqBJD3eXxwvjN1yQkRepANgxE9lgOW2qLnmr8ikMTphkjgXu1HR01K4FJg8h1kEEDAqDcZQtbrRnB41A==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.21.5.tgz", + "integrity": "sha512-1rYdTpyv03iycF1+BhzrzQJCdOuAOtaqHTWJZCWvijKD2N5Xu0TtVC8/+1faWqcP9iBCWOmjmhoH94dH82BxPQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/netbsd-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.21.5.tgz", + "integrity": "sha512-Woi2MXzXjMULccIwMnLciyZH4nCIMpWQAs049KEeMvOcNADVxo0UBIQPfSmxB3CWKedngg7sWZdLvLczpe0tLg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/openbsd-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.21.5.tgz", + "integrity": "sha512-HLNNw99xsvx12lFBUwoT8EVCsSvRNDVxNpjZ7bPn947b8gJPzeHWyNVhFsaerc0n3TsbOINvRP2byTZ5LKezow==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/sunos-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.21.5.tgz", + "integrity": 
"sha512-6+gjmFpfy0BHU5Tpptkuh8+uw3mnrvgs+dSPQXQOv3ekbordwnzTVEb4qnIvQcYXq6gzkyTnoZ9dZG+D4garKg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "sunos" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/win32-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.21.5.tgz", + "integrity": "sha512-Z0gOTd75VvXqyq7nsl93zwahcTROgqvuAcYDUr+vOv8uHhNSKROyU961kgtCD1e95IqPKSQKH7tBTslnS3tA8A==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/win32-ia32": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.21.5.tgz", + "integrity": "sha512-SWXFF1CL2RVNMaVs+BBClwtfZSvDgtL//G/smwAc5oVK/UPu2Gu9tIaRgFmYFFKrmg3SyAjSrElf0TiJ1v8fYA==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/win32-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.21.5.tgz", + "integrity": "sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@jridgewell/gen-mapping": { + "version": "0.3.13", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.13.tgz", + "integrity": "sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.5.0", + "@jridgewell/trace-mapping": "^0.3.24" + } + }, + "node_modules/@jridgewell/remapping": { + "version": "2.3.5", + "resolved": 
"https://registry.npmjs.org/@jridgewell/remapping/-/remapping-2.3.5.tgz", + "integrity": "sha512-LI9u/+laYG4Ds1TDKSJW2YPrIlcVYOwi2fUC6xB43lueCjgxV4lffOCZCtYFiH6TNOX+tQKXx97T4IKHbhyHEQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/gen-mapping": "^0.3.5", + "@jridgewell/trace-mapping": "^0.3.24" + } + }, + "node_modules/@jridgewell/resolve-uri": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", + "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/sourcemap-codec": { + "version": "1.5.5", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.5.tgz", + "integrity": "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==", + "dev": true, + "license": "MIT" + }, + "node_modules/@jridgewell/trace-mapping": { + "version": "0.3.31", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.31.tgz", + "integrity": "sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/resolve-uri": "^3.1.0", + "@jridgewell/sourcemap-codec": "^1.4.14" + } + }, + "node_modules/@rolldown/pluginutils": { + "version": "1.0.0-beta.27", + "resolved": "https://registry.npmjs.org/@rolldown/pluginutils/-/pluginutils-1.0.0-beta.27.tgz", + "integrity": "sha512-+d0F4MKMCbeVUJwG96uQ4SgAznZNSq93I3V+9NHA4OpvqG8mRCpGdKmK8l/dl02h2CCDHwW2FqilnTyDcAnqjA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@rollup/rollup-android-arm-eabi": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.59.0.tgz", + "integrity": 
"sha512-upnNBkA6ZH2VKGcBj9Fyl9IGNPULcjXRlg0LLeaioQWueH30p6IXtJEbKAgvyv+mJaMxSm1l6xwDXYjpEMiLMg==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-android-arm64": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.59.0.tgz", + "integrity": "sha512-hZ+Zxj3SySm4A/DylsDKZAeVg0mvi++0PYVceVyX7hemkw7OreKdCvW2oQ3T1FMZvCaQXqOTHb8qmBShoqk69Q==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-darwin-arm64": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.59.0.tgz", + "integrity": "sha512-W2Psnbh1J8ZJw0xKAd8zdNgF9HRLkdWwwdWqubSVk0pUuQkoHnv7rx4GiF9rT4t5DIZGAsConRE3AxCdJ4m8rg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@rollup/rollup-darwin-x64": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.59.0.tgz", + "integrity": "sha512-ZW2KkwlS4lwTv7ZVsYDiARfFCnSGhzYPdiOU4IM2fDbL+QGlyAbjgSFuqNRbSthybLbIJ915UtZBtmuLrQAT/w==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@rollup/rollup-freebsd-arm64": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.59.0.tgz", + "integrity": "sha512-EsKaJ5ytAu9jI3lonzn3BgG8iRBjV4LxZexygcQbpiU0wU0ATxhNVEpXKfUa0pS05gTcSDMKpn3Sx+QB9RlTTA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/@rollup/rollup-freebsd-x64": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.59.0.tgz", + 
"integrity": "sha512-d3DuZi2KzTMjImrxoHIAODUZYoUUMsuUiY4SRRcJy6NJoZ6iIqWnJu9IScV9jXysyGMVuW+KNzZvBLOcpdl3Vg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/@rollup/rollup-linux-arm-gnueabihf": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.59.0.tgz", + "integrity": "sha512-t4ONHboXi/3E0rT6OZl1pKbl2Vgxf9vJfWgmUoCEVQVxhW6Cw/c8I6hbbu7DAvgp82RKiH7TpLwxnJeKv2pbsw==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm-musleabihf": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.59.0.tgz", + "integrity": "sha512-CikFT7aYPA2ufMD086cVORBYGHffBo4K8MQ4uPS/ZnY54GKj36i196u8U+aDVT2LX4eSMbyHtyOh7D7Zvk2VvA==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm64-gnu": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.59.0.tgz", + "integrity": "sha512-jYgUGk5aLd1nUb1CtQ8E+t5JhLc9x5WdBKew9ZgAXg7DBk0ZHErLHdXM24rfX+bKrFe+Xp5YuJo54I5HFjGDAA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm64-musl": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.59.0.tgz", + "integrity": "sha512-peZRVEdnFWZ5Bh2KeumKG9ty7aCXzzEsHShOZEFiCQlDEepP1dpUl/SrUNXNg13UmZl+gzVDPsiCwnV1uI0RUA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-loong64-gnu": { + "version": "4.59.0", + "resolved": 
"https://registry.npmjs.org/@rollup/rollup-linux-loong64-gnu/-/rollup-linux-loong64-gnu-4.59.0.tgz", + "integrity": "sha512-gbUSW/97f7+r4gHy3Jlup8zDG190AuodsWnNiXErp9mT90iCy9NKKU0Xwx5k8VlRAIV2uU9CsMnEFg/xXaOfXg==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-loong64-musl": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-musl/-/rollup-linux-loong64-musl-4.59.0.tgz", + "integrity": "sha512-yTRONe79E+o0FWFijasoTjtzG9EBedFXJMl888NBEDCDV9I2wGbFFfJQQe63OijbFCUZqxpHz1GzpbtSFikJ4Q==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-ppc64-gnu": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-gnu/-/rollup-linux-ppc64-gnu-4.59.0.tgz", + "integrity": "sha512-sw1o3tfyk12k3OEpRddF68a1unZ5VCN7zoTNtSn2KndUE+ea3m3ROOKRCZxEpmT9nsGnogpFP9x6mnLTCaoLkA==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-ppc64-musl": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-musl/-/rollup-linux-ppc64-musl-4.59.0.tgz", + "integrity": "sha512-+2kLtQ4xT3AiIxkzFVFXfsmlZiG5FXYW7ZyIIvGA7Bdeuh9Z0aN4hVyXS/G1E9bTP/vqszNIN/pUKCk/BTHsKA==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-riscv64-gnu": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.59.0.tgz", + "integrity": "sha512-NDYMpsXYJJaj+I7UdwIuHHNxXZ/b/N2hR15NyH3m2qAtb/hHPA4g4SuuvrdxetTdndfj9b1WOmy73kcPRoERUg==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + 
"node_modules/@rollup/rollup-linux-riscv64-musl": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.59.0.tgz", + "integrity": "sha512-nLckB8WOqHIf1bhymk+oHxvM9D3tyPndZH8i8+35p/1YiVoVswPid2yLzgX7ZJP0KQvnkhM4H6QZ5m0LzbyIAg==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-s390x-gnu": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.59.0.tgz", + "integrity": "sha512-oF87Ie3uAIvORFBpwnCvUzdeYUqi2wY6jRFWJAy1qus/udHFYIkplYRW+wo+GRUP4sKzYdmE1Y3+rY5Gc4ZO+w==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-x64-gnu": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.59.0.tgz", + "integrity": "sha512-3AHmtQq/ppNuUspKAlvA8HtLybkDflkMuLK4DPo77DfthRb71V84/c4MlWJXixZz4uruIH4uaa07IqoAkG64fg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-x64-musl": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.59.0.tgz", + "integrity": "sha512-2UdiwS/9cTAx7qIUZB/fWtToJwvt0Vbo0zmnYt7ED35KPg13Q0ym1g442THLC7VyI6JfYTP4PiSOWyoMdV2/xg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-openbsd-x64": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-openbsd-x64/-/rollup-openbsd-x64-4.59.0.tgz", + "integrity": "sha512-M3bLRAVk6GOwFlPTIxVBSYKUaqfLrn8l0psKinkCFxl4lQvOSz8ZrKDz2gxcBwHFpci0B6rttydI4IpS4IS/jQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + 
"os": [ + "openbsd" + ] + }, + "node_modules/@rollup/rollup-openharmony-arm64": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-openharmony-arm64/-/rollup-openharmony-arm64-4.59.0.tgz", + "integrity": "sha512-tt9KBJqaqp5i5HUZzoafHZX8b5Q2Fe7UjYERADll83O4fGqJ49O1FsL6LpdzVFQcpwvnyd0i+K/VSwu/o/nWlA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ] + }, + "node_modules/@rollup/rollup-win32-arm64-msvc": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.59.0.tgz", + "integrity": "sha512-V5B6mG7OrGTwnxaNUzZTDTjDS7F75PO1ae6MJYdiMu60sq0CqN5CVeVsbhPxalupvTX8gXVSU9gq+Rx1/hvu6A==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-ia32-msvc": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.59.0.tgz", + "integrity": "sha512-UKFMHPuM9R0iBegwzKF4y0C4J9u8C6MEJgFuXTBerMk7EJ92GFVFYBfOZaSGLu6COf7FxpQNqhNS4c4icUPqxA==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-x64-gnu": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-gnu/-/rollup-win32-x64-gnu-4.59.0.tgz", + "integrity": "sha512-laBkYlSS1n2L8fSo1thDNGrCTQMmxjYY5G0WFWjFFYZkKPjsMBsgJfGf4TLxXrF6RyhI60L8TMOjBMvXiTcxeA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-x64-msvc": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.59.0.tgz", + "integrity": "sha512-2HRCml6OztYXyJXAvdDXPKcawukWY2GpR5/nxKp4iBgiO3wcoEGkAaqctIbZcNB6KlUQBIqt8VYkNSj2397EfA==", + "cpu": [ + "x64" + ], + "dev": 
true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@types/babel__core": { + "version": "7.20.5", + "resolved": "https://registry.npmjs.org/@types/babel__core/-/babel__core-7.20.5.tgz", + "integrity": "sha512-qoQprZvz5wQFJwMDqeseRXWv3rqMvhgpbXFfVyWhbx9X47POIA6i/+dXefEmZKoAgOaTdaIgNSMqMIU61yRyzA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.20.7", + "@babel/types": "^7.20.7", + "@types/babel__generator": "*", + "@types/babel__template": "*", + "@types/babel__traverse": "*" + } + }, + "node_modules/@types/babel__generator": { + "version": "7.27.0", + "resolved": "https://registry.npmjs.org/@types/babel__generator/-/babel__generator-7.27.0.tgz", + "integrity": "sha512-ufFd2Xi92OAVPYsy+P4n7/U7e68fex0+Ee8gSG9KX7eo084CWiQ4sdxktvdl0bOPupXtVJPY19zk6EwWqUQ8lg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.0.0" + } + }, + "node_modules/@types/babel__template": { + "version": "7.4.4", + "resolved": "https://registry.npmjs.org/@types/babel__template/-/babel__template-7.4.4.tgz", + "integrity": "sha512-h/NUaSyG5EyxBIp8YRxo4RMe2/qQgvyowRwVMzhYhBCONbW8PUsg4lkFMrhgZhUe5z3L3MiLDuvyJ/CaPa2A8A==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.1.0", + "@babel/types": "^7.0.0" + } + }, + "node_modules/@types/babel__traverse": { + "version": "7.28.0", + "resolved": "https://registry.npmjs.org/@types/babel__traverse/-/babel__traverse-7.28.0.tgz", + "integrity": "sha512-8PvcXf70gTDZBgt9ptxJ8elBeBjcLOAcOtoO/mPJjtji1+CdGbHgm77om1GrsPxsiE+uXIpNSK64UYaIwQXd4Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.28.2" + } + }, + "node_modules/@types/estree": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz", + "integrity": "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==", + "dev": true, + "license": "MIT" + }, + 
"node_modules/@vitejs/plugin-react": { + "version": "4.7.0", + "resolved": "https://registry.npmjs.org/@vitejs/plugin-react/-/plugin-react-4.7.0.tgz", + "integrity": "sha512-gUu9hwfWvvEDBBmgtAowQCojwZmJ5mcLn3aufeCsitijs3+f2NsrPtlAWIR6OPiqljl96GVCUbLe0HyqIpVaoA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/core": "^7.28.0", + "@babel/plugin-transform-react-jsx-self": "^7.27.1", + "@babel/plugin-transform-react-jsx-source": "^7.27.1", + "@rolldown/pluginutils": "1.0.0-beta.27", + "@types/babel__core": "^7.20.5", + "react-refresh": "^0.17.0" + }, + "engines": { + "node": "^14.18.0 || >=16.0.0" + }, + "peerDependencies": { + "vite": "^4.2.0 || ^5.0.0 || ^6.0.0 || ^7.0.0" + } + }, + "node_modules/baseline-browser-mapping": { + "version": "2.10.0", + "resolved": "https://registry.npmjs.org/baseline-browser-mapping/-/baseline-browser-mapping-2.10.0.tgz", + "integrity": "sha512-lIyg0szRfYbiy67j9KN8IyeD7q7hcmqnJ1ddWmNt19ItGpNN64mnllmxUNFIOdOm6by97jlL6wfpTTJrmnjWAA==", + "dev": true, + "license": "Apache-2.0", + "bin": { + "baseline-browser-mapping": "dist/cli.cjs" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/browserslist": { + "version": "4.28.1", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.28.1.tgz", + "integrity": "sha512-ZC5Bd0LgJXgwGqUknZY/vkUQ04r8NXnJZ3yYi4vDmSiZmC/pdSN0NbNRPxZpbtO4uAfDUAFffO8IZoM3Gj8IkA==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "baseline-browser-mapping": "^2.9.0", + "caniuse-lite": "^1.0.30001759", + "electron-to-chromium": "^1.5.263", + "node-releases": "^2.0.27", + "update-browserslist-db": "^1.2.0" + }, + "bin": { + "browserslist": "cli.js" + }, + "engines": { + "node": "^6 || ^7 || ^8 || ^9 
|| ^10 || ^11 || ^12 || >=13.7" + } + }, + "node_modules/caniuse-lite": { + "version": "1.0.30001774", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001774.tgz", + "integrity": "sha512-DDdwPGz99nmIEv216hKSgLD+D4ikHQHjBC/seF98N9CPqRX4M5mSxT9eTV6oyisnJcuzxtZy4n17yKKQYmYQOA==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/caniuse-lite" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "CC-BY-4.0" + }, + "node_modules/convert-source-map": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz", + "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==", + "dev": true, + "license": "MIT" + }, + "node_modules/debug": { + "version": "4.4.3", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", + "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/electron-to-chromium": { + "version": "1.5.302", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.302.tgz", + "integrity": "sha512-sM6HAN2LyK82IyPBpznDRqlTQAtuSaO+ShzFiWTvoMJLHyZ+Y39r8VMfHzwbU8MVBzQ4Wdn85+wlZl2TLGIlwg==", + "dev": true, + "license": "ISC" + }, + "node_modules/esbuild": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.21.5.tgz", + "integrity": "sha512-mg3OPMV4hXywwpoDxu3Qda5xCKQi+vCTZq8S9J/EpkhB2HzKXq4SNFZE3+NK93JYxc8VMSep+lOUSC/RVKaBqw==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "bin": { + 
"esbuild": "bin/esbuild" + }, + "engines": { + "node": ">=12" + }, + "optionalDependencies": { + "@esbuild/aix-ppc64": "0.21.5", + "@esbuild/android-arm": "0.21.5", + "@esbuild/android-arm64": "0.21.5", + "@esbuild/android-x64": "0.21.5", + "@esbuild/darwin-arm64": "0.21.5", + "@esbuild/darwin-x64": "0.21.5", + "@esbuild/freebsd-arm64": "0.21.5", + "@esbuild/freebsd-x64": "0.21.5", + "@esbuild/linux-arm": "0.21.5", + "@esbuild/linux-arm64": "0.21.5", + "@esbuild/linux-ia32": "0.21.5", + "@esbuild/linux-loong64": "0.21.5", + "@esbuild/linux-mips64el": "0.21.5", + "@esbuild/linux-ppc64": "0.21.5", + "@esbuild/linux-riscv64": "0.21.5", + "@esbuild/linux-s390x": "0.21.5", + "@esbuild/linux-x64": "0.21.5", + "@esbuild/netbsd-x64": "0.21.5", + "@esbuild/openbsd-x64": "0.21.5", + "@esbuild/sunos-x64": "0.21.5", + "@esbuild/win32-arm64": "0.21.5", + "@esbuild/win32-ia32": "0.21.5", + "@esbuild/win32-x64": "0.21.5" + } + }, + "node_modules/escalade": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", + "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/fsevents": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/gensync": { + "version": "1.0.0-beta.2", + "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz", + "integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + 
"node_modules/js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", + "license": "MIT" + }, + "node_modules/jsesc": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-3.1.0.tgz", + "integrity": "sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==", + "dev": true, + "license": "MIT", + "bin": { + "jsesc": "bin/jsesc" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/json5": { + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz", + "integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==", + "dev": true, + "license": "MIT", + "bin": { + "json5": "lib/cli.js" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/loose-envify": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz", + "integrity": "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==", + "license": "MIT", + "dependencies": { + "js-tokens": "^3.0.0 || ^4.0.0" + }, + "bin": { + "loose-envify": "cli.js" + } + }, + "node_modules/lru-cache": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", + "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", + "dev": true, + "license": "ISC", + "dependencies": { + "yallist": "^3.0.2" + } + }, + "node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "dev": true, + "license": "MIT" + }, + "node_modules/nanoid": { + "version": "3.3.11", + "resolved": 
"https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz", + "integrity": "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "bin": { + "nanoid": "bin/nanoid.cjs" + }, + "engines": { + "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" + } + }, + "node_modules/node-releases": { + "version": "2.0.27", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.27.tgz", + "integrity": "sha512-nmh3lCkYZ3grZvqcCH+fjmQ7X+H0OeZgP40OierEaAptX4XofMh5kwNbWh7lBduUzCcV/8kZ+NDLCwm2iorIlA==", + "dev": true, + "license": "MIT" + }, + "node_modules/papaparse": { + "version": "5.5.3", + "resolved": "https://registry.npmjs.org/papaparse/-/papaparse-5.5.3.tgz", + "integrity": "sha512-5QvjGxYVjxO59MGU2lHVYpRWBBtKHnlIAcSe1uNFCkkptUh63NFRj0FJQm7nR67puEruUci/ZkjmEFrjCAyP4A==", + "license": "MIT" + }, + "node_modules/picocolors": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", + "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", + "dev": true, + "license": "ISC" + }, + "node_modules/postcss": { + "version": "8.5.6", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.6.tgz", + "integrity": "sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/postcss" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "nanoid": "^3.3.11", + "picocolors": "^1.1.1", + "source-map-js": "^1.2.1" + }, + "engines": { + "node": "^10 || ^12 || >=14" + } + }, + "node_modules/react": { + "version": 
"18.3.1", + "resolved": "https://registry.npmjs.org/react/-/react-18.3.1.tgz", + "integrity": "sha512-wS+hAgJShR0KhEvPJArfuPVN1+Hz1t0Y6n5jLrGQbkb4urgPE/0Rve+1kMB1v/oWgHgm4WIcV+i7F2pTVj+2iQ==", + "license": "MIT", + "dependencies": { + "loose-envify": "^1.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/react-dom": { + "version": "18.3.1", + "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-18.3.1.tgz", + "integrity": "sha512-5m4nQKp+rZRb09LNH59GM4BxTh9251/ylbKIbpe7TpGxfJ+9kv6BLkLBXIjjspbgbnIBNqlI23tRnTWT0snUIw==", + "license": "MIT", + "dependencies": { + "loose-envify": "^1.1.0", + "scheduler": "^0.23.2" + }, + "peerDependencies": { + "react": "^18.3.1" + } + }, + "node_modules/react-refresh": { + "version": "0.17.0", + "resolved": "https://registry.npmjs.org/react-refresh/-/react-refresh-0.17.0.tgz", + "integrity": "sha512-z6F7K9bV85EfseRCp2bzrpyQ0Gkw1uLoCel9XBVWPg/TjRj94SkJzUTGfOa4bs7iJvBWtQG0Wq7wnI0syw3EBQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/rollup": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.59.0.tgz", + "integrity": "sha512-2oMpl67a3zCH9H79LeMcbDhXW/UmWG/y2zuqnF2jQq5uq9TbM9TVyXvA4+t+ne2IIkBdrLpAaRQAvo7YI/Yyeg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/estree": "1.0.8" + }, + "bin": { + "rollup": "dist/bin/rollup" + }, + "engines": { + "node": ">=18.0.0", + "npm": ">=8.0.0" + }, + "optionalDependencies": { + "@rollup/rollup-android-arm-eabi": "4.59.0", + "@rollup/rollup-android-arm64": "4.59.0", + "@rollup/rollup-darwin-arm64": "4.59.0", + "@rollup/rollup-darwin-x64": "4.59.0", + "@rollup/rollup-freebsd-arm64": "4.59.0", + "@rollup/rollup-freebsd-x64": "4.59.0", + "@rollup/rollup-linux-arm-gnueabihf": "4.59.0", + "@rollup/rollup-linux-arm-musleabihf": "4.59.0", + "@rollup/rollup-linux-arm64-gnu": "4.59.0", + "@rollup/rollup-linux-arm64-musl": "4.59.0", + "@rollup/rollup-linux-loong64-gnu": 
"4.59.0", + "@rollup/rollup-linux-loong64-musl": "4.59.0", + "@rollup/rollup-linux-ppc64-gnu": "4.59.0", + "@rollup/rollup-linux-ppc64-musl": "4.59.0", + "@rollup/rollup-linux-riscv64-gnu": "4.59.0", + "@rollup/rollup-linux-riscv64-musl": "4.59.0", + "@rollup/rollup-linux-s390x-gnu": "4.59.0", + "@rollup/rollup-linux-x64-gnu": "4.59.0", + "@rollup/rollup-linux-x64-musl": "4.59.0", + "@rollup/rollup-openbsd-x64": "4.59.0", + "@rollup/rollup-openharmony-arm64": "4.59.0", + "@rollup/rollup-win32-arm64-msvc": "4.59.0", + "@rollup/rollup-win32-ia32-msvc": "4.59.0", + "@rollup/rollup-win32-x64-gnu": "4.59.0", + "@rollup/rollup-win32-x64-msvc": "4.59.0", + "fsevents": "~2.3.2" + } + }, + "node_modules/scheduler": { + "version": "0.23.2", + "resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.23.2.tgz", + "integrity": "sha512-UOShsPwz7NrMUqhR6t0hWjFduvOzbtv7toDH1/hIrfRNIDBnnBWd0CwJTGvTpngVlmwGCdP9/Zl/tVrDqcuYzQ==", + "license": "MIT", + "dependencies": { + "loose-envify": "^1.1.0" + } + }, + "node_modules/semver": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/source-map-js": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz", + "integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/update-browserslist-db": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.2.3.tgz", + "integrity": "sha512-Js0m9cx+qOgDxo0eMiFGEueWztz+d4+M3rGlmKPT+T4IS/jP4ylw3Nwpu6cpTTP8R1MAC1kF4VbdLt3ARf209w==", + "dev": true, + "funding": [ + { + "type": 
"opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "escalade": "^3.2.0", + "picocolors": "^1.1.1" + }, + "bin": { + "update-browserslist-db": "cli.js" + }, + "peerDependencies": { + "browserslist": ">= 4.21.0" + } + }, + "node_modules/vite": { + "version": "5.4.21", + "resolved": "https://registry.npmjs.org/vite/-/vite-5.4.21.tgz", + "integrity": "sha512-o5a9xKjbtuhY6Bi5S3+HvbRERmouabWbyUcpXXUA1u+GNUKoROi9byOJ8M0nHbHYHkYICiMlqxkg1KkYmm25Sw==", + "dev": true, + "license": "MIT", + "dependencies": { + "esbuild": "^0.21.3", + "postcss": "^8.4.43", + "rollup": "^4.20.0" + }, + "bin": { + "vite": "bin/vite.js" + }, + "engines": { + "node": "^18.0.0 || >=20.0.0" + }, + "funding": { + "url": "https://github.com/vitejs/vite?sponsor=1" + }, + "optionalDependencies": { + "fsevents": "~2.3.3" + }, + "peerDependencies": { + "@types/node": "^18.0.0 || >=20.0.0", + "less": "*", + "lightningcss": "^1.21.0", + "sass": "*", + "sass-embedded": "*", + "stylus": "*", + "sugarss": "*", + "terser": "^5.4.0" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + }, + "less": { + "optional": true + }, + "lightningcss": { + "optional": true + }, + "sass": { + "optional": true + }, + "sass-embedded": { + "optional": true + }, + "stylus": { + "optional": true + }, + "sugarss": { + "optional": true + }, + "terser": { + "optional": true + } + } + }, + "node_modules/yallist": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", + "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==", + "dev": true, + "license": "ISC" + } + } +} diff --git a/agentic-atx-platform/ui/package.json b/agentic-atx-platform/ui/package.json new file mode 100644 index 
0000000..a2bf35c --- /dev/null +++ b/agentic-atx-platform/ui/package.json @@ -0,0 +1,20 @@ +{ + "name": "atx-transform-ui", + "private": true, + "version": "1.0.0", + "type": "module", + "scripts": { + "dev": "vite", + "build": "vite build", + "preview": "vite preview" + }, + "dependencies": { + "react": "^18.2.0", + "react-dom": "^18.2.0", + "papaparse": "^5.4.1" + }, + "devDependencies": { + "@vitejs/plugin-react": "^4.2.1", + "vite": "^5.1.0" + } +} diff --git a/agentic-atx-platform/ui/sample-batch.csv b/agentic-atx-platform/ui/sample-batch.csv new file mode 100644 index 0000000..f492ba6 --- /dev/null +++ b/agentic-atx-platform/ui/sample-batch.csv @@ -0,0 +1,6 @@ +source,transformation,validationCommands,additionalPlanContext +https://github.com/venuvasu/todoapilambda,AWS/python-version-upgrade,pytest,Target Python 3.13 +https://github.com/aws-samples/aws-appconfig-java-sample,AWS/java-version-upgrade,./gradlew clean build test,Target Java 21 +https://github.com/venuvasu/toapilambdanode16,AWS/nodejs-version-upgrade,,Target Node.js 22 +https://github.com/spring-projects/spring-petclinic,AWS/early-access-comprehensive-codebase-analysis,, +https://github.com/venuvasu/todoapilambda,AWS/early-access-comprehensive-codebase-analysis,, diff --git a/agentic-atx-platform/ui/src/App.css b/agentic-atx-platform/ui/src/App.css new file mode 100644 index 0000000..6754c6b --- /dev/null +++ b/agentic-atx-platform/ui/src/App.css @@ -0,0 +1,280 @@ +* { box-sizing: border-box; margin: 0; padding: 0; } + +body { + font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, sans-serif; + background: #0f1117; + color: #e1e4e8; + min-height: 100vh; +} + +.app { + max-width: 1100px; + margin: 0 auto; + padding: 24px; +} + +header { + margin-bottom: 24px; +} + +header h1 { + font-size: 28px; + color: #58a6ff; +} + +.subtitle { + color: #8b949e; + font-size: 14px; + margin-top: 4px; +} + +nav { + display: flex; + gap: 4px; + margin-bottom: 24px; + border-bottom: 1px solid #21262d; 
+ padding-bottom: 8px; +} + +nav button { + background: none; + border: none; + color: #8b949e; + padding: 8px 16px; + cursor: pointer; + font-size: 14px; + border-radius: 6px 6px 0 0; + position: relative; +} + +nav button:hover { color: #e1e4e8; background: #161b22; } +nav button.active { color: #58a6ff; background: #161b22; border-bottom: 2px solid #58a6ff; } + +.badge { + background: #58a6ff; + color: #0f1117; + font-size: 11px; + padding: 1px 6px; + border-radius: 10px; + margin-left: 6px; +} + +/* Cards */ +.card { + background: #161b22; + border: 1px solid #21262d; + border-radius: 8px; + padding: 20px; + margin-bottom: 16px; +} + +.card h3 { + color: #58a6ff; + font-size: 16px; + margin-bottom: 8px; +} + +.card p { color: #8b949e; font-size: 13px; line-height: 1.5; } + +.tag { + display: inline-block; + background: #1f2937; + color: #7ee787; + padding: 2px 8px; + border-radius: 12px; + font-size: 11px; + margin-right: 4px; + margin-top: 8px; +} + +/* Forms */ +.form-group { + margin-bottom: 16px; +} + +.form-group label { + display: block; + color: #c9d1d9; + font-size: 13px; + margin-bottom: 6px; + font-weight: 500; +} + +input, select, textarea { + width: 100%; + background: #0d1117; + border: 1px solid #30363d; + color: #e1e4e8; + padding: 10px 12px; + border-radius: 6px; + font-size: 14px; + font-family: inherit; +} + +input:focus, select:focus, textarea:focus { + outline: none; + border-color: #58a6ff; +} + +textarea { resize: vertical; min-height: 80px; } + +/* Buttons */ +.btn { + padding: 10px 20px; + border: none; + border-radius: 6px; + font-size: 14px; + cursor: pointer; + font-weight: 500; +} + +.btn-primary { background: #238636; color: #fff; } +.btn-primary:hover { background: #2ea043; } +.btn-primary:disabled { background: #21262d; color: #484f58; cursor: not-allowed; } + +.btn-secondary { background: #21262d; color: #c9d1d9; } +.btn-secondary:hover { background: #30363d; } + +.btn-danger { background: #da3633; color: #fff; } 
+.btn-danger:hover { background: #f85149; } + +.btn-sm { padding: 6px 12px; font-size: 12px; } + +/* Status badges */ +.status { + display: inline-block; + padding: 2px 10px; + border-radius: 12px; + font-size: 12px; + font-weight: 500; +} + +.status-submitted { background: #1f2937; color: #79c0ff; } +.status-starting { background: #1f2937; color: #d2a8ff; } +.status-running { background: #0c2d6b; color: #58a6ff; } +.status-succeeded { background: #0c2d1a; color: #3fb950; } +.status-failed { background: #3d1114; color: #f85149; } + +/* Table */ +.table-wrap { + overflow-x: auto; + margin-top: 16px; +} + +table { + width: 100%; + border-collapse: collapse; + font-size: 13px; +} + +th { + text-align: left; + padding: 10px 12px; + background: #0d1117; + color: #8b949e; + font-weight: 500; + border-bottom: 1px solid #21262d; +} + +td { + padding: 10px 12px; + border-bottom: 1px solid #21262d; + color: #c9d1d9; +} + +tr:hover td { background: #1c2128; } + +/* CSV Preview */ +.csv-preview { + background: #0d1117; + border: 1px solid #30363d; + border-radius: 8px; + padding: 16px; + margin-top: 16px; + overflow-x: auto; +} + +.csv-preview h4 { + color: #58a6ff; + margin-bottom: 12px; + font-size: 14px; +} + +/* Upload area */ +.upload-area { + border: 2px dashed #30363d; + border-radius: 8px; + padding: 40px; + text-align: center; + cursor: pointer; + transition: border-color 0.2s; +} + +.upload-area:hover { border-color: #58a6ff; } +.upload-area.dragover { border-color: #58a6ff; background: #0d1117; } + +.upload-area p { color: #8b949e; font-size: 14px; } +.upload-area .icon { font-size: 36px; margin-bottom: 8px; } + +/* Loading */ +.spinner { + display: inline-block; + width: 16px; + height: 16px; + border: 2px solid #30363d; + border-top-color: #58a6ff; + border-radius: 50%; + animation: spin 0.6s linear infinite; + margin-right: 8px; + vertical-align: middle; +} + +@keyframes spin { to { transform: rotate(360deg); } } + +.loading-text { color: #8b949e; font-size: 
13px; } + +/* Response box */ +.response-box { + background: #0d1117; + border: 1px solid #30363d; + border-radius: 8px; + padding: 16px; + margin-top: 16px; + white-space: pre-wrap; + font-size: 13px; + line-height: 1.6; + color: #c9d1d9; + max-height: 400px; + overflow-y: auto; +} + +/* Flex helpers */ +.flex { display: flex; } +.flex-between { display: flex; justify-content: space-between; align-items: center; } +.gap-8 { gap: 8px; } +.gap-16 { gap: 16px; } +.mt-8 { margin-top: 8px; } +.mt-16 { margin-top: 16px; } +.mb-16 { margin-bottom: 16px; } + +/* Filter bar */ +.filter-bar { + display: flex; + gap: 12px; + margin-bottom: 16px; +} + +.filter-bar select { + width: auto; + min-width: 150px; +} + +/* Empty state */ +.empty-state { + text-align: center; + padding: 60px 20px; + color: #484f58; +} + +.empty-state .icon { font-size: 48px; margin-bottom: 12px; } +.empty-state p { font-size: 14px; } diff --git a/agentic-atx-platform/ui/src/App.jsx b/agentic-atx-platform/ui/src/App.jsx new file mode 100644 index 0000000..34aec98 --- /dev/null +++ b/agentic-atx-platform/ui/src/App.jsx @@ -0,0 +1,140 @@ +import React, { useState, useEffect } from 'react' +import TransformationList from './components/TransformationList' +import TransformationForm from './components/TransformationForm' +import CreateCustom from './components/CreateCustom' +import CsvUpload from './components/CsvUpload' +import JobTracker from './components/JobTracker' +import Chat from './components/Chat' + +const TABS = ['Transformations', 'Execute', 'Create Custom', 'CSV Batch', 'Jobs', 'Chat'] +const API_BASE = import.meta.env.VITE_API_ENDPOINT || '/api' + +// Async orchestrator for AI operations +async function orchestrate(prompt, { onStep, pollIntervalMs = 5000, maxPollMs = 300000 } = {}) { + const submitRes = await fetch(`${API_BASE}/orchestrate`, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ action: 'submit', prompt }) + }) + const { request_id } = 
await submitRes.json() + if (!request_id) throw new Error('No request_id returned') + + const deadline = Date.now() + maxPollMs + while (Date.now() < deadline) { + await new Promise(r => setTimeout(r, pollIntervalMs)) + const pollRes = await fetch(`${API_BASE}/orchestrate`, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ action: 'poll', request_id }) + }) + const data = await pollRes.json() + if (data.status === 'PROCESSING' && data.step && onStep) { + onStep(data.step) + } + if (data.status === 'COMPLETED') { + const content = data?.result?.result?.content ?? data?.result?.result ?? data?.result + if (typeof content === 'string') return content + if (Array.isArray(content)) return content.map(c => c.text).join('\n') + return JSON.stringify(content) + } + if (data.status === 'FAILED') throw new Error(data.error || 'Orchestration failed') + } + throw new Error('Orchestration timed out') +} + +// Direct calls for fast operations (status, results) - no AI overhead +async function directCall(op, job_id) { + const res = await fetch(`${API_BASE}/orchestrate`, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ action: 'direct', op, job_id }) + }) + return res.json() +} + +// Fire-and-forget: submit to orchestrator without waiting for result +async function submitAsync(prompt) { + const res = await fetch(`${API_BASE}/orchestrate`, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ action: 'submit', prompt }) + }) + return res.json() // returns { status: 'SUBMITTED', request_id: '...' 
} +} + +export default function App() { + const [tab, setTab] = useState('Transformations') + const [jobs, setJobs] = useState([]) + const [jobsLoaded, setJobsLoaded] = useState(false) + + // Load jobs from DynamoDB on mount + useEffect(() => { + async function loadJobs() { + try { + const res = await fetch(`${API_BASE}/orchestrate`, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ action: 'direct', op: 'list_jobs' }) + }) + const data = await res.json() + if (data.jobs) setJobs(data.jobs) + } catch (e) { console.error('Failed to load jobs:', e) } + setJobsLoaded(true) + } + loadJobs() + }, []) + + function updateJobs(updater) { + setJobs(prev => { + const next = typeof updater === 'function' ? updater(prev) : updater + return next + }) + } + + const addJob = (job) => { + updateJobs(prev => [job, ...prev]) + // Persist to DynamoDB + fetch(`${API_BASE}/orchestrate`, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ action: 'direct', op: 'save_job', job }) + }).catch(e => console.error('Failed to save job:', e)) + } + + const addJobs = (newJobs) => { + updateJobs(prev => [...newJobs, ...prev]) + // Persist all to DynamoDB + newJobs.forEach(job => { + fetch(`${API_BASE}/orchestrate`, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ action: 'direct', op: 'save_job', job }) + }).catch(e => console.error('Failed to save job:', e)) + }) + } + + return ( +
+
+

ATX Transform

+

AI-Powered Code Transformation Platform

+
+ +
+ {tab === 'Transformations' && } + {tab === 'Execute' && } + {tab === 'Create Custom' && } + {tab === 'CSV Batch' && } + {tab === 'Jobs' && } + {tab === 'Chat' && } +
+
+ ) +} diff --git a/agentic-atx-platform/ui/src/components/Chat.jsx b/agentic-atx-platform/ui/src/components/Chat.jsx new file mode 100644 index 0000000..7def5be --- /dev/null +++ b/agentic-atx-platform/ui/src/components/Chat.jsx @@ -0,0 +1,162 @@ +import { useState, useRef, useEffect } from 'react' + +const API_BASE = import.meta.env.VITE_API_ENDPOINT || '/api' + +export default function Chat({ orchestrate, jobs }) { + const [messages, setMessages] = useState([ + { role: 'assistant', text: 'Hi! I can help you understand your transformation jobs. Ask me about job status, why a job failed, what changed in the results, or anything about available transformations. To create or execute transformations, use the dedicated tabs above.' } + ]) + const [input, setInput] = useState('') + const [loading, setLoading] = useState(false) + const [selectedJob, setSelectedJob] = useState('') + const messagesEndRef = useRef(null) + + useEffect(() => { + messagesEndRef.current?.scrollIntoView({ behavior: 'smooth' }) + }, [messages]) + + async function handleSend(e) { + e.preventDefault() + if (!input.trim() || loading) return + + const userMsg = input.trim() + setInput('') + setMessages(prev => [...prev, { role: 'user', text: userMsg }]) + setLoading(true) + + try { + // Build context from selected job + let context = 'IMPORTANT: You are in read-only chat mode. You can check job status, analyze results, explain failures, and answer questions about transformations. Do NOT create new transformations, execute transformations, or submit any jobs. If the user asks to create or execute, tell them to use the Create Custom or Execute tabs instead. ' + if (selectedJob) { + const job = jobs.find(j => j.id === selectedJob) + if (job) { + context = `Context: Job ${job.id} (${job.transformation} on ${job.source}, status: ${job.status}, type: ${job.type || 'execution'}). ` + if (job.type === 'preview' || job.type === 'create') { + context += 'This is an orchestrator request (not a Batch job). 
Do not try to check Batch status for this ID. ' + } + + // If job succeeded, try to get result file list + if (job.status === 'SUCCEEDED' && job.type !== 'create') { + try { + const res = await fetch(`${API_BASE}/orchestrate`, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ action: 'direct', op: 'results', job_id: job.id }) + }) + const data = await res.json() + if (data.files && data.files.length > 0) { + const fileList = data.files.slice(0, 20).map(f => f.name || f.key.split('/').pop()).join(', ') + context += `Result files: ${fileList}. Results at: ${data.results_location}. ` + } + } catch {} + } + } + } + + const prompt = context + ? `${context}\n\nUser question: ${userMsg}` + : userMsg + + // Retry up to 3 times on transient Strands streaming errors + let result + let lastErr + for (let attempt = 0; attempt < 3; attempt++) { + try { + result = await orchestrate(prompt) + lastErr = null + break + } catch (err) { + lastErr = err + if (!err.message || !err.message.includes('concatenate')) { + break // Non-retryable error + } + } + } + if (lastErr) throw lastErr + // Clean up raw error responses + if (result && result.includes('"statusCode":500')) { + try { + const parsed = JSON.parse(result) + if (parsed.body) { + const body = JSON.parse(parsed.body) + if (body.error) { + result = `I encountered an issue processing your request. Please try again. (${body.error})` + } + } + } catch {} + } + setMessages(prev => [...prev, { role: 'assistant', text: result }]) + } catch (err) { + setMessages(prev => [...prev, { role: 'assistant', text: `Error: ${err.message}` }]) + } + setLoading(false) + } + + const activeJobs = jobs.filter(j => j.id) + + return ( +
+
+

Chat

+ +
+ +
+ {messages.map((msg, i) => ( +
+

+ {msg.text} +

+
+ ))} + {loading && ( +
+ Thinking... +
+ )} +
+
+ +
+ setInput(e.target.value)} + placeholder={selectedJob ? 'Ask about this job...' : 'Ask about transformations, jobs, or results...'} + style={{ flex: 1 }} + disabled={loading} + /> + +
+ + {selectedJob && ( +

+ Chatting about job {selectedJob.slice(0, 8)}... The AI has access to job details and result files. +

+ )} +
+ ) +} diff --git a/agentic-atx-platform/ui/src/components/CreateCustom.jsx b/agentic-atx-platform/ui/src/components/CreateCustom.jsx new file mode 100644 index 0000000..6dfb957 --- /dev/null +++ b/agentic-atx-platform/ui/src/components/CreateCustom.jsx @@ -0,0 +1,238 @@ +import { useState } from 'react' + +const API_BASE = import.meta.env.VITE_API_ENDPOINT || '/api' + +export default function CreateCustom({ submitAsync, onJobCreated, orchestrate }) { + const [name, setName] = useState('') + const [description, setDescription] = useState('') + const [sourceUrl, setSourceUrl] = useState('') + const [requirements, setRequirements] = useState('') + const [submitting, setSubmitting] = useState(false) + const [submitMode, setSubmitMode] = useState('') // 'publish' or 'review' + const [banner, setBanner] = useState(null) + // Review mode state + const [reviewMode, setReviewMode] = useState(false) + const [definition, setDefinition] = useState('') + const [publishing, setPublishing] = useState(false) + + async function handleAutoPublish(e) { + e.preventDefault() + if (!name.trim() || !requirements.trim()) return + setSubmitting(true) + setSubmitMode('publish') + setBanner(null) + setReviewMode(false) + + try { + const parts = [ + `Create a custom transformation called "${name.trim()}" with description "${description.trim() || name.trim()}".`, + `Requirements: ${requirements.trim()}.`, + ] + if (sourceUrl.trim()) { + parts.push(`Source repository: ${sourceUrl.trim()}. 
Analyze the source code first to understand the codebase before generating the definition.`) + } + parts.push('Generate the transformation definition and publish it to the ATX registry.') + + const result = await submitAsync(parts.join(' ')) + + if (result.request_id) { + onJobCreated({ + id: result.request_id, type: 'create', + transformation: name.trim(), source: sourceUrl.trim() || 'N/A', + status: 'PROCESSING', submittedAt: new Date().toISOString(), + }) + setBanner({ + type: 'success', + text: `Request submitted. The AI is generating and publishing "${name.trim()}". Track in Jobs tab.`, + requestId: result.request_id, + }) + setName(''); setDescription(''); setSourceUrl(''); setRequirements('') + } else { + setBanner({ type: 'error', text: 'Failed to submit request.' }) + } + } catch (err) { + setBanner({ type: 'error', text: `Error: ${err.message}` }) + } + setSubmitting(false) + } + + async function handleGenerateReview(e) { + e.preventDefault() + if (!name.trim() || !requirements.trim()) return + setSubmitMode('review') + + const savedName = name.trim() + const savedDesc = description.trim() || name.trim() + const savedSource = sourceUrl.trim() + const savedReqs = requirements.trim() + + // Show banner and clear form immediately + setBanner({ + type: 'success', + text: `Generating definition for "${savedName}". You can safely navigate away — track progress in the Jobs tab.`, + }) + setName(''); setDescription(''); setSourceUrl(''); setRequirements('') + setSubmitting(true) + setReviewMode(false) + + try { + const parts = [ + `Create a custom transformation called "${savedName}" with description "${savedDesc}".`, + `Requirements: ${savedReqs}.`, + ] + if (savedSource) { + parts.push(`Source repository: ${savedSource}. Analyze the source code first.`) + } + parts.push('Generate the transformation definition and upload to S3. 
Do NOT publish it yet.') + + const result = await submitAsync(parts.join(' ')) + const jobId = result.request_id || `preview-${Date.now()}` + + // Create job in tracker immediately + onJobCreated({ + id: jobId, type: 'preview', + transformation: savedName, source: savedSource || 'N/A', + status: 'PROCESSING', submittedAt: new Date().toISOString(), + }) + } catch (err) { + setBanner({ type: 'error', text: `Error: ${err.message}` }) + } + setSubmitting(false) + } + + async function handlePublishReviewed() { + setPublishing(true) + setBanner(null) + try { + // Upload the edited definition back to S3 + const normalized = name.trim().toLowerCase().replace(/\s+/g, '-') + const accountRes = await fetch(`${API_BASE}/orchestrate`, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ action: 'direct', op: 'list_custom' }) + }) + // Save edited definition + // Use orchestrator to publish (it reads from S3) + const result = await submitAsync( + `Publish the transformation "${normalized}" with description "${description.trim() || name.trim()}" to the ATX registry. The definition is already in S3.` + ) + if (result.request_id) { + onJobCreated({ + id: result.request_id, type: 'create', + transformation: name.trim(), source: sourceUrl.trim() || 'N/A', + status: 'PROCESSING', submittedAt: new Date().toISOString(), + }) + setBanner({ type: 'success', text: `Publishing "${name.trim()}" to the ATX registry. Track in Jobs tab.` }) + setReviewMode(false) + setDefinition('') + setName(''); setDescription(''); setSourceUrl(''); setRequirements('') + } + } catch (err) { + setBanner({ type: 'error', text: `Publish failed: ${err.message}` }) + } + setPublishing(false) + } + + return ( +
+

Create Custom Transformation

+

+ Define a custom transformation using natural language. The AI generates a transformation definition + and publishes it to the ATX registry. Once published, it can be executed like any AWS-managed transformation. +

+ + {banner && ( +
+

+ {banner.type === 'success' ? '✅' : '❌'} {banner.text} +

+ {banner.requestId && ( +

+ Request ID: {banner.requestId} +

+ )} +
+ )} + + {!reviewMode ? ( +
+
+
+ + setName(e.target.value)} required /> +
+
+ + setDescription(e.target.value)} /> +
+
+ + setSourceUrl(e.target.value)} /> +

+ If provided, the repo is cloned and analyzed so the AI generates a definition tailored to the actual code. +

+
+
+ +