From 5d7ddf2860c3938a40f41d469a0e9e64c09c2f8b Mon Sep 17 00:00:00 2001 From: Colin Smith <7762103+colinmxs@users.noreply.github.com> Date: Wed, 1 Apr 2026 15:51:24 -0600 Subject: [PATCH] Release 1.0.0-beta.20: Document soft-delete, displayText, fine-tuning costs, CodeQL remediation, dependency refresh Reliable document deletion, displayText for RAG-augmented messages, fine-tuning cost dashboard, assistant archive removal, and a full dependency refresh across Python, npm, and GitHub Actions. Features: - Soft-delete document lifecycle with background cleanup, retry logic, DynamoDB TTL backstop, and search filtering for mid-deletion docs - Upload failure reporting endpoint for client-side error tracking - DisplayText system preserving original user messages when RAG augmentation or file attachments modify the prompt sent to the agent - Debug output toggle in chat preferences for prompt inspection - Fine-tuning cost dashboard with per-user breakdowns and default monthly quota hours - Shared conversation cascade deletion on session delete Removals: - Assistant archive functionality (ARCHIVED status, archive endpoint, include_archived parameter) replaced with single delete operation Security & Code Quality: - All CodeQL findings resolved (180 log injection fixes, 5 silent exception fixes, cyclic import elimination, 13 unused variables) - Four Dependabot security patches (requests, picomatch, diff) CI/CD: - CDK synth skipped on PRs for app-api and frontend workflows - scripts/common/** removed from frontend workflow path triggers - GitHub Actions bumped (upload-artifact v7, download-artifact v8, setup-node v6, codeql-action latest) Testing: - Analog.js testing migration for frontend (vitest config removed) - fast-check v4.6.0 added for property-based frontend tests - 4,200+ lines of new backend tests for document deletion flows Tooling: - sync-version.sh now auto-updates README badge and current release text - Versioning steering docs updated across Kiro, Cursor, and Claude - Release notes steering doc added (fileMatch on RELEASE_NOTES.md) Dependencies: - Python: uvicorn 0.42.0, strands-agents 1.33.0, strands-agents-tools 0.3.0, aws-opentelemetry-distro 0.16.0, bedrock-agentcore 1.4.8, openai 2.30.0, cachetools downgraded to 6.2.4 for compatibility - Frontend: Angular 21.2.6, @angular/cdk 21.2.4 - Infrastructure: aws-cdk group bumped, constructs bumped --- .claude/launch.json | 4 +- .claude/skills/versioning/SKILL.md | 2 +- .cursor/rules/versioning.mdc | 2 +- .github/ACTIONS-REFERENCE.md | 1 + .github/workflows/app-api.yml | 16 +- .github/workflows/codeql.yml | 4 +- .github/workflows/frontend.yml | 21 +- .github/workflows/gateway.yml | 12 +- .github/workflows/inference-api.yml | 14 +- .github/workflows/infrastructure.yml | 16 +- .github/workflows/nightly.yml | 14 +- .github/workflows/rag-ingestion.yml | 14 +- .github/workflows/sagemaker-fine-tuning.yml | 8 +- .github/workflows/version-check.yml | 2 +- .gitignore | 3 + .../reliable-document-deletion/.config.kiro | 1 + .../reliable-document-deletion/design.md | 771 ++++ .../requirements.md | 133 + .../specs/reliable-document-deletion/tasks.md | 181 + .kiro/specs/share-conversations/.config.kiro | 1 + .kiro/specs/share-conversations/design.md | 355 ++ .../specs/share-conversations/requirements.md | 175 + .kiro/specs/share-conversations/tasks.md | 163 + .kiro/steering/versioning.md | 2 +- README.md | 4 +- RELEASE_NOTES.md | 256 ++ VERSION | 2 +- backend/Dockerfile.rag-ingestion | 7 + .../runtime-provisioner/lambda_function.py | 2 +- backend/pyproject.toml | 24 +- backend/scripts/seed_auth_provider.py | 2 +- .../code_interpreter_diagram_tool.py | 4 +- backend/src/agents/local_tools/url_fetcher.py | 5 +- .../agents/main_agent/core/model_config.py | 4 +- .../integrations/external_mcp_client.py | 8 +- .../main_agent/integrations/oauth_auth.py | 2 +- backend/src/agents/main_agent/main_agent.py | 4 +- .../src/agents/main_agent/quota/checker.py | 2 +- backend/src/agents/main_agent/quota/models.py | 2 +- .../src/agents/main_agent/quota/repository.py | 1 - .../main_agent/session/compaction_models.py | 3 +- .../session/preview_session_manager.py | 9 +- .../main_agent/session/session_factory.py | 16 +- .../session/tests/test_compaction.py | 1 - .../tests/test_compaction_integration.py | 1 - .../main_agent/streaming/event_formatter.py | 5 +- .../streaming/stream_coordinator.py | 19 +- .../main_agent/streaming/stream_processor.py | 10 +- .../streaming/tool_result_processor.py | 3 +- .../main_agent/tools/oauth_tool_service.py | 1 - .../agents/main_agent/tools/tool_catalog.py | 2 +- .../src/agents/main_agent/utils/timezone.py | 11 +- backend/src/agents/utils/config.py | 1 - .../app_api/admin/auth_providers/routes.py | 19 +- .../src/apis/app_api/admin/costs/routes.py | 19 +- .../src/apis/app_api/admin/costs/service.py | 19 +- .../apis/app_api/admin/fine_tuning/models.py | 23 + .../apis/app_api/admin/fine_tuning/routes.py | 134 +- .../src/apis/app_api/admin/oauth/routes.py | 16 +- .../src/apis/app_api/admin/quota/models.py | 2 - .../src/apis/app_api/admin/quota/routes.py | 73 +- .../src/apis/app_api/admin/quota/service.py | 19 +- .../src/apis/app_api/admin/roles/routes.py | 23 +- backend/src/apis/app_api/admin/routes.py | 73 +- .../admin/services/tests/test_model_access.py | 2 +- .../app_api/admin/services/tool_access.py | 2 - .../src/apis/app_api/admin/tools/routes.py | 22 +- .../src/apis/app_api/admin/users/routes.py | 8 +- .../src/apis/app_api/admin/users/service.py | 4 +- .../src/apis/app_api/assistants/TESTING.md | 15 +- backend/src/apis/app_api/assistants/routes.py | 119 +- .../src/apis/app_api/auth/api_keys/models.py | 3 +- .../apis/app_api/auth/api_keys/repository.py | 2 +- .../src/apis/app_api/auth/api_keys/routes.py | 2 +- .../src/apis/app_api/auth/api_keys/service.py | 4 +- backend/src/apis/app_api/auth/routes.py | 13 +- backend/src/apis/app_api/chat/routes.py | 21 +- backend/src/apis/app_api/costs/aggregator.py | 17 +- backend/src/apis/app_api/costs/calculator.py | 2 +- backend/src/apis/app_api/costs/models.py | 1 - backend/src/apis/app_api/costs/routes.py | 6 +- .../app_api/costs/tests/test_calculator.py | 1 - .../ingestion/embeddings/__init__.py | 1 + .../embeddings/bedrock_embeddings.py | 20 +- .../ingestion/processors/docling_processor.py | 3 +- .../app_api/documents/ingestion/status.py | 2 - backend/src/apis/app_api/documents/models.py | 12 +- backend/src/apis/app_api/documents/routes.py | 111 +- .../documents/services/cleanup_service.py | 218 ++ .../documents/services/document_service.py | 219 ++ backend/src/apis/app_api/files/routes.py | 12 +- backend/src/apis/app_api/files/service.py | 11 +- .../apis/app_api/fine_tuning/dependencies.py | 57 +- .../fine_tuning/inference_repository.py | 59 +- .../app_api/fine_tuning/job_repository.py | 56 +- .../src/apis/app_api/fine_tuning/routes.py | 30 +- .../apis/app_api/fine_tuning/s3_service.py | 1 - .../sagemaker_scripts/inference.py | 4 - backend/src/apis/app_api/main.py | 2 - backend/src/apis/app_api/memory/routes.py | 18 +- .../app_api/memory/services/memory_service.py | 21 +- backend/src/apis/app_api/messages/models.py | 1 + backend/src/apis/app_api/sessions/routes.py | 52 +- .../app_api/sessions/services/metadata.py | 41 +- .../sessions/services/session_service.py | 37 +- .../sessions/tests/test_cache_savings.py | 2 +- backend/src/apis/app_api/shares/service.py | 53 +- backend/src/apis/app_api/storage/__init__.py | 28 +- .../apis/app_api/storage/metadata_storage.py | 21 - backend/src/apis/app_api/tools/service.py | 4 +- backend/src/apis/app_api/users/routes.py | 9 +- .../inference_api/chat/converse_routes.py | 10 +- backend/src/apis/inference_api/chat/models.py | 5 - backend/src/apis/inference_api/chat/routes.py | 133 +- .../src/apis/inference_api/chat/service.py | 8 +- backend/src/apis/inference_api/main.py | 7 +- .../src/apis/shared/assistants/__init__.py | 2 - backend/src/apis/shared/assistants/models.py | 6 +- .../src/apis/shared/assistants/rag_service.py | 76 +- backend/src/apis/shared/assistants/service.py | 22 - backend/src/apis/shared/auth/rbac.py | 2 +- .../shared/embeddings/bedrock_embeddings.py | 151 +- .../src/apis/shared/files/file_resolver.py | 2 +- backend/src/apis/shared/files/models.py | 1 - .../apis/shared/oauth/provider_repository.py | 2 +- backend/src/apis/shared/oauth/routes.py | 6 +- backend/src/apis/shared/oauth/service.py | 1 - backend/src/apis/shared/quota.py | 1 - backend/src/apis/shared/rbac/admin_service.py | 1 - backend/src/apis/shared/rbac/models.py | 1 - backend/src/apis/shared/rbac/repository.py | 2 +- backend/src/apis/shared/sessions/metadata.py | 103 +- backend/src/apis/shared/sessions/models.py | 1 + .../session/test_session_factory.py | 10 +- .../property/test_pbt_cleanup_service.py | 384 ++ .../property/test_pbt_document_deletion.py | 447 +++ .../property/test_pbt_search_filtering.py | 130 + .../property/test_pbt_vector_deletion.py | 95 + backend/tests/routes/test_cleanup_service.py | 403 ++ backend/tests/routes/test_delete_endpoints.py | 241 ++ .../tests/routes/test_document_deletion.py | 362 ++ backend/tests/routes/test_sessions.py | 72 + backend/tests/routes/test_share_export.py | 42 +- backend/tests/shared/test_assistants.py | 11 - .../shared/test_list_documents_filtering.py | 146 + backend/tests/shared/test_search_filtering.py | 209 + .../tests/shared/test_sessions_metadata.py | 109 + backend/tests/shared/test_vector_deletion.py | 114 + backend/uv.lock | 546 ++- codeql-alerts.json | 3372 +++++++++++++++++ frontend/ai.client/package-lock.json | 2551 ++++++++++--- frontend/ai.client/package.json | 53 +- .../ai.client/src/app/admin/admin.page.ts | 6 + .../src/app/admin/costs/admin-costs.page.ts | 1 - .../models/fine-tuning-access.models.ts | 20 + .../fine-tuning-admin-http.service.ts | 17 +- .../fine-tuning-admin-state.service.ts | 40 +- .../fine-tuning-costs.page.html | 281 ++ .../fine-tuning-costs.page.ts | 134 + .../services/managed-models.service.ts | 2 +- .../pages/tier-list/tier-list.component.ts | 1 - .../services/quota-state.service.ts | 1 - .../app/admin/tools/pages/tool-form.page.ts | 2 - .../tools/services/admin-tool.service.ts | 1 - frontend/ai.client/src/app/app.config.spec.ts | 6 +- frontend/ai.client/src/app/app.routes.ts | 6 +- .../assistant-form/assistant-form.page.ts | 16 +- .../src/app/assistants/assistants.page.ts | 6 +- .../components/assistant-list.component.ts | 2 - .../app/assistants/models/assistant.model.ts | 4 +- .../services/assistant-api.service.ts | 8 - .../services/assistant.service.spec.ts | 4 +- .../assistants/services/assistant.service.ts | 25 +- .../assistants/services/document.service.ts | 30 + .../ai.client/src/app/auth/auth-pbt.spec.ts | 9 +- .../error-toast/error-toast.component.ts | 2 +- .../components/session-list/session-list.ts | 2 +- .../app/components/toast/toast.component.ts | 2 +- .../components/tooltip/tooltip.directive.ts | 4 +- .../dashboard/fine-tuning-dashboard.page.html | 17 + .../dashboard/fine-tuning-dashboard.page.ts | 4 + .../manage-sessions/manage-sessions.page.ts | 12 +- .../src/app/memory/memory-dashboard.page.ts | 2 +- .../app/services/local-settings.service.ts | 7 + .../tool-rail/tool-rail.component.spec.ts | 83 +- .../components/user-message.component.ts | 25 +- .../message-list/message-list.component.ts | 2 +- .../services/chat/chat-http.service.ts | 4 - .../session/services/models/message.model.ts | 2 +- .../services/session/session.service.ts | 2 +- .../services/connections.service.ts | 1 - .../oauth-callback/oauth-callback.page.ts | 1 - .../chat-preferences-settings.page.ts | 41 +- .../src/app/settings/settings.page.ts | 2 - .../src/app/shared/shared-view.page.spec.ts | 252 +- .../src/app/shared/shared-view.page.ts | 53 +- frontend/ai.client/src/index.html | 2 +- frontend/ai.client/vitest.config.ts | 9 - infrastructure/lib/app-api-stack.ts | 5 +- infrastructure/lib/config.ts | 2 + infrastructure/lib/frontend-stack.ts | 2 +- infrastructure/lib/inference-api-stack.ts | 2 +- infrastructure/lib/infrastructure-stack.ts | 2 +- infrastructure/lib/rag-ingestion-stack.ts | 5 +- infrastructure/package-lock.json | 2430 +++++++----- infrastructure/package.json | 17 +- infrastructure/test/helpers/mock-config.ts | 1 + .../test/rag-ingestion-stack.test.ts | 1 + .../test/sagemaker-fine-tuning-stack.test.ts | 4 +- scripts/common/sync-version.sh | 12 + scripts/stack-rag-ingestion/deploy.sh | 27 +- 211 files changed, 14802 insertions(+), 2796 deletions(-) create mode 100644 .kiro/specs/reliable-document-deletion/.config.kiro create mode 100644 .kiro/specs/reliable-document-deletion/design.md create mode 100644 .kiro/specs/reliable-document-deletion/requirements.md create mode 100644 .kiro/specs/reliable-document-deletion/tasks.md create mode 100644 .kiro/specs/share-conversations/.config.kiro create mode 100644 .kiro/specs/share-conversations/design.md create mode 100644 .kiro/specs/share-conversations/requirements.md create mode 100644 .kiro/specs/share-conversations/tasks.md create mode 100644 backend/src/apis/app_api/documents/services/cleanup_service.py create mode 100644 backend/tests/property/test_pbt_cleanup_service.py create mode 100644 backend/tests/property/test_pbt_document_deletion.py create mode 100644 backend/tests/property/test_pbt_search_filtering.py create mode 100644 backend/tests/property/test_pbt_vector_deletion.py create mode 100644 backend/tests/routes/test_cleanup_service.py create mode 100644 backend/tests/routes/test_delete_endpoints.py create mode 100644 backend/tests/routes/test_document_deletion.py create mode 100644 backend/tests/shared/test_list_documents_filtering.py create mode 100644 backend/tests/shared/test_search_filtering.py create mode 100644 backend/tests/shared/test_vector_deletion.py create mode 100644 codeql-alerts.json create mode 100644 frontend/ai.client/src/app/admin/fine-tuning-costs/fine-tuning-costs.page.html create mode 100644 frontend/ai.client/src/app/admin/fine-tuning-costs/fine-tuning-costs.page.ts delete mode 100644 frontend/ai.client/vitest.config.ts diff --git a/.claude/launch.json b/.claude/launch.json index 0689bd6d..94581d61 100644 --- a/.claude/launch.json +++ b/.claude/launch.json @@ -10,14 +10,14 @@ }, { "name": "app-api", - "runtimeExecutable": "/Users/philmerrell/Repos/agentcore-public-stack/backend/venv/bin/python", + "runtimeExecutable": "/Users/philmerrell/Repos/agentcore-public-stack/backend/.venv/bin/python", "runtimeArgs": ["main.py"], "port": 8000, "cwd": "backend/src/apis/app_api" }, { "name": "inference-api", - "runtimeExecutable": "/Users/philmerrell/Repos/agentcore-public-stack/backend/venv/bin/python", + "runtimeExecutable": "/Users/philmerrell/Repos/agentcore-public-stack/backend/.venv/bin/python", "runtimeArgs": ["main.py"], "port": 8001, "cwd": "backend/src/apis/inference_api" diff --git a/.claude/skills/versioning/SKILL.md b/.claude/skills/versioning/SKILL.md index 1af471c3..59193714 100644 --- a/.claude/skills/versioning/SKILL.md +++ b/.claude/skills/versioning/SKILL.md @@ -17,7 +17,7 @@ Example: `1.0.0-beta.1`, `1.1.0` 2. Run `bash scripts/common/sync-version.sh` 3. Commit both the `VERSION` file and the updated manifests -The sync script updates `backend/pyproject.toml`, `frontend/ai.client/package.json`, and `infrastructure/package.json`. +The sync script updates `backend/pyproject.toml`, `frontend/ai.client/package.json`, `infrastructure/package.json`, and the `README.md` version badge and "Current release" text. ## PR Gate diff --git a/.cursor/rules/versioning.mdc b/.cursor/rules/versioning.mdc index d2c50084..8bed1ba4 100644 --- a/.cursor/rules/versioning.mdc +++ b/.cursor/rules/versioning.mdc @@ -17,7 +17,7 @@ Example: `1.0.0-beta.1`, `1.1.0` 2. Run `bash scripts/common/sync-version.sh` 3. Commit both the `VERSION` file and the updated manifests -The sync script updates `backend/pyproject.toml`, `frontend/ai.client/package.json`, and `infrastructure/package.json`. +The sync script updates `backend/pyproject.toml`, `frontend/ai.client/package.json`, `infrastructure/package.json`, and the `README.md` version badge and "Current release" text. ## PR Gate diff --git a/.github/ACTIONS-REFERENCE.md b/.github/ACTIONS-REFERENCE.md index 401333ee..78a0a51b 100644 --- a/.github/ACTIONS-REFERENCE.md +++ b/.github/ACTIONS-REFERENCE.md @@ -35,6 +35,7 @@ GitHub provides two mechanisms for storing configuration values: | CDK_FILE_UPLOAD_CORS_ORIGINS | Variable | No | `http://localhost:4200` | Infrastructure, App API | Comma-separated CORS origins for file upload S3 bucket | | CDK_FILE_UPLOAD_MAX_SIZE_MB | Variable | No | `10` | Infrastructure, App API | Maximum file upload size in megabytes | | CDK_FINE_TUNING_ENABLED | Variable | No | `false` | SageMaker Fine-Tuning, App API | Enable SageMaker fine-tuning stack and App API fine-tuning routes. Must be `true` before deploying the SageMaker Fine-Tuning workflow. | +| CDK_FINE_TUNING_DEFAULT_QUOTA_HOURS | Variable | No | `0` | App API | Default monthly GPU-hour quota for all authenticated users. `0` = whitelist-only (admin must grant each user). Positive value (e.g. `5`) = open access with that default budget. | | CDK_FRONTEND_BUCKET_NAME | Variable | No | None | Frontend | S3 bucket name for frontend assets (defaults to generated name with account ID) | | CDK_FRONTEND_CERTIFICATE_ARN | Variable | No | None | Frontend | ACM certificate ARN for HTTPS on CloudFront (required for custom domain) | | CDK_FRONTEND_CLOUDFRONT_PRICE_CLASS | Variable | No | `PriceClass_100` | Frontend | CloudFront price class (PriceClass_100, PriceClass_200, PriceClass_All) | diff --git a/.github/workflows/app-api.yml b/.github/workflows/app-api.yml index 60e71890..b3df68ae 100644 --- a/.github/workflows/app-api.yml +++ b/.github/workflows/app-api.yml @@ -172,7 +172,7 @@ jobs: echo "Image size: ${IMAGE_SIZE} bytes" - name: Upload Docker image artifact - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: app-api-docker-image path: ${{ runner.temp }}/app-api-image.tar @@ -214,7 +214,7 @@ jobs: uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - name: Download Docker image artifact - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1 with: name: app-api-docker-image path: ${{ runner.temp }} @@ -281,6 +281,7 @@ jobs: CDK_FILE_UPLOAD_CORS_ORIGINS: ${{ vars.CDK_FILE_UPLOAD_CORS_ORIGINS }} CDK_FILE_UPLOAD_MAX_SIZE_MB: ${{ vars.CDK_FILE_UPLOAD_MAX_SIZE_MB }} CDK_FINE_TUNING_ENABLED: ${{ vars.CDK_FINE_TUNING_ENABLED }} + CDK_FINE_TUNING_DEFAULT_QUOTA_HOURS: ${{ vars.CDK_FINE_TUNING_DEFAULT_QUOTA_HOURS }} CDK_AWS_ACCOUNT: ${{ vars.CDK_AWS_ACCOUNT }} AWS_ROLE_ARN: ${{ secrets.AWS_ROLE_ARN }} @@ -318,7 +319,7 @@ jobs: bash scripts/stack-app-api/synth.sh - name: Upload synthesized templates - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: app-api-cdk-synth path: infrastructure/cdk.out/ @@ -360,7 +361,7 @@ jobs: key: infrastructure-node-modules-${{ hashFiles('infrastructure/package-lock.json') }} - name: Download synthesized templates - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1 with: name: app-api-cdk-synth path: infrastructure/cdk.out/ @@ -418,7 +419,7 @@ jobs: uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - name: Download Docker image artifact - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1 with: name: app-api-docker-image path: ${{ runner.temp }} @@ -478,6 +479,7 @@ jobs: CDK_FILE_UPLOAD_CORS_ORIGINS: ${{ vars.CDK_FILE_UPLOAD_CORS_ORIGINS }} CDK_FILE_UPLOAD_MAX_SIZE_MB: ${{ vars.CDK_FILE_UPLOAD_MAX_SIZE_MB }} CDK_FINE_TUNING_ENABLED: ${{ vars.CDK_FINE_TUNING_ENABLED }} + CDK_FINE_TUNING_DEFAULT_QUOTA_HOURS: ${{ vars.CDK_FINE_TUNING_DEFAULT_QUOTA_HOURS }} CDK_AWS_ACCOUNT: ${{ vars.CDK_AWS_ACCOUNT }} AWS_ROLE_ARN: ${{ secrets.AWS_ROLE_ARN }} @@ -495,7 +497,7 @@ jobs: key: infrastructure-node-modules-${{ hashFiles('infrastructure/package-lock.json') }} - name: Download synthesized templates - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1 with: name: app-api-cdk-synth path: infrastructure/cdk.out/ @@ -525,7 +527,7 @@ jobs: bash scripts/stack-app-api/tag-latest.sh - name: Upload stack outputs - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 if: always() with: name: app-api-deployment-outputs diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index 0a7735a3..f1b1ce66 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -53,7 +53,7 @@ jobs: uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - name: Initialize CodeQL - uses: github/codeql-action/init@38697555549f1db7851b81482ff19f1fa5c4fedc # v4.34.1 + uses: github/codeql-action/init@c10b8064de6f491fea524254123dbe5e09572f13 # v4.35.1 with: languages: ${{ matrix.language }} build-mode: ${{ matrix.build-mode }} @@ -63,7 +63,7 @@ jobs: queries: security-and-quality - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@38697555549f1db7851b81482ff19f1fa5c4fedc # v4.34.1 + uses: github/codeql-action/analyze@c10b8064de6f491fea524254123dbe5e09572f13 # v4.35.1 with: category: "/language:${{ matrix.language }}" # Upload SARIF database for debugging and audit trail diff --git a/.github/workflows/frontend.yml b/.github/workflows/frontend.yml index 2a8590ac..5632498d 100644 --- a/.github/workflows/frontend.yml +++ b/.github/workflows/frontend.yml @@ -11,7 +11,6 @@ on: - 'infrastructure/lib/config.ts' - 'infrastructure/bin/infrastructure.ts' - 'scripts/stack-frontend/**' - - 'scripts/common/**' - '.github/workflows/frontend.yml' pull_request: branches: @@ -23,7 +22,6 @@ on: - 'infrastructure/lib/config.ts' - 'infrastructure/bin/infrastructure.ts' - 'scripts/stack-frontend/**' - - 'scripts/common/**' - '.github/workflows/frontend.yml' workflow_dispatch: inputs: @@ -140,7 +138,7 @@ jobs: bash scripts/stack-frontend/build.sh - name: Upload Frontend build artifacts - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: frontend-build path: frontend/ai.client/dist/ @@ -201,7 +199,7 @@ jobs: bash scripts/stack-frontend/test.sh - name: Upload Frontend test results - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: frontend-test-results path: frontend/ai.client/coverage/ @@ -212,6 +210,7 @@ jobs: name: Synthesize CDK runs-on: ubuntu-24.04 needs: build-cdk + if: github.event_name != 'pull_request' # Select environment based on trigger # Manual: workflow_dispatch input @@ -267,7 +266,7 @@ jobs: bash scripts/stack-frontend/synth.sh - name: Upload synthesized templates - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: frontend-cdk-synth path: infrastructure/cdk.out/ @@ -277,7 +276,7 @@ jobs: name: Test CDK runs-on: ubuntu-24.04 needs: synth-cdk - if: ${{ github.event.inputs.skip_tests != 'true' }} + if: ${{ github.event_name != 'pull_request' && github.event.inputs.skip_tests != 'true' }} # Select environment based on trigger # Manual: workflow_dispatch input @@ -308,7 +307,7 @@ jobs: key: infrastructure-node-modules-${{ hashFiles('infrastructure/package-lock.json') }} - name: Download synthesized templates - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1 with: name: frontend-cdk-synth path: infrastructure/cdk.out/ @@ -378,7 +377,7 @@ jobs: key: infrastructure-node-modules-${{ hashFiles('infrastructure/package-lock.json') }} - name: Download synthesized templates - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1 with: name: frontend-cdk-synth path: infrastructure/cdk.out/ @@ -401,7 +400,7 @@ jobs: bash scripts/stack-frontend/deploy-cdk.sh - name: Upload stack outputs - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: frontend-cdk-outputs path: infrastructure/frontend-outputs.json @@ -442,13 +441,13 @@ jobs: uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - name: Download Frontend build artifacts - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1 with: name: frontend-build path: frontend/ai.client/dist/ - name: Download CDK outputs - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1 with: name: frontend-cdk-outputs path: infrastructure/ diff --git a/.github/workflows/gateway.yml b/.github/workflows/gateway.yml index 883bd0ac..b5ee3d48 100644 --- a/.github/workflows/gateway.yml +++ b/.github/workflows/gateway.yml @@ -142,7 +142,7 @@ jobs: bash scripts/stack-gateway/build-cdk.sh - name: Upload build artifacts - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: gateway-cdk-build path: | @@ -189,7 +189,7 @@ jobs: gateway-node- - name: Download build artifacts - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1 with: name: gateway-cdk-build path: infrastructure @@ -203,7 +203,7 @@ jobs: bash scripts/stack-gateway/synth.sh - name: Upload synthesized templates - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: gateway-cdk-templates path: infrastructure/cdk.out/ @@ -262,7 +262,7 @@ jobs: gateway-node- - name: Download synthesized templates - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1 with: name: gateway-cdk-templates path: infrastructure/cdk.out @@ -333,7 +333,7 @@ jobs: gateway-node- - name: Download synthesized templates - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1 with: name: gateway-cdk-templates path: infrastructure/cdk.out @@ -347,7 +347,7 @@ jobs: bash scripts/stack-gateway/deploy.sh - name: Upload stack outputs - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 if: always() with: name: gateway-deployment-outputs diff --git a/.github/workflows/inference-api.yml b/.github/workflows/inference-api.yml index fdad6232..97e3e9d1 100644 --- a/.github/workflows/inference-api.yml +++ b/.github/workflows/inference-api.yml @@ -188,7 +188,7 @@ jobs: echo "Image size: ${IMAGE_SIZE} bytes" - name: Upload Docker image artifact - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: inference-api-docker-image path: ${{ runner.temp }}/inference-api-image.tar @@ -230,7 +230,7 @@ jobs: uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - name: Download Docker image artifact - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1 with: name: inference-api-docker-image path: ${{ runner.temp }} @@ -327,7 +327,7 @@ jobs: bash scripts/stack-inference-api/synth.sh - name: Upload synthesized templates - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: inference-api-cdk-synth path: infrastructure/cdk.out/ @@ -368,7 +368,7 @@ jobs: key: infrastructure-node-modules-${{ hashFiles('infrastructure/package-lock.json') }} - name: Download synthesized templates - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1 with: name: inference-api-cdk-synth path: infrastructure/cdk.out/ @@ -428,7 +428,7 @@ jobs: uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - name: Download Docker image artifact - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1 with: name: inference-api-docker-image path: ${{ runner.temp }} @@ -501,7 +501,7 @@ jobs: key: infrastructure-node-modules-${{ hashFiles('infrastructure/package-lock.json') }} - name: Download synthesized templates - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1 with: name: inference-api-cdk-synth path: infrastructure/cdk.out/ @@ -528,7 +528,7 @@ jobs: bash scripts/stack-inference-api/tag-latest.sh - name: Upload stack outputs - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 if: always() with: name: inference-api-deployment-outputs diff --git a/.github/workflows/infrastructure.yml b/.github/workflows/infrastructure.yml index b4120fe8..6191ee16 100644 --- a/.github/workflows/infrastructure.yml +++ b/.github/workflows/infrastructure.yml @@ -125,7 +125,7 @@ jobs: bash scripts/stack-infrastructure/build.sh - name: Upload build artifacts - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: infrastructure-cdk-build path: | @@ -178,7 +178,7 @@ jobs: key: infrastructure-node-modules-${{ hashFiles('infrastructure/package-lock.json') }} - name: Download build artifacts - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1 with: name: infrastructure-cdk-build path: infrastructure @@ -202,7 +202,7 @@ jobs: bash scripts/stack-infrastructure/synth.sh - name: Upload synthesized templates - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: infrastructure-cdk-synth path: infrastructure/cdk.out/ @@ -251,13 +251,13 @@ jobs: key: infrastructure-node-modules-${{ hashFiles('infrastructure/package-lock.json') }} - name: Download build artifacts - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1 with: name: infrastructure-cdk-build path: infrastructure - name: Download synthesized templates - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1 with: name: infrastructure-cdk-synth path: infrastructure/cdk.out/ @@ -336,13 +336,13 @@ jobs: key: infrastructure-node-modules-${{ hashFiles('infrastructure/package-lock.json') }} - name: Download build artifacts - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1 with: name: infrastructure-cdk-build path: infrastructure - name: Download synthesized templates - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1 with: name: infrastructure-cdk-synth path: infrastructure/cdk.out/ @@ -365,7 +365,7 @@ jobs: bash scripts/stack-infrastructure/deploy.sh - name: Upload stack outputs - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 if: always() with: name: infrastructure-outputs diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index ebca1c7c..4bd0eb29 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -296,7 +296,7 @@ jobs: run: bash scripts/stack-app-api/test.sh - name: Upload backend coverage artifacts - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: backend-coverage path: | @@ -337,7 +337,7 @@ jobs: run: bash scripts/stack-frontend/test.sh - name: Upload frontend coverage artifacts - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: frontend-coverage path: frontend/ai.client/coverage/ @@ -376,14 +376,14 @@ jobs: - name: Download backend coverage if: needs.test-backend.result == 'success' - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1 with: name: backend-coverage path: backend/ - name: Download frontend coverage if: needs.test-frontend.result == 'success' - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1 with: name: frontend-coverage path: frontend/ai.client/coverage/ @@ -396,7 +396,7 @@ jobs: run: python3 scripts/nightly/compare-coverage.py - name: Upload coverage comparison report - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: coverage-comparison path: coverage-comparison.json @@ -434,7 +434,7 @@ jobs: python-version: '3.13' - name: Download coverage comparison report - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1 with: name: coverage-comparison path: . @@ -563,7 +563,7 @@ jobs: - name: Upload scan reports if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: trivy-scan-reports path: trivy-*.txt diff --git a/.github/workflows/rag-ingestion.yml b/.github/workflows/rag-ingestion.yml index 3d4e89af..46d7b507 100644 --- a/.github/workflows/rag-ingestion.yml +++ b/.github/workflows/rag-ingestion.yml @@ -158,7 +158,7 @@ jobs: echo "Image size: ${IMAGE_SIZE} bytes" - name: Upload Docker image artifact - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: rag-ingestion-docker-image path: ${{ runner.temp }}/rag-ingestion-image.tar @@ -200,7 +200,7 @@ jobs: uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - name: Download Docker image artifact - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1 with: name: rag-ingestion-docker-image path: ${{ runner.temp }} @@ -274,7 +274,7 @@ jobs: bash scripts/stack-rag-ingestion/synth.sh - name: Upload synthesized templates - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: rag-ingestion-cdk-synth path: infrastructure/cdk.out/ @@ -319,7 +319,7 @@ jobs: key: infrastructure-node-modules-${{ hashFiles('infrastructure/package-lock.json') }} - name: Download synthesized templates - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1 with: name: rag-ingestion-cdk-synth path: infrastructure/cdk.out/ @@ -380,7 +380,7 @@ jobs: uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - name: Download Docker image artifact - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1 with: name: rag-ingestion-docker-image path: ${{ runner.temp }} @@ -449,7 +449,7 @@ jobs: key: infrastructure-node-modules-${{ hashFiles('infrastructure/package-lock.json') }} - name: Download synthesized templates - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1 with: name: rag-ingestion-cdk-synth path: infrastructure/cdk.out/ @@ -479,7 +479,7 @@ jobs: bash scripts/stack-rag-ingestion/tag-latest.sh - name: Upload stack outputs - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 if: always() with: name: rag-ingestion-deployment-outputs diff --git a/.github/workflows/sagemaker-fine-tuning.yml b/.github/workflows/sagemaker-fine-tuning.yml index 21999115..3f86d37a 100644 --- a/.github/workflows/sagemaker-fine-tuning.yml +++ b/.github/workflows/sagemaker-fine-tuning.yml @@ -210,7 +210,7 @@ jobs: - name: Upload synthesized templates if: steps.check.outputs.enabled == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: sagemaker-fine-tuning-cdk-synth path: infrastructure/cdk.out/ @@ -266,7 +266,7 @@ jobs: key: infrastructure-build-${{ github.sha }} - name: Download synthesized templates - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1 with: name: sagemaker-fine-tuning-cdk-synth path: infrastructure/cdk.out/ @@ -345,7 +345,7 @@ jobs: key: infrastructure-build-${{ github.sha }} - name: Download synthesized templates - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1 with: name: sagemaker-fine-tuning-cdk-synth path: infrastructure/cdk.out/ @@ -368,7 +368,7 @@ jobs: bash scripts/stack-sagemaker-fine-tuning/deploy.sh - name: Upload stack outputs - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 if: always() with: name: sagemaker-fine-tuning-deployment-outputs diff --git a/.github/workflows/version-check.yml b/.github/workflows/version-check.yml index 4045d47b..66580e9e 100644 --- a/.github/workflows/version-check.yml +++ b/.github/workflows/version-check.yml @@ -46,7 +46,7 @@ jobs: bash scripts/common/sync-version.sh --check - name: Setup Node.js - uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0 + uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6.3.0 with: node-version: '22' diff --git a/.gitignore b/.gitignore index b5fd730d..8d641be6 100644 --- a/.gitignore +++ b/.gitignore @@ -121,3 +121,6 @@ coverage/ /.vs .vs/* .kiro/steering/dev-environment.md + +# Local dev scripts +start.sh diff --git a/.kiro/specs/reliable-document-deletion/.config.kiro b/.kiro/specs/reliable-document-deletion/.config.kiro new file mode 100644 index 00000000..6c2396db --- /dev/null +++ b/.kiro/specs/reliable-document-deletion/.config.kiro @@ -0,0 +1 @@ +{"specId": "d6b6e353-c84a-4d97-8620-ac980235db3e", "workflowType": "design-first", "specType": "feature"} \ No newline at end of file diff --git a/.kiro/specs/reliable-document-deletion/design.md b/.kiro/specs/reliable-document-deletion/design.md new file mode 100644 index 00000000..d51f4952 --- /dev/null +++ b/.kiro/specs/reliable-document-deletion/design.md @@ -0,0 +1,771 @@ +# Design Document: Reliable Document Deletion + +## Overview + +The current document deletion pipeline deletes from three independent stores (DynamoDB, S3, S3 Vectors) sequentially, with failures silently swallowed. When S3 Vectors deletion fails, orphaned vectors remain in the index. The RAG search path (`search_assistant_knowledgebase`) only filters by `assistant_id`, so orphaned vectors from deleted documents still appear as search results — returning stale citations to users. + +This design introduces a **soft-delete + inline cleanup with retries + DynamoDB TTL** pattern that treats DynamoDB as the single source of truth for document existence. Documents are atomically marked as `deleting`, immediately hidden from search results, then cleaned up with retries. A TTL backstop auto-expires records if cleanup fails, while orphaned S3/vector data becomes harmless since search always cross-checks document status. + +The same pattern applies to both the single-document DELETE endpoint and the assistant deletion endpoint, which bulk-deletes all documents for an assistant. + +## Architecture + +```mermaid +graph TD + subgraph "Delete Request" + A[DELETE /documents/:id] --> B[Soft-Delete: status → deleting + set TTL] + B --> C[Return 204 Immediately] + end + + subgraph "Inline Cleanup with Retries" + B --> D[Delete Vectors from S3 Vectors] + D --> E[Delete Source File from S3] + E --> F[Hard-Delete DynamoDB Record] + end + + subgraph "Backstop" + B -.->|If cleanup fails| G[DynamoDB TTL auto-expires in 7 days] + end + + subgraph "Search Path" + H[RAG Query] --> I[S3 Vectors query_vectors] + I --> J[Cross-check document status in DynamoDB] + J --> K[Filter out status != complete] + K --> L[Return only valid chunks] + end + + style B fill:#f9f,stroke:#333 + style J fill:#ff9,stroke:#333 + style G fill:#f96,stroke:#333 +``` + +## Sequence Diagrams + +### Single Document Deletion + +```mermaid +sequenceDiagram + participant Client + participant API as DELETE /documents/:id + participant DDB as DynamoDB + participant S3V as S3 Vectors + participant S3 as S3 + + Client->>API: DELETE /assistants/:aid/documents/:did + API->>DDB: get_document(aid, did, owner_id) + DDB-->>API: Document (with chunk_count, s3_key) + + API->>DDB: update status → "deleting", set TTL + DDB-->>API: OK (atomic update) + API-->>Client: 204 No Content + + Note over API,S3: Inline cleanup (after response) + + loop Retry up to 3 times with backoff + API->>S3V: delete_vectors (deterministic keys from chunk_count) + S3V-->>API: success/failure + end + + loop Retry up to 3 times with backoff + API->>S3: delete_object(s3_key) + S3-->>API: success/failure + end + + alt All cleanup succeeded + API->>DDB: hard-delete record + else Cleanup failed + Note over DDB: TTL auto-expires record in 7 days + end +``` + +### RAG Search with Document Status Filtering + +```mermaid +sequenceDiagram + participant Chat as Chat Endpoint + participant RAG as rag_service + participant S3V as S3 Vectors + participant DDB as DynamoDB + + Chat->>RAG: search_assistant_knowledgebase_with_formatting(aid, query) + RAG->>S3V: query_vectors(filter=assistant_id) + S3V-->>RAG: top-K vector results with metadata + + RAG->>DDB: batch_get documents by document_ids + DDB-->>RAG: document records with status + + RAG->>RAG: Filter: keep only chunks where doc.status == "complete" + RAG-->>Chat: filtered context chunks +``` + +### Assistant Deletion (Bulk Document Cleanup) + +```mermaid +sequenceDiagram + participant Client + participant API as DELETE /assistants/:id + participant DDB as DynamoDB + participant S3V as S3 Vectors + participant S3 as S3 + + Client->>API: DELETE /assistants/:aid + API->>DDB: list_assistant_documents(aid) + DDB-->>API: [doc1, doc2, ...] + + API->>DDB: batch soft-delete all docs (status → deleting, set TTL) + API->>DDB: hard-delete assistant record + API-->>Client: 204 No Content + + Note over API,S3: Background cleanup (asyncio.ensure_future) + + loop For each document + API->>S3V: delete_vectors (deterministic keys) + API->>S3: delete_object(s3_key) + alt Cleanup succeeded + API->>DDB: hard-delete document record + end + end +``` + +## Components and Interfaces + +### Component 1: Document Model (models.py) + +**Purpose**: Extend `DocumentStatus` to include `deleting` status and add TTL field. + +```python +# Extended status type +DocumentStatus = Literal["uploading", "chunking", "embedding", "complete", "failed", "deleting"] + +class Document(BaseModel): + # ... existing fields ... + ttl: Optional[int] = Field(None, alias="ttl", description="DynamoDB TTL epoch timestamp for auto-expiry") +``` + +**Responsibilities**: +- Define the `deleting` status as a valid document lifecycle state +- Carry TTL epoch timestamp for DynamoDB auto-expiry + +### Component 2: Document Service — Soft Delete (document_service.py) + +**Purpose**: Atomic status transition to `deleting` with TTL, replacing the current hard-delete. + +```python +async def soft_delete_document( + assistant_id: str, + document_id: str, + owner_id: str, + ttl_days: int = 7, +) -> Optional[Document]: + """ + Atomically mark a document as 'deleting' and set a TTL for auto-expiry. + Returns the document (with chunk_count, s3_key) needed for cleanup. + Returns None if document not found or not owned by user. + """ + ... +``` + +```python +async def hard_delete_document( + assistant_id: str, + document_id: str, +) -> bool: + """ + Unconditionally remove the DynamoDB record. Called after successful + cleanup of S3 and vectors. No ownership check needed — caller has + already verified ownership during soft-delete. + """ + ... +``` + +```python +async def batch_soft_delete_documents( + assistant_id: str, + document_ids: list[str], + ttl_days: int = 7, +) -> int: + """ + Batch soft-delete multiple documents for an assistant. + Used during assistant deletion. Returns count of documents marked. + """ + ... +``` + +**Responsibilities**: +- Atomic `status → deleting` transition with conditional expression (only if current status allows) +- Set `ttl` attribute to `now + ttl_days` as epoch seconds +- Return full document record for cleanup (chunk_count, s3_key needed) + +### Component 3: Cleanup with Retries (new: cleanup_service.py) + +**Purpose**: Orchestrate deletion of vectors and S3 objects with retry logic. + +```python +async def cleanup_document_resources( + document_id: str, + assistant_id: str, + s3_key: str, + chunk_count: Optional[int], + max_retries: int = 3, + base_delay: float = 0.5, +) -> bool: + """ + Delete vectors and S3 source file with exponential backoff retries. + Returns True if all resources cleaned up successfully. + """ + ... +``` + +```python +async def cleanup_assistant_documents( + assistant_id: str, + documents: list[Document], + max_retries: int = 3, +) -> tuple[int, int]: + """ + Bulk cleanup for assistant deletion. Processes documents concurrently. + Returns (success_count, failure_count). + """ + ... +``` + +**Responsibilities**: +- Retry vector deletion and S3 deletion independently (up to `max_retries`) +- Use exponential backoff with jitter +- Call `hard_delete_document` only when both vector and S3 cleanup succeed +- Log failures but never raise — cleanup is best-effort after soft-delete + +### Component 4: Deterministic Vector Deletion (bedrock_embeddings.py) + +**Purpose**: Replace probe-and-scan with deterministic key generation using `chunk_count`. + +```python +async def delete_vectors_for_document_deterministic( + document_id: str, + chunk_count: int, +) -> int: + """ + Delete vectors using deterministic keys: {document_id}#{i} for i in range(chunk_count). + No probing, no list-scan. O(chunk_count) with a single batch delete call. + """ + ... +``` + +**Responsibilities**: +- Generate keys deterministically from `chunk_count` +- Batch delete in groups of 500 (S3 Vectors API limit) +- Fall back to existing `delete_vectors_for_document` (probe+scan) only when `chunk_count is None` + +### Component 5: Search Path Filtering (rag_service.py) + +**Purpose**: Cross-check vector results against DynamoDB to filter out non-`complete` documents. + +```python +async def search_assistant_knowledgebase_with_formatting( + assistant_id: str, + query: str, + top_k: int = 5, +) -> list[dict[str, Any]]: + """ + Search vector store, then cross-check document status in DynamoDB. + Only return chunks from documents with status='complete'. + """ + ... +``` + +**Responsibilities**: +- After vector search, extract unique `document_id` values from results +- Batch-get document records from DynamoDB +- Filter out chunks whose document has `status != 'complete'` (or doesn't exist) +- Return only valid chunks to the caller + +### Component 6: DynamoDB TTL Configuration (rag-ingestion-stack.ts) + +**Purpose**: Enable TTL on the assistants table using the `ttl` attribute. + +```typescript +this.assistantsTable = new dynamodb.Table(this, 'RagAssistantsTable', { + // ... existing config ... + timeToLiveAttribute: 'ttl', // NEW: enable TTL +}); +``` + +**Responsibilities**: +- Enable DynamoDB TTL on the `ttl` attribute +- DynamoDB automatically deletes expired items (typically within 48 hours of TTL epoch) + +## Data Models + +### Document Record (DynamoDB) + +```python +# Existing fields (unchanged) +{ + "PK": "AST#{assistant_id}", + "SK": "DOC#{document_id}", + "documentId": "DOC-abc123", + "assistantId": "AST-xyz789", + "filename": "report.pdf", + "contentType": "application/pdf", + "sizeBytes": 1048576, + "s3Key": "assistants/AST-xyz789/DOC-abc123/report.pdf", + "status": "complete", # NEW: "deleting" added as valid value + "chunkCount": 42, # Used for deterministic vector key generation + "createdAt": "2024-01-15T10:30:00Z", + "updatedAt": "2024-01-15T10:31:00Z", + + # NEW field + "ttl": 1737504600 # Epoch seconds, set only when status="deleting" +} +``` + +**Validation Rules**: +- `ttl` is only set when `status = "deleting"` +- `ttl` value = current epoch + (7 * 86400) seconds +- `chunk_count` may be `None` if Lambda crashed before the embedding phase +- `status` transitions: any terminal state → `deleting` (but not from `deleting` → anything else via API) + +### Vector Key Pattern + +```python +# Deterministic key format +key = f"{document_id}#{chunk_index}" +# Example: "DOC-abc123#0", "DOC-abc123#1", ..., "DOC-abc123#41" + +# Total keys = chunk_count (stored in DynamoDB document record) +keys = [f"{document_id}#{i}" for i in range(chunk_count)] +``` + +## Key Functions with Formal Specifications + +### Function 1: soft_delete_document() + +```python +async def soft_delete_document( + assistant_id: str, + document_id: str, + owner_id: str, + ttl_days: int = 7, +) -> Optional[Document]: +``` + +**Preconditions:** +- `assistant_id` is a valid assistant ID owned by `owner_id` +- `document_id` exists under the given assistant +- `ttl_days > 0` + +**Postconditions:** +- Document `status` is atomically set to `"deleting"` in DynamoDB +- `ttl` attribute is set to `int(now_epoch + ttl_days * 86400)` +- `updatedAt` is refreshed to current timestamp +- Returns the full Document record (including `chunk_count`, `s3_key`) for cleanup +- Returns `None` if document not found or ownership check fails +- If document is already in `"deleting"` status, the update is idempotent + +**Loop Invariants:** N/A (single atomic DynamoDB update) + +### Function 2: cleanup_document_resources() + +```python +async def cleanup_document_resources( + document_id: str, + assistant_id: str, + s3_key: str, + chunk_count: Optional[int], + max_retries: int = 3, + base_delay: float = 0.5, +) -> bool: +``` + +**Preconditions:** +- Document has already been soft-deleted (status = `"deleting"`) +- `s3_key` is the S3 object key for the source file +- `chunk_count` is the number of vector chunks (may be `None`) +- `max_retries >= 1`, `base_delay > 0` + +**Postconditions:** +- If `chunk_count is not None`: vectors with keys `{document_id}#0` through `{document_id}#{chunk_count-1}` are deleted from S3 Vectors +- If `chunk_count is None`: falls back to probe-and-scan deletion +- S3 object at `s3_key` is deleted +- Returns `True` if and only if both vector deletion and S3 deletion succeeded +- On `True`, the DynamoDB record is hard-deleted +- On `False`, the DynamoDB record remains with `status="deleting"` and TTL will auto-expire it +- Never raises exceptions — all failures are logged and swallowed + +**Loop Invariants:** +- For retry loop: `attempt < max_retries` and previous attempt failed +- Delay between retries = `base_delay * 2^attempt + random_jitter` + +### Function 3: delete_vectors_for_document_deterministic() + +```python +async def delete_vectors_for_document_deterministic( + document_id: str, + chunk_count: int, +) -> int: +``` + +**Preconditions:** +- `document_id` is a valid document ID +- `chunk_count >= 0` +- Vector keys follow the pattern `{document_id}#{i}` for `i in range(chunk_count)` + +**Postconditions:** +- All vectors with keys `{document_id}#0` through `{document_id}#{chunk_count-1}` are deleted +- Returns the number of keys sent for deletion (= `chunk_count`) +- Deletion is idempotent — deleting non-existent keys is a no-op in S3 Vectors API +- Raises on S3 Vectors API errors (caller handles retries) + +**Loop Invariants:** +- For batch loop: all keys in batches `[0..i]` have been submitted for deletion +- Batch size ≤ 500 + +### Function 4: search_assistant_knowledgebase_with_formatting() (modified) + +```python +async def search_assistant_knowledgebase_with_formatting( + assistant_id: str, + query: str, + top_k: int = 5, +) -> list[dict[str, Any]]: +``` + +**Preconditions:** +- `assistant_id` is a valid assistant ID +- `query` is a non-empty string +- `top_k > 0` + +**Postconditions:** +- Returns only chunks from documents where `status == "complete"` in DynamoDB +- Chunks from documents with `status == "deleting"`, `"failed"`, or missing records are excluded +- Result count ≤ `top_k` +- Each result contains `text`, `distance`, `metadata`, and `key` +- On DynamoDB lookup failure, falls back to returning unfiltered results (graceful degradation) + +**Loop Invariants:** N/A + +## Algorithmic Pseudocode + +### Document Deletion Algorithm + +```python +# Main deletion flow (inline in route handler) +async def delete_document_endpoint(assistant_id, document_id, user_id): + # Step 1: Soft-delete (atomic, fast) + document = await soft_delete_document(assistant_id, document_id, user_id) + if document is None: + raise HTTPException(404) + + # Step 2: Return immediately — document is now invisible to search + # (response sent to client here) + + # Step 3: Inline cleanup with retries (after response) + success = await cleanup_document_resources( + document_id=document.document_id, + assistant_id=assistant_id, + s3_key=document.s3_key, + chunk_count=document.chunk_count, + ) + + if success: + await hard_delete_document(assistant_id, document_id) + # else: TTL will auto-expire the record +``` + +### Cleanup with Retries Algorithm + +```python +async def cleanup_document_resources(document_id, assistant_id, s3_key, chunk_count, max_retries=3, base_delay=0.5): + vectors_deleted = False + s3_deleted = False + + # Phase 1: Delete vectors + for attempt in range(max_retries): + try: + if chunk_count is not None: + await delete_vectors_for_document_deterministic(document_id, chunk_count) + else: + await delete_vectors_for_document(document_id) # fallback probe+scan + vectors_deleted = True + break + except Exception as e: + delay = base_delay * (2 ** attempt) + random.uniform(0, 0.1) + logger.warning(f"Vector deletion attempt {attempt+1} failed: {e}, retrying in {delay:.1f}s") + await asyncio.sleep(delay) + + # Phase 2: Delete S3 source file + for attempt in range(max_retries): + try: + s3_client.delete_object(Bucket=bucket, Key=s3_key) + s3_deleted = True + break + except Exception as e: + delay = base_delay * (2 ** attempt) + random.uniform(0, 0.1) + logger.warning(f"S3 deletion attempt {attempt+1} failed: {e}, retrying in {delay:.1f}s") + await asyncio.sleep(delay) + + return vectors_deleted and s3_deleted +``` + +### Deterministic Vector Deletion Algorithm + +```python +async def delete_vectors_for_document_deterministic(document_id, chunk_count): + keys = [f"{document_id}#{i}" for i in range(chunk_count)] + batch_size = 500 + deleted = 0 + + for i in range(0, len(keys), batch_size): + batch = keys[i:i + batch_size] + client.delete_vectors( + vectorBucketName=vector_bucket, + indexName=vector_index, + keys=batch, + ) + deleted += len(batch) + + return deleted +``` + +### Search Filtering Algorithm + +```python +async def search_with_document_status_filter(assistant_id, query, top_k=5): + # Step 1: Vector search (unchanged) + response = await search_assistant_knowledgebase(assistant_id, query) + vectors = response.get("vectors", []) + + if not vectors: + return [] + + # Step 2: Extract unique document IDs from results + doc_ids = set() + for v in vectors: + doc_id = v.get("metadata", {}).get("document_id") + if doc_id: + doc_ids.add(doc_id) + + # Step 3: Batch-get document records from DynamoDB + valid_doc_ids = set() + try: + table = dynamodb.Table(table_name) + for doc_id in doc_ids: + response = table.get_item(Key={"PK": f"AST#{assistant_id}", "SK": f"DOC#{doc_id}"}) + item = response.get("Item") + if item and item.get("status") == "complete": + valid_doc_ids.add(doc_id) + except Exception: + # Graceful degradation: return unfiltered results + valid_doc_ids = doc_ids + + # Step 4: Filter vector results + filtered = [] + for v in vectors[:top_k * 2]: # Over-fetch to account for filtering + doc_id = v.get("metadata", {}).get("document_id") + if doc_id in valid_doc_ids: + filtered.append(v) + if len(filtered) >= top_k: + break + + return filtered +``` + +## Example Usage + +```python +# Example 1: Single document deletion +@router.delete("/{document_id}", status_code=status.HTTP_204_NO_CONTENT) +async def delete_document(assistant_id: str, document_id: str, user_id: str = Depends(get_current_user_id)): + document = await soft_delete_document(assistant_id, document_id, user_id) + if not document: + raise HTTPException(status_code=404, detail="Document not found") + + # Fire-and-forget cleanup (response already sent as 204) + asyncio.ensure_future( + _cleanup_and_hard_delete(document) + ) + return None + + +async def _cleanup_and_hard_delete(document: Document): + success = await cleanup_document_resources( + document_id=document.document_id, + assistant_id=document.assistant_id, + s3_key=document.s3_key, + chunk_count=document.chunk_count, + ) + if success: + await hard_delete_document(document.assistant_id, document.document_id) + + +# Example 2: Assistant deletion with bulk document cleanup +@router.delete("/{assistant_id}", status_code=204) +async def delete_assistant_endpoint(assistant_id: str, current_user: User = Depends(get_current_user)): + docs, _ = await list_assistant_documents(assistant_id, current_user.user_id) + + # Soft-delete all documents first + await batch_soft_delete_documents(assistant_id, [d.document_id for d in docs]) + + # Hard-delete assistant record + await delete_assistant(assistant_id=assistant_id, owner_id=current_user.user_id) + + # Background cleanup + asyncio.ensure_future(cleanup_assistant_documents(assistant_id, docs)) + return None + + +# Example 3: RAG search now filters by document status +chunks = await search_assistant_knowledgebase_with_formatting( + assistant_id="AST-xyz789", + query="What are the quarterly results?", + top_k=5, +) +# chunks only contains results from documents with status="complete" +# Documents in "deleting" status are invisible +``` + +## Correctness Properties + +*A property is a characteristic or behavior that should hold true across all valid executions of a system — essentially, a formal statement about what the system should do. Properties serve as the bridge between human-readable specifications and machine-verifiable correctness guarantees.* + +### Property 1: Soft-delete postconditions + +*For any* document in a valid pre-delete status (uploading, chunking, embedding, complete, failed), after calling soft_delete_document, the returned document record SHALL have status="deleting", a TTL equal to the current epoch plus 604800 seconds, an updated updatedAt timestamp, and the original chunk_count and s3_key values preserved. + +**Validates: Requirements 1.1, 1.2, 1.3, 1.4** + +### Property 2: Idempotent soft-delete + +*For any* document already in "deleting" status, calling soft_delete_document again SHALL succeed without error and the document SHALL remain in "deleting" status. + +**Validates: Requirement 1.6** + +### Property 3: Search results only contain complete documents + +*For any* set of vector search results containing chunks from documents with mixed statuses (complete, deleting, failed, uploading, or missing), the RAG_Search_Service SHALL return only chunks whose document_id maps to a document with status="complete" in DynamoDB. + +**Validates: Requirements 3.1, 3.2, 3.3** + +### Property 4: Cleanup retry bounded by max_retries + +*For any* cleanup operation (vector deletion or S3 deletion) that encounters transient failures, the Cleanup_Service SHALL attempt at most max_retries attempts, with each retry delay following exponential backoff (base_delay * 2^attempt + jitter). + +**Validates: Requirements 4.1, 4.2** + +### Property 5: Failed cleanup preserves DynamoDB record + +*For any* document where vector deletion or S3 deletion fails after all retries, the DynamoDB record SHALL remain with status="deleting" and a valid TTL attribute, and hard-delete SHALL NOT be invoked. + +**Validates: Requirement 4.4** + +### Property 6: Successful cleanup triggers hard-delete + +*For any* document where both vector deletion and S3 deletion succeed, the Cleanup_Service SHALL invoke hard-delete, and the DynamoDB record for that document SHALL no longer exist. + +**Validates: Requirements 4.3, 9.1** + +### Property 7: Deterministic vector key generation + +*For any* document_id and chunk_count >= 0, the deterministic deletion function SHALL generate exactly chunk_count keys matching the pattern "{document_id}#{i}" for i in range(chunk_count), batched into groups of at most 500. + +**Validates: Requirement 5.1** + +### Property 8: Bulk soft-delete covers all documents + +*For any* assistant with N documents, batch_soft_delete_documents SHALL mark all N documents as "deleting" with TTL before the assistant record is hard-deleted. + +**Validates: Requirement 8.1** + +### Property 9: Bulk cleanup counts are consistent + +*For any* set of documents processed by cleanup_assistant_documents, the sum of success_count and failure_count SHALL equal the total number of documents processed. + +**Validates: Requirement 8.3** + +### Property 10: List documents excludes deleting status + +*For any* assistant with documents in mixed statuses, listing documents SHALL never return documents with status="deleting". + +**Validates: Requirement 11.1** + +## Error Handling + +### Error Scenario 1: Vector Deletion Fails (All Retries Exhausted) + +**Condition**: S3 Vectors API returns errors for all 3 retry attempts +**Response**: Log error, skip S3 deletion, leave DynamoDB record with `status="deleting"` and TTL +**Recovery**: Document is invisible to search. TTL auto-expires the DynamoDB record. Orphaned vectors are harmless (search filters by document status). No manual intervention needed. + +### Error Scenario 2: S3 Source File Deletion Fails + +**Condition**: S3 `delete_object` fails for all retry attempts +**Response**: Log error, leave DynamoDB record with `status="deleting"` and TTL +**Recovery**: Orphaned S3 object is harmless (no path references it after DynamoDB record expires). S3 lifecycle policies can clean up orphaned objects if desired. + +### Error Scenario 3: Soft-Delete Fails (DynamoDB Error) + +**Condition**: DynamoDB `update_item` fails during soft-delete +**Response**: Return 500 to client. No cleanup attempted. +**Recovery**: Document remains in its original state. User can retry the delete. + +### Error Scenario 4: DynamoDB Lookup Fails During Search + +**Condition**: Batch-get for document status fails during RAG search +**Response**: Fall back to returning unfiltered vector results (graceful degradation) +**Recovery**: Temporary — next search attempt will likely succeed. Worst case: user sees a stale result once. + +### Error Scenario 5: chunk_count is None (Lambda Crashed Mid-Processing) + +**Condition**: Document record has no `chunk_count` (Lambda crashed before embedding phase) +**Response**: Fall back to existing probe-and-scan `delete_vectors_for_document` function +**Recovery**: Slower but correct. Probe-and-scan handles unknown chunk counts. + +## Testing Strategy + +### Unit Testing Approach + +- Test `soft_delete_document` with mocked DynamoDB: verify status transition, TTL calculation, conditional expression +- Test `cleanup_document_resources` with mocked S3/S3Vectors: verify retry logic, backoff timing, success/failure paths +- Test `delete_vectors_for_document_deterministic`: verify key generation, batch splitting at 500 +- Test search filtering: verify chunks from `deleting` documents are excluded +- Test `batch_soft_delete_documents`: verify all documents are marked + +### Property-Based Testing Approach + +**Property Test Library**: hypothesis + +- For any `chunk_count >= 0`, `delete_vectors_for_document_deterministic` generates exactly `chunk_count` keys matching the pattern `{document_id}#{i}` +- For any set of vector results, the search filter never returns chunks from documents with `status != "complete"` +- TTL epoch is always in the future (within `ttl_days` range) relative to the soft-delete timestamp +- Retry backoff delay is always `>= base_delay * 2^attempt` and `< base_delay * 2^attempt + max_jitter` + +### Integration Testing Approach + +- End-to-end: delete a document, verify it disappears from search results immediately +- Simulate vector deletion failure: verify document is invisible to search, DynamoDB record has TTL +- Simulate S3 deletion failure: verify document is invisible, TTL is set +- Assistant deletion: verify all documents are soft-deleted, then cleaned up +- Verify DynamoDB TTL attribute is enabled on the table (CDK synth test) + +## Performance Considerations + +- **Soft-delete is a single DynamoDB update**: ~5ms latency, same as current hard-delete. No user-facing latency increase. +- **Search path adds one DynamoDB batch-get**: For top-5 results, this is at most 5 `get_item` calls (or 1 `batch_get_item`). Adds ~10-20ms to search latency. Acceptable given search already takes ~200-500ms for embedding generation. +- **Deterministic vector deletion eliminates probe-and-scan**: Current approach probes in batches of 500 with `get_vectors` calls. Deterministic approach skips probing entirely — just generates keys and deletes. For a 42-chunk document, this is 1 `delete_vectors` call instead of 1 `get_vectors` + 1 `delete_vectors`. +- **Retry overhead is bounded**: Max 3 retries with exponential backoff. Worst case adds ~3.5s to cleanup. This happens after the 204 response, so no user impact. +- **TTL cleanup is free**: DynamoDB TTL deletion is handled by AWS at no additional cost. + +## Security Considerations + +- **Ownership verification happens during soft-delete**: The `soft_delete_document` function verifies assistant ownership before marking the document. Cleanup functions (`hard_delete_document`, `cleanup_document_resources`) skip ownership checks since they operate on already-verified documents. +- **No new API surface**: The change is internal to existing endpoints. No new routes or permissions needed. +- **TTL prevents indefinite data retention**: Documents marked for deletion are guaranteed to be removed within 7 days + DynamoDB TTL processing time (~48h), even if cleanup fails. + +## Dependencies + +- **boto3**: DynamoDB `update_item` with conditional expressions, `batch_get_item` for search filtering +- **S3 Vectors API**: `delete_vectors` (existing dependency, no new API calls) +- **S3 API**: `delete_object` (existing dependency) +- **AWS CDK**: `dynamodb.Table` `timeToLiveAttribute` property for TTL configuration +- **asyncio**: `ensure_future` for fire-and-forget cleanup, `sleep` for retry backoff +- **random**: Jitter for retry backoff diff --git a/.kiro/specs/reliable-document-deletion/requirements.md b/.kiro/specs/reliable-document-deletion/requirements.md new file mode 100644 index 00000000..a8b31dff --- /dev/null +++ b/.kiro/specs/reliable-document-deletion/requirements.md @@ -0,0 +1,133 @@ +# Requirements Document + +## Introduction + +This document specifies the requirements for reliable document deletion in the RAG assistant system. The current deletion pipeline deletes from DynamoDB, S3, and S3 Vectors sequentially, silently swallowing failures. When vector deletion fails, orphaned vectors cause stale RAG search results. The solution introduces a soft-delete + inline cleanup with retries + DynamoDB TTL pattern that treats DynamoDB as the single source of truth for document existence, ensuring deleted documents are immediately invisible to search and eventually cleaned up from all stores. + +## Glossary + +- **Document_Service**: The backend service layer responsible for DynamoDB operations on document records (create, read, update, delete). +- **Cleanup_Service**: The new service module that orchestrates deletion of vectors and S3 objects with retry logic and exponential backoff. +- **RAG_Search_Service**: The service that searches the S3 vector store for relevant chunks and returns formatted results to the chat endpoint. +- **Vector_Store**: The S3 Vectors index that stores document chunk embeddings for similarity search. +- **Documents_Bucket**: The S3 bucket that stores uploaded source document files. +- **Assistants_Table**: The DynamoDB table storing assistant and document records using an adjacency list pattern (PK=AST#{assistant_id}, SK=DOC#{document_id}). +- **Soft_Delete**: An atomic DynamoDB update that transitions a document's status to "deleting" and sets a TTL, without removing the record. +- **Hard_Delete**: The unconditional removal of a DynamoDB document record, performed only after successful cleanup of all associated resources. +- **TTL**: DynamoDB Time-To-Live, an epoch-second attribute that causes DynamoDB to automatically delete expired items. +- **Deterministic_Key_Deletion**: Generating vector keys from the stored chunk_count rather than probing or scanning the vector index. +- **Chunk_Count**: The number of embedding chunks created for a document, stored in the DynamoDB document record. +- **Delete_Endpoint**: The HTTP DELETE route at /assistants/{assistant_id}/documents/{document_id}. +- **Assistant_Delete_Endpoint**: The HTTP DELETE route at /assistants/{assistant_id} that removes an assistant and all its documents. + +## Requirements + +### Requirement 1: Soft-Delete Document Status Transition + +**User Story:** As a user, I want document deletion to immediately mark the document as being deleted, so that the document becomes invisible to search results without waiting for full resource cleanup. + +#### Acceptance Criteria + +1. WHEN a user issues a delete request for a document, THE Document_Service SHALL atomically update the document status to "deleting" in the Assistants_Table. +2. WHEN the Document_Service performs a soft-delete, THE Document_Service SHALL set the TTL attribute to the current epoch time plus 7 days (604800 seconds). +3. WHEN the Document_Service performs a soft-delete, THE Document_Service SHALL update the updatedAt timestamp to the current time. +4. WHEN the Document_Service performs a soft-delete, THE Document_Service SHALL return the full document record including chunk_count and s3_key for use by the Cleanup_Service. +5. IF the document does not exist or the user does not own the parent assistant, THEN THE Document_Service SHALL return None and the Delete_Endpoint SHALL respond with HTTP 404. +6. WHEN a soft-delete is performed on a document already in "deleting" status, THE Document_Service SHALL treat the operation as idempotent and succeed without error. + +### Requirement 2: Immediate API Response After Soft-Delete + +**User Story:** As a user, I want the delete endpoint to respond immediately after marking the document, so that I do not experience delays from resource cleanup. + +#### Acceptance Criteria + +1. WHEN the soft-delete succeeds, THE Delete_Endpoint SHALL return HTTP 204 No Content to the client before initiating resource cleanup. +2. WHEN the soft-delete succeeds, THE Delete_Endpoint SHALL initiate inline resource cleanup as a background task after sending the response. + +### Requirement 3: Search Path Document Status Filtering + +**User Story:** As a user, I want RAG search results to exclude documents that have been deleted, so that I only see citations from valid, complete documents. + +#### Acceptance Criteria + +1. WHEN the RAG_Search_Service receives vector search results, THE RAG_Search_Service SHALL extract unique document_id values from the result metadata and look up their status in the Assistants_Table. +2. THE RAG_Search_Service SHALL return only chunks from documents where the status equals "complete" in the Assistants_Table. +3. WHEN a document record does not exist in the Assistants_Table for a given document_id, THE RAG_Search_Service SHALL exclude chunks from that document. +4. IF the Assistants_Table lookup fails due to a DynamoDB error, THEN THE RAG_Search_Service SHALL fall back to returning unfiltered vector results. + +### Requirement 4: Inline Cleanup with Retries + +**User Story:** As a system operator, I want resource cleanup to retry on transient failures, so that vectors and S3 objects are reliably removed without manual intervention. + +#### Acceptance Criteria + +1. WHEN the Cleanup_Service deletes vectors for a document, THE Cleanup_Service SHALL retry up to 3 times on failure using exponential backoff with jitter. +2. WHEN the Cleanup_Service deletes the S3 source file for a document, THE Cleanup_Service SHALL retry up to 3 times on failure using exponential backoff with jitter. +3. WHEN both vector deletion and S3 deletion succeed, THE Cleanup_Service SHALL invoke hard-delete to remove the DynamoDB document record. +4. IF vector deletion or S3 deletion fails after all retries, THEN THE Cleanup_Service SHALL log the failure and leave the DynamoDB record with status "deleting" for TTL auto-expiry. +5. THE Cleanup_Service SHALL process vector deletion and S3 deletion as independent phases, so that failure of one does not prevent attempting the other. + +### Requirement 5: Deterministic Vector Key Deletion + +**User Story:** As a system operator, I want vector deletion to use deterministic keys derived from chunk_count, so that deletion is efficient and does not require probing or scanning the vector index. + +#### Acceptance Criteria + +1. WHEN chunk_count is available on the document record, THE Cleanup_Service SHALL generate vector keys using the pattern "{document_id}#{i}" for i in range(chunk_count) and delete them in batches of 500. +2. WHEN chunk_count is not available on the document record, THE Cleanup_Service SHALL fall back to the existing probe-and-scan deletion method. +3. THE Cleanup_Service SHALL treat deletion of non-existent vector keys as a successful no-op. + +### Requirement 6: DynamoDB TTL Backstop + +**User Story:** As a system operator, I want documents stuck in "deleting" status to be automatically expired by DynamoDB TTL, so that failed cleanups do not leave permanent orphaned records. + +#### Acceptance Criteria + +1. THE Assistants_Table SHALL have DynamoDB TTL enabled on the "ttl" attribute. +2. WHEN a document is soft-deleted, THE Document_Service SHALL set the ttl attribute to an epoch-second value 7 days in the future. +3. WHILE a document record has a ttl attribute with an epoch value in the past, THE Assistants_Table SHALL automatically delete that record. + +### Requirement 7: Document Status Model Extension + +**User Story:** As a developer, I want the document model to support the "deleting" status and a TTL field, so that the soft-delete pattern can be implemented consistently. + +#### Acceptance Criteria + +1. THE Document model SHALL include "deleting" as a valid value in the DocumentStatus type. +2. THE Document model SHALL include an optional integer ttl field representing a DynamoDB TTL epoch timestamp. + +### Requirement 8: Assistant Deletion with Bulk Document Cleanup + +**User Story:** As a user, I want deleting an assistant to reliably clean up all associated documents, vectors, and S3 objects, so that no orphaned resources remain. + +#### Acceptance Criteria + +1. WHEN a user deletes an assistant, THE Assistant_Delete_Endpoint SHALL list all documents for the assistant and batch soft-delete them with TTL before deleting the assistant record. +2. WHEN the assistant record is hard-deleted, THE Assistant_Delete_Endpoint SHALL initiate background cleanup for all soft-deleted documents. +3. WHEN performing bulk document cleanup, THE Cleanup_Service SHALL process documents concurrently and return counts of successes and failures. +4. WHEN cleanup succeeds for an individual document during bulk cleanup, THE Cleanup_Service SHALL hard-delete that document's DynamoDB record. + +### Requirement 9: Hard-Delete Document Record + +**User Story:** As a system operator, I want a hard-delete operation that unconditionally removes a document record from DynamoDB, so that successfully cleaned-up documents do not linger in the table. + +#### Acceptance Criteria + +1. WHEN the Cleanup_Service has successfully deleted all vectors and the S3 source file for a document, THE Document_Service SHALL unconditionally remove the DynamoDB record for that document. +2. THE hard-delete operation SHALL NOT perform ownership verification, as ownership was already verified during the preceding soft-delete. + +### Requirement 10: CDK Infrastructure TTL Configuration + +**User Story:** As a DevOps engineer, I want the CDK stack to enable DynamoDB TTL on the assistants table, so that the TTL backstop mechanism functions correctly in all environments. + +#### Acceptance Criteria + +1. THE RagIngestionStack SHALL configure the Assistants_Table with timeToLiveAttribute set to "ttl". + +### Requirement 11: List Documents Excludes Deleting Documents + +**User Story:** As a user, I want the document list endpoint to exclude documents in "deleting" status, so that I only see documents that are active and available. + +#### Acceptance Criteria + +1. WHEN listing documents for an assistant, THE Document_Service SHALL exclude documents with status "deleting" from the returned list. diff --git a/.kiro/specs/reliable-document-deletion/tasks.md b/.kiro/specs/reliable-document-deletion/tasks.md new file mode 100644 index 00000000..c6049cd9 --- /dev/null +++ b/.kiro/specs/reliable-document-deletion/tasks.md @@ -0,0 +1,181 @@ +# Implementation Plan: Reliable Document Deletion + +## Overview + +Implement a soft-delete + inline cleanup with retries + DynamoDB TTL pattern for document deletion. Documents are atomically marked as "deleting" (immediately invisible to search), cleaned up with retries, and auto-expired by TTL if cleanup fails. Applies to both single-document and assistant-level deletion. + +## Tasks + +- [x] 1. Extend Document model and enable DynamoDB TTL + - [x] 1.1 Add "deleting" status and TTL field to Document model + - In `backend/src/apis/app_api/documents/models.py`, add `"deleting"` to the `DocumentStatus` Literal type + - Add `ttl: Optional[int] = Field(None, alias="ttl", description="DynamoDB TTL epoch timestamp for auto-expiry")` to the `Document` model + - _Requirements: 7.1, 7.2_ + + - [x] 1.2 Enable TTL on the DynamoDB assistants table in CDK + - In `infrastructure/lib/rag-ingestion-stack.ts`, add `timeToLiveAttribute: 'ttl'` to the `RagAssistantsTable` definition + - _Requirements: 10.1, 6.1_ + +- [x] 2. Implement soft-delete and hard-delete in Document Service + - [x] 2.1 Implement `soft_delete_document` function + - In `backend/src/apis/app_api/documents/services/document_service.py`, add `soft_delete_document(assistant_id, document_id, owner_id, ttl_days=7)` that atomically sets `status="deleting"`, `ttl=now+604800`, `updatedAt=now` via DynamoDB `update_item` with conditional expression + - Verify assistant ownership via `get_assistant` + - Return full `Document` record (including `chunk_count`, `s3_key`) on success, `None` on not found / access denied + - Treat re-deleting a document already in "deleting" status as idempotent (no error) + - _Requirements: 1.1, 1.2, 1.3, 1.4, 1.5, 1.6_ + + - [x] 2.2 Write property test for soft-delete postconditions + - **Property 1: Soft-delete postconditions** + - Using `hypothesis`, for any valid document status and ttl_days > 0, verify the returned document has `status="deleting"`, TTL = `now_epoch + ttl_days * 86400`, updated `updatedAt`, and preserved `chunk_count` / `s3_key` + - **Validates: Requirements 1.1, 1.2, 1.3, 1.4** + + - [x] 2.3 Write property test for idempotent soft-delete + - **Property 2: Idempotent soft-delete** + - Using `hypothesis`, verify that calling `soft_delete_document` on a document already in "deleting" status succeeds without error + - **Validates: Requirement 1.6** + + - [x] 2.4 Implement `hard_delete_document` function + - In `backend/src/apis/app_api/documents/services/document_service.py`, add `hard_delete_document(assistant_id, document_id)` that unconditionally removes the DynamoDB record (no ownership check) + - _Requirements: 9.1, 9.2_ + + - [x] 2.5 Implement `batch_soft_delete_documents` function + - In `backend/src/apis/app_api/documents/services/document_service.py`, add `batch_soft_delete_documents(assistant_id, document_ids, ttl_days=7)` that soft-deletes multiple documents for an assistant, returns count of documents marked + - _Requirements: 8.1_ + + - [x] 2.6 Write property test for bulk soft-delete coverage + - **Property 8: Bulk soft-delete covers all documents** + - Using `hypothesis`, for any list of N document IDs, verify `batch_soft_delete_documents` marks all N as "deleting" with TTL + - **Validates: Requirement 8.1** + + - [x] 2.7 Write unit tests for soft-delete, hard-delete, and batch soft-delete + - Test `soft_delete_document` with mocked DynamoDB: verify status transition, TTL calculation, conditional expression, not-found case + - Test `hard_delete_document`: verify unconditional delete, no ownership check + - Test `batch_soft_delete_documents`: verify all documents are marked + - _Requirements: 1.1, 1.2, 1.3, 1.4, 1.5, 1.6, 8.1, 9.1, 9.2_ + +- [x] 3. Implement deterministic vector deletion + - [x] 3.1 Add `delete_vectors_for_document_deterministic` function + - In `backend/src/apis/shared/embeddings/bedrock_embeddings.py`, add `delete_vectors_for_document_deterministic(document_id, chunk_count)` that generates keys `{document_id}#{i}` for `i in range(chunk_count)` and deletes in batches of 500 + - Treat deletion of non-existent keys as a no-op + - _Requirements: 5.1, 5.3_ + + - [x] 3.2 Write property test for deterministic vector key generation + - **Property 7: Deterministic vector key generation** + - Using `hypothesis`, for any `document_id` (text) and `chunk_count >= 0` (integers), verify exactly `chunk_count` keys are generated matching `{document_id}#{i}`, batched into groups of at most 500 + - **Validates: Requirement 5.1** + + - [x] 3.3 Write unit tests for deterministic vector deletion + - Test key generation correctness, batch splitting at 500, zero chunk_count edge case + - _Requirements: 5.1, 5.2, 5.3_ + +- [x] 4. Checkpoint - Ensure all tests pass + - Ensure all tests pass, ask the user if questions arise. + +- [x] 5. Implement Cleanup Service with retries + - [x] 5.1 Create `cleanup_service.py` with `cleanup_document_resources` function + - Create new file `backend/src/apis/app_api/documents/services/cleanup_service.py` + - Implement `cleanup_document_resources(document_id, assistant_id, s3_key, chunk_count, max_retries=3, base_delay=0.5)` with exponential backoff + jitter + - Phase 1: Delete vectors (use deterministic if `chunk_count` available, fallback to probe-and-scan) + - Phase 2: Delete S3 source file + - Phases are independent — failure of one does not prevent the other + - Return `True` only if both succeed; on `True`, call `hard_delete_document` + - On failure, log and leave DynamoDB record for TTL auto-expiry + - Never raise exceptions + - _Requirements: 4.1, 4.2, 4.3, 4.4, 4.5, 5.1, 5.2_ + + - [x] 5.2 Write property test for cleanup retry bounds + - **Property 4: Cleanup retry bounded by max_retries** + - Using `hypothesis`, for any `max_retries >= 1` and `base_delay > 0`, verify at most `max_retries` attempts are made, with delay following `base_delay * 2^attempt + jitter` + - **Validates: Requirements 4.1, 4.2** + + - [x] 5.3 Write property test for failed cleanup preserving DynamoDB record + - **Property 5: Failed cleanup preserves DynamoDB record** + - Verify that when cleanup fails after all retries, `hard_delete_document` is NOT called and the record remains with `status="deleting"` and valid TTL + - **Validates: Requirement 4.4** + + - [x] 5.4 Write property test for successful cleanup triggering hard-delete + - **Property 6: Successful cleanup triggers hard-delete** + - Verify that when both vector and S3 deletion succeed, `hard_delete_document` IS called + - **Validates: Requirements 4.3, 9.1** + + - [x] 5.5 Implement `cleanup_assistant_documents` function + - In `cleanup_service.py`, add `cleanup_assistant_documents(assistant_id, documents, max_retries=3)` that processes documents concurrently, returns `(success_count, failure_count)`, and hard-deletes each successfully cleaned document + - _Requirements: 8.2, 8.3, 8.4_ + + - [x] 5.6 Write property test for bulk cleanup count consistency + - **Property 9: Bulk cleanup counts are consistent** + - Using `hypothesis`, verify `success_count + failure_count == len(documents)` for any set of documents + - **Validates: Requirement 8.3** + + - [x] 5.7 Write unit tests for cleanup_service + - Test retry logic with mocked S3/S3Vectors failures, backoff timing, success/failure paths, independent phases + - Test `cleanup_assistant_documents` concurrent processing and count consistency + - _Requirements: 4.1, 4.2, 4.3, 4.4, 4.5, 8.2, 8.3, 8.4_ + +- [x] 6. Checkpoint - Ensure all tests pass + - Ensure all tests pass, ask the user if questions arise. + +- [x] 7. Add search path document status filtering + - [x] 7.1 Modify `search_assistant_knowledgebase_with_formatting` in rag_service.py + - After vector search, extract unique `document_id` values from result metadata + - Batch-get document records from DynamoDB to check status + - Filter out chunks from documents where `status != "complete"` or record doesn't exist + - On DynamoDB lookup failure, fall back to returning unfiltered results (graceful degradation) + - _Requirements: 3.1, 3.2, 3.3, 3.4_ + + - [x] 7.2 Write property test for search filtering + - **Property 3: Search results only contain complete documents** + - Using `hypothesis`, for any mix of document statuses (complete, deleting, failed, uploading, missing), verify only chunks from `status="complete"` documents are returned + - **Validates: Requirements 3.1, 3.2, 3.3** + + - [x] 7.3 Write unit tests for search filtering + - Test filtering with mixed statuses, all-deleting, all-complete, missing records, DynamoDB error fallback + - _Requirements: 3.1, 3.2, 3.3, 3.4_ + +- [x] 8. Update list documents to exclude deleting status + - [x] 8.1 Modify `list_assistant_documents` in document_service.py + - After querying DynamoDB, filter out documents with `status="deleting"` from the returned list + - _Requirements: 11.1_ + + - [x] 8.2 Write property test for list documents filtering + - **Property 10: List documents excludes deleting status** + - Using `hypothesis`, for any set of documents with mixed statuses, verify listing never returns documents with `status="deleting"` + - **Validates: Requirement 11.1** + + - [x] 8.3 Write unit tests for list documents filtering + - Test with mix of statuses, verify "deleting" documents are excluded + - _Requirements: 11.1_ + +- [x] 9. Refactor delete endpoints to use soft-delete + background cleanup + - [x] 9.1 Refactor single document DELETE endpoint + - In `backend/src/apis/app_api/documents/routes.py`, replace the current `delete_document` handler: + - Call `soft_delete_document` instead of `delete_document_service` + - Return 204 immediately after soft-delete + - Fire-and-forget `cleanup_document_resources` + `hard_delete_document` via `asyncio.ensure_future` + - Remove inline S3 and vector deletion code from the route handler + - _Requirements: 2.1, 2.2_ + + - [x] 9.2 Refactor assistant DELETE endpoint + - In `backend/src/apis/app_api/assistants/routes.py`, update `delete_assistant_endpoint`: + - List all documents, batch soft-delete them with TTL + - Hard-delete assistant record + - Fire-and-forget `cleanup_assistant_documents` via `asyncio.ensure_future` + - Remove the existing `_cleanup_assistant_resources` inline function + - _Requirements: 8.1, 8.2_ + + - [x] 9.3 Write unit tests for refactored delete endpoints + - Test single document delete returns 204 after soft-delete, cleanup runs in background + - Test assistant delete soft-deletes all docs before deleting assistant record + - _Requirements: 2.1, 2.2, 8.1, 8.2_ + +- [x] 10. Final checkpoint - Ensure all tests pass + - Ensure all tests pass, ask the user if questions arise. + +## Notes + +- Tasks marked with `*` are optional and can be skipped for faster MVP +- Each task references specific requirements for traceability +- Checkpoints ensure incremental validation +- Property tests validate universal correctness properties from the design document +- Unit tests validate specific examples and edge cases +- The design uses Python throughout, so all implementation tasks use Python (backend) and TypeScript (CDK infrastructure) diff --git a/.kiro/specs/share-conversations/.config.kiro b/.kiro/specs/share-conversations/.config.kiro new file mode 100644 index 00000000..6fe6554c --- /dev/null +++ b/.kiro/specs/share-conversations/.config.kiro @@ -0,0 +1 @@ +{"specId": "9e3d4ceb-9be3-42aa-bc7f-11a9de57b050", "workflowType": "requirements-first", "specType": "feature"} \ No newline at end of file diff --git a/.kiro/specs/share-conversations/design.md b/.kiro/specs/share-conversations/design.md new file mode 100644 index 00000000..c2d78def --- /dev/null +++ b/.kiro/specs/share-conversations/design.md @@ -0,0 +1,355 @@ +# Design: Share Conversations via Shareable URL + +## Overview + +This feature enables users to share point-in-time snapshots of conversations via shareable URLs. When a user shares a conversation, the system captures the current state (metadata + messages) and stores it independently in a dedicated DynamoDB table. Recipients access the snapshot through a read-only view at `/shared/{share_id}`. + +Three access levels control visibility: +- **Private**: Only the owner can view (default) +- **Public**: Any authenticated user with the link can view +- **Specific**: Only designated email addresses (owner always included) can view + +The design follows existing patterns: FastAPI router + service layer on the backend, Angular standalone component + injectable service on the frontend, and CDK-defined DynamoDB table in the infrastructure stack. + +## Architecture + +```mermaid +graph TD + subgraph Frontend + SM[Share Modal Component] + SV[Shared View Component] + SS[Share Service] + SL[Session List Menu] + end + + subgraph Backend + SR[Share Router - /conversations] + SVR[Shared View Router - /shared] + SVC[Share Service Layer] + DDB[(Shared Conversations Table)] + end + + subgraph Existing + MSG[get_messages - AgentCore Memory] + META[get_session_metadata - DynamoDB] + AUTH[Auth Dependencies] + end + + SL -->|opens| SM + SM -->|create/update/revoke| SS + SV -->|retrieve| SS + SS -->|HTTP| SR + SS -->|HTTP| SVR + SR -->|business logic| SVC + SVR -->|business logic| SVC + SVC -->|snapshot on create| MSG + SVC -->|snapshot on create| META + SVC -->|CRUD| DDB + SR -->|auth| AUTH + SVR -->|auth| AUTH +``` + +### Request Flow + +1. **Create Share**: User clicks "Share" in session menu → Modal opens → User selects access level → POST `/conversations/{session_id}/share` → Service calls `get_messages()` and `get_session_metadata()` to build snapshot → Stores in DynamoDB → Returns share_id + URL +2. **View Share**: Recipient navigates to `/shared/{share_id}` → Frontend loads SharedView component → GET `/shared/{share_id}` → Service checks access control → Returns snapshot data → Renders read-only view +3. **Update Share**: Owner changes access level in modal → PATCH `/conversations/{session_id}/share` → Service updates access_level/allowed_emails in DynamoDB +4. **Revoke Share**: Owner clicks revoke → DELETE `/conversations/{session_id}/share` → Service deletes record from DynamoDB + +## Components and Interfaces + +### Backend Components + +#### 1. Share Router (`backend/src/apis/app_api/shares/routes.py`) + +New FastAPI router registered in `main.py` with prefix `/conversations`. + +```python +# Endpoints: +POST /conversations/{session_id}/share # Create share snapshot +GET /shared/{share_id} # Retrieve shared conversation +PATCH /conversations/{session_id}/share # Update access level / emails +DELETE /conversations/{session_id}/share # Revoke share +``` + +The GET `/shared/{share_id}` endpoint is on a separate router with prefix `/shared` since it's a different resource path. + +All endpoints require authentication via `get_current_user` dependency. The create/update/delete endpoints verify session ownership. The GET endpoint checks access based on access_level. + +#### 2. Share Service (`backend/src/apis/app_api/shares/service.py`) + +Business logic layer handling: +- **Snapshot creation**: Calls `get_messages(session_id, user_id)` and `get_session_metadata(session_id, user_id)` to capture current state, serializes to JSON-safe dicts +- **Access control**: Validates requester against access_level and allowed_emails +- **Owner email auto-inclusion**: When access_level is "specific", ensures owner email is in allowed_emails +- **Replace existing share**: Checks SessionShareIndex GSI for existing share; if found, deletes the old share before creating a new snapshot with a new share_id and URL (one active share per session) +- **DynamoDB operations**: Put, get, update, delete on the shared-conversations table + +#### 3. Share Models (`backend/src/apis/app_api/shares/models.py`) + +Pydantic models following existing patterns in `apis/shared/sessions/models.py`: + +```python +class CreateShareRequest(BaseModel): + access_level: Literal["public", "specific", "private"] + allowed_emails: Optional[List[str]] = None # Required when access_level == "specific" + +class UpdateShareRequest(BaseModel): + access_level: Optional[Literal["public", "specific", "private"]] = None + allowed_emails: Optional[List[str]] = None # Required when access_level is/becomes "specific" + +class ShareResponse(BaseModel): + share_id: str + session_id: str + owner_id: str + access_level: Literal["public", "specific", "private"] + allowed_emails: Optional[List[str]] = None + created_at: str + share_url: str + +class SharedConversationResponse(BaseModel): + share_id: str + title: str + access_level: Literal["public", "specific", "private"] + created_at: str + owner_id: str + messages: List[MessageResponse] # Reuses existing MessageResponse model +``` + +### Frontend Components + +#### 4. Share Service (`frontend/ai.client/src/app/session/services/share/share.service.ts`) + +Angular injectable service following the pattern in `session.service.ts`: + +```typescript +@Injectable({ providedIn: 'root' }) +export class ShareService { + createShare(sessionId: string, accessLevel: string, allowedEmails?: string[]): Promise + getSharedConversation(shareId: string): Promise + updateShare(sessionId: string, accessLevel?: string, allowedEmails?: string[]): Promise + revokeShare(sessionId: string): Promise +} +``` + +Uses `HttpClient`, `ConfigService` for base URL, and `AuthService.ensureAuthenticated()` before each call. + +#### 5. Share Modal Component (`frontend/ai.client/src/app/session/components/share-modal/`) + +Standalone Angular component rendered as a dialog overlay. Contains: +- Three radio-style access options (private/public/specific) +- Email input with tag-style chips when "specific" is selected +- Owner email shown as non-removable chip +- "Create share link" / "Update" button +- Generated URL display with "Copy link" button +- "Chat shared" confirmation with "Future messages aren't included" note +- Error state with retry + +Opens via the session list ellipsis menu. When opened for a session with an existing share, displays current settings for editing plus a "Create new share link" button to replace the existing share with a fresh snapshot (note: "This will replace the existing share link"). + +#### 6. Shared View Component (`frontend/ai.client/src/app/shared/shared-view.page.ts`) + +Standalone page component at route `/shared/:shareId`: +- Fetches shared conversation via `ShareService.getSharedConversation()` +- Renders conversation title, creation timestamp, and messages +- Reuses existing message rendering components (text, code blocks, images, tool results) +- No message input field or editing controls +- Displays "Shared read-only snapshot" banner +- Shows error states for 404/403 + +#### 7. Session List Menu Update + +Add "Share" menu item between "Rename" and "Delete" in `session-list.html`: +- Icon: `heroArrowUpOnSquare` +- Click handler opens Share Modal for the selected session + +#### 8. Route Configuration + +Add to `app.routes.ts`: +```typescript +{ + path: 'shared/:shareId', + loadComponent: () => import('./shared/shared-view.page').then(m => m.SharedViewPage), + canActivate: [authGuard], +} +``` + +### Infrastructure Components + +#### 9. Shared Conversations DynamoDB Table + +Defined in `infrastructure/lib/infrastructure-stack.ts` following existing table patterns: + +- Table name: `{projectPrefix}-shared-conversations` +- Partition key: `share_id` (String) +- Billing: PAY_PER_REQUEST +- Point-in-time recovery: enabled +- Encryption: AWS_MANAGED +- GSI `SessionShareIndex`: PK = `session_id` (String) — lookup shares by original session +- GSI `OwnerShareIndex`: PK = `owner_id` (String), SK = `created_at` (String) — list shares by owner +- SSM exports: `/{projectPrefix}/shares/shared-conversations-table-name` and `/{projectPrefix}/shares/shared-conversations-table-arn` +- Table permissions granted to App API Fargate task role + +## Data Models + +### DynamoDB Item Schema (Shared Conversations Table) + +| Field | Type | Description | +|-------|------|-------------| +| `share_id` | String (PK) | UUID, unique identifier for the share | +| `session_id` | String | Original session ID (GSI `SessionShareIndex` PK) | +| `owner_id` | String | User ID of the share creator (GSI `OwnerShareIndex` PK) | +| `access_level` | String | "public", "specific", or "private" | +| `allowed_emails` | List\ | Email addresses allowed access (only when access_level = "specific") | +| `created_at` | String | ISO 8601 timestamp (GSI `OwnerShareIndex` SK) | +| `metadata` | Map | Snapshot of session metadata (title, created_at, message_count, etc.) | +| `messages` | List\ | Snapshot of all messages at share time (role, content blocks, timestamps) | + +### Access Control Matrix + +| access_level | Owner | Email in allowed_emails | Other authenticated user | +|-------------|-------|------------------------|------------------------| +| private | ✅ | ❌ | ❌ | +| public | ✅ | ✅ | ✅ | +| specific | ✅ | ✅ | ❌ | + + +## Correctness Properties + +*A property is a characteristic or behavior that should hold true across all valid executions of a system — essentially, a formal statement about what the system should do. Properties serve as the bridge between human-readable specifications and machine-verifiable correctness guarantees.* + +### Property 1: Share creation snapshot round-trip + +*For any* valid session with metadata and messages, creating a share (with any access_level) and then retrieving it should return a snapshot whose metadata title and messages content match the original session data at the time of creation, and the stored record should contain all required fields (share_id, session_id, owner_id, access_level, created_at, metadata, messages). + +**Validates: Requirements 1.1, 1.4, 1.8** + +### Property 2: Owner email auto-inclusion invariant + +*For any* create or update operation where access_level is "specific" and any set of allowed_emails, the resulting stored allowed_emails list shall always contain the owner's email address, regardless of whether it was explicitly included in the input. + +**Validates: Requirements 1.2, 4.2, 4.5** + +### Property 3: "Specific" access requires non-empty allowed_emails + +*For any* create or update request where access_level is "specific" and allowed_emails is empty or missing, the API shall return a 422 validation error and no share record shall be created or modified. + +**Validates: Requirements 1.3, 4.3** + +### Property 4: Non-owner operations return 403 + +*For any* share operation (create, update, or delete) attempted by a user who does not own the target session, the API shall return a 403 Forbidden error and the share state shall remain unchanged. + +**Validates: Requirements 1.5, 3.2, 4.6** + +### Property 5: Access control matrix + +*For any* shared conversation and any authenticated requesting user: if access_level is "public", retrieval succeeds; if access_level is "specific", retrieval succeeds if and only if the requester's email is in allowed_emails or the requester is the owner; if access_level is "private", retrieval succeeds if and only if the requester is the owner. + +**Validates: Requirements 2.1, 2.2, 2.3, 2.4, 2.5** + +### Property 6: Re-share replaces existing share + +*For any* session that already has an active share, sending a create share request shall delete the old share record, create a new share with a different share_id, and the old share_id shall return 404 on subsequent retrieval. + +**Validates: Requirements 1.7** + +### Property 7: Revocation removes access + +*For any* existing share, after the owner sends a delete request, subsequent GET requests for that share_id shall return 404 Not Found. + +**Validates: Requirements 3.1, 3.4** + +### Property 8: Non-specific access levels clear allowed_emails + +*For any* existing share updated to access_level "public" or "private", the resulting stored record shall have the allowed_emails field cleared (empty or absent). + +**Validates: Requirements 4.1, 4.4** + +### Property 9: ShareResponse serialization round-trip + +*For any* valid ShareResponse object, serializing to JSON and then deserializing back shall produce an equivalent object with all fields preserved. + +**Validates: Requirements 10.5** + +### Property 10: Shared view renders all snapshot data + +*For any* SharedConversationResponse, the rendered shared view shall display the conversation title, the share creation timestamp, and all messages from the snapshot. + +**Validates: Requirements 7.2** + +## Error Handling + +### Backend Error Handling + +| Scenario | HTTP Status | Error Detail | +|----------|-------------|-------------| +| Missing/invalid auth token | 401 | "Authentication required" (handled by `get_current_user` dependency) | +| User doesn't own the session | 403 | "You do not have permission to share this session" | +| User not in allowed_emails for specific share | 403 | "Access denied" | +| Non-owner accessing private share | 403 | "Access denied" | +| Session not found | 404 | "Session not found: {session_id}" | +| Share not found | 404 | "Share not found" | +| Revoked share accessed | 404 | "Share not found" | +| access_level "specific" with empty allowed_emails | 422 | "allowed_emails is required when access_level is 'specific'" | +| Invalid access_level value | 422 | Pydantic validation error (automatic from Literal type) | +| DynamoDB write failure | 503 | "Failed to create share" | +| AgentCore Memory retrieval failure | 500 | "Failed to snapshot conversation messages" | + +### Frontend Error Handling + +- **Share creation failure**: Modal displays error message with retry button; does not close modal +- **Share retrieval failure (403)**: Shared view displays "Access denied — you don't have permission to view this conversation" +- **Share retrieval failure (404)**: Shared view displays "Conversation not found — this share link may have been revoked" +- **Network errors**: Toast notification with retry option +- **Clipboard copy failure**: Fallback to selecting the URL text for manual copy + +### Retry Strategy + +- Frontend retries are user-initiated (retry button in error states) +- No automatic retries on 4xx errors (client errors are deterministic) +- DynamoDB operations use boto3's built-in retry with exponential backoff + +## Testing Strategy + +### Unit Tests (pytest / Vitest) + +Focus on specific examples, edge cases, and integration points: + +- **Backend unit tests** (`backend/tests/apis/app_api/shares/`): + - Pydantic model validation (valid/invalid inputs for CreateShareRequest, UpdateShareRequest) + - Access control logic with specific user/email combinations + - Share service methods with mocked DynamoDB and AgentCore Memory + - Router endpoint integration tests with TestClient + - Edge cases: empty session (no messages), very long email lists, unicode in metadata + +- **Frontend unit tests** (`frontend/ai.client/src/app/`): + - Share service HTTP calls with mocked HttpClient + - Share modal component: radio selection, email input, validation states + - Shared view component: loading, success, error states + - Session list menu: "Share" item presence and click handler + +### Property-Based Tests (Hypothesis for Python / fast-check for TypeScript) + +Each property test runs a minimum of 100 iterations with randomly generated inputs. Each test is tagged with its corresponding design property. + +- **Backend property tests** (`backend/tests/apis/app_api/shares/test_share_properties.py`): + - **Feature: share-conversations, Property 1**: Generate random session metadata + messages, create share, retrieve, verify round-trip + - **Feature: share-conversations, Property 2**: Generate random email lists and owner emails, verify owner always in result + - **Feature: share-conversations, Property 3**: Generate requests with access_level "specific" and empty/None allowed_emails, verify 422 + - **Feature: share-conversations, Property 4**: Generate random non-owner user IDs, verify 403 on all operations + - **Feature: share-conversations, Property 5**: Generate random (access_level, requester_email, owner_id, allowed_emails) tuples, verify access decision matches matrix + - **Feature: share-conversations, Property 6**: Create share, then create again for same session, verify new share_id returned and old share_id returns 404 + - **Feature: share-conversations, Property 7**: Create then delete share, verify retrieval returns 404 + - **Feature: share-conversations, Property 8**: Generate random shares, update to public/private, verify allowed_emails cleared + - **Feature: share-conversations, Property 9**: Generate random valid ShareResponse objects, serialize/deserialize, verify equality + +- **Frontend property tests** (fast-check): + - **Feature: share-conversations, Property 10**: Generate random SharedConversationResponse data, render component, verify title/timestamp/messages present + +### Testing Libraries + +- **Backend**: `hypothesis` (already in use — `.hypothesis/` directory exists in `backend/`) +- **Frontend**: `fast-check` (to be added to `package.json`) +- **Minimum iterations**: 100 per property test +- **Tag format**: `# Feature: share-conversations, Property {N}: {title}` diff --git a/.kiro/specs/share-conversations/requirements.md b/.kiro/specs/share-conversations/requirements.md new file mode 100644 index 00000000..685e63c7 --- /dev/null +++ b/.kiro/specs/share-conversations/requirements.md @@ -0,0 +1,175 @@ +# Requirements: Share Conversations via Shareable URL + +## Introduction + +Enable users to share conversations by generating a shareable URL that provides a read-only snapshot of the conversation at the time of sharing. Users control access via a modal with three options: "Keep private" (only the owner can access, default), "Public link" (any authenticated user with the link), or "Specific people" (only designated email addresses). The snapshot is point-in-time; future messages are not included. + +## Glossary + +- **Share_Modal**: The dialog overlay component that presents access options, email input controls, and displays the generated shareable URL +- **Share_Service**: The frontend Angular service responsible for making API calls to the share endpoints +- **Share_API**: The backend FastAPI router handling share creation, retrieval, update, and revocation +- **Shared_Conversations_Table**: The DynamoDB table storing shared conversation snapshots (partition key: share_id) +- **Snapshot**: A point-in-time copy of conversation metadata and messages, frozen at the moment of sharing +- **Share_ID**: A UUID that uniquely identifies a shared conversation snapshot +- **Access_Level**: The visibility setting for a shared conversation: "public" (any authenticated user with the link), "specific" (only designated email addresses), or "private" (only the owner can access) +- **Allowed_Emails**: A list of email addresses permitted to view a shared conversation when access_level is "specific"; the owner's email is always automatically included +- **Session_List**: The sidebar component displaying the user's conversation list with ellipsis menus +- **Shared_View**: The read-only page component that renders a shared conversation snapshot at the `/shared/{share_id}` route + +## Requirements + +### Requirement 1: Share Conversation Snapshot Creation + +**User Story:** As a user, I want to create a shareable snapshot of a conversation, so that I can share a point-in-time copy with others. + +#### Acceptance Criteria + +1. WHEN a user sends a POST request to `/conversations/{session_id}/share` with access_level "public", THE Share_API SHALL create a Snapshot containing the conversation metadata and all messages at that point in time, generate a unique Share_ID, store the Snapshot in the Shared_Conversations_Table, and return the Share_ID and shareable URL in the response. +2. WHEN a user sends a POST request to `/conversations/{session_id}/share` with access_level "specific" and a non-empty allowed_emails list, THE Share_API SHALL automatically include the owner's email in the allowed_emails list if not already present, create a Snapshot, store the Snapshot along with the allowed_emails list in the Shared_Conversations_Table, and return the Share_ID and shareable URL in the response. +3. WHEN a user sends a POST request to `/conversations/{session_id}/share` with access_level "specific" and an empty or missing allowed_emails list, THE Share_API SHALL return a 422 Validation Error indicating that allowed_emails is required for "specific" access. +4. WHEN a user sends a POST request to `/conversations/{session_id}/share` with access_level "private", THE Share_API SHALL create a Snapshot accessible only to the owner, store the Snapshot in the Shared_Conversations_Table without an allowed_emails list, and return the Share_ID and shareable URL in the response. +5. WHEN a user sends a POST request to `/conversations/{session_id}/share` for a session the user does not own, THE Share_API SHALL return a 403 Forbidden error. +6. WHEN a user sends a POST request to `/conversations/{session_id}/share` for a session that does not exist, THE Share_API SHALL return a 404 Not Found error. +7. WHEN a user sends a POST request to `/conversations/{session_id}/share` for a session that already has an active share, THE Share_API SHALL revoke the existing share (delete the old Snapshot from the Shared_Conversations_Table), create a fresh Snapshot with a new Share_ID and URL, and return the new share details. The old share link SHALL no longer be accessible. +8. THE Share_API SHALL store the following fields in the Shared_Conversations_Table for each Snapshot: share_id (PK), session_id, owner_id, access_level, allowed_emails (list, stored only when access_level is "specific"), created_at (ISO 8601), metadata (conversation metadata snapshot), and messages (conversation messages snapshot). + +--- + +### Requirement 2: Shared Conversation Retrieval + +**User Story:** As an authenticated user, I want to view a shared conversation via its shareable URL, so that I can read the conversation content. + +#### Acceptance Criteria + +1. WHEN an authenticated user sends a GET request to `/shared/{share_id}` for a share with access_level "public", THE Share_API SHALL return the Snapshot metadata and messages in a read-only format. +2. WHEN an authenticated user sends a GET request to `/shared/{share_id}` for a share with access_level "specific" and the requesting user's email is in the allowed_emails list, THE Share_API SHALL return the Snapshot metadata and messages in a read-only format. +3. WHEN an authenticated user sends a GET request to `/shared/{share_id}` for a share with access_level "specific" and the requesting user's email is not in the allowed_emails list and the user is not the owner, THE Share_API SHALL return a 403 Forbidden error. +4. WHEN an authenticated user sends a GET request to `/shared/{share_id}` for a share with access_level "private" and the user is not the owner, THE Share_API SHALL return a 403 Forbidden error. +5. WHEN an authenticated user sends a GET request to `/shared/{share_id}` for a share the user owns, THE Share_API SHALL return the Snapshot metadata and messages regardless of access_level. +6. WHEN an unauthenticated request is sent to GET `/shared/{share_id}`, THE Share_API SHALL return a 401 Unauthorized error. +7. WHEN a GET request is sent to `/shared/{share_id}` with a Share_ID that does not exist, THE Share_API SHALL return a 404 Not Found error. + +--- + +### Requirement 3: Share Link Revocation + +**User Story:** As a user, I want to revoke a shared link, so that the conversation is no longer accessible to others. + +#### Acceptance Criteria + +1. WHEN a user sends a DELETE request to `/conversations/{session_id}/share`, THE Share_API SHALL remove the Snapshot from the Shared_Conversations_Table and return a 204 No Content response. +2. WHEN a user sends a DELETE request to `/conversations/{session_id}/share` for a session the user does not own, THE Share_API SHALL return a 403 Forbidden error. +3. WHEN a user sends a DELETE request to `/conversations/{session_id}/share` for a session with no active share, THE Share_API SHALL return a 404 Not Found error. +4. WHEN a share has been revoked, THE Share_API SHALL return a 404 Not Found error for subsequent GET requests to `/shared/{share_id}` using the revoked Share_ID. + +--- + +### Requirement 4: Share Access Level and Allowed Emails Update + +**User Story:** As a user, I want to change the access level and allowed email list of an existing share, so that I can adjust who can view the shared conversation without creating a new link. + +#### Acceptance Criteria + +1. WHEN a user sends a PATCH request to `/conversations/{session_id}/share` with a new access_level value of "public", THE Share_API SHALL update the access_level field in the Shared_Conversations_Table, clear the allowed_emails field, and return the updated share details. +2. WHEN a user sends a PATCH request to `/conversations/{session_id}/share` with access_level "specific" and a non-empty allowed_emails list, THE Share_API SHALL automatically include the owner's email in the allowed_emails list if not already present, update both the access_level and allowed_emails fields in the Shared_Conversations_Table, and return the updated share details. +3. WHEN a user sends a PATCH request to `/conversations/{session_id}/share` with access_level "specific" and an empty or missing allowed_emails list, THE Share_API SHALL return a 422 Validation Error indicating that allowed_emails is required for "specific" access. +4. WHEN a user sends a PATCH request to `/conversations/{session_id}/share` with a new access_level value of "private", THE Share_API SHALL update the access_level field in the Shared_Conversations_Table, clear the allowed_emails field, and return the updated share details. +5. WHEN a user sends a PATCH request to `/conversations/{session_id}/share` with only an updated allowed_emails list (no access_level change), THE Share_API SHALL automatically include the owner's email in the allowed_emails list if not already present, update the allowed_emails field, and return the updated share details. +6. WHEN a user sends a PATCH request to `/conversations/{session_id}/share` for a session the user does not own, THE Share_API SHALL return a 403 Forbidden error. +7. WHEN a user sends a PATCH request to `/conversations/{session_id}/share` for a session with no active share, THE Share_API SHALL return a 404 Not Found error. +8. WHEN a user sends a PATCH request with an invalid access_level value (not "public", "specific", or "private"), THE Share_API SHALL return a 422 Validation Error. + +--- + +### Requirement 5: Share Modal UI + +**User Story:** As a user, I want a modal dialog to configure and create a share link, so that I can control access and copy the URL conveniently. + +#### Acceptance Criteria + +1. WHEN a user clicks "Share conversation" from the ellipsis menu on a session in the Session_List, THE Share_Modal SHALL open as a dialog overlay. +2. THE Share_Modal SHALL display three access options as radio-style selections: "Keep private" (only the owner can access, selected by default), "Public link" (any authenticated user with the link can view), and "Specific people" (only designated email addresses can view). +3. WHEN the user selects "Specific people", THE Share_Modal SHALL display an email input field that allows the user to add one or more email addresses to the allowed list, with the current user's email shown as a non-removable entry. +4. THE Share_Modal SHALL allow the user to remove individual email addresses from the allowed list by clicking a remove control next to each email. +5. WHEN the user clicks "Create share link" with "Specific people" selected and no email addresses added, THE Share_Modal SHALL display a validation message indicating at least one email address is required. +6. WHEN the user clicks "Create share link", THE Share_Modal SHALL call the Share_Service to create the share with the selected access_level and allowed_emails (when applicable) and display the generated URL with a "Copy link" button. +7. WHEN the user clicks the "Copy link" button, THE Share_Modal SHALL copy the shareable URL to the clipboard and display a confirmation indicator. +8. WHEN a share is successfully created, THE Share_Modal SHALL display a "Chat shared" confirmation with the note "Future messages aren't included". +9. IF the Share_Service returns an error during share creation, THEN THE Share_Modal SHALL display an error message and allow the user to retry. +10. WHEN the Share_Modal opens for a session that already has an active share, THE Share_Modal SHALL display the existing share URL, current access level, and current allowed email list (when access_level is "specific"), along with a "Create new share link" button to replace the existing share with a fresh snapshot. +11. WHEN the user modifies the access level or allowed email list for an existing share, THE Share_Modal SHALL call the Share_Service to update the share and reflect the updated settings. +12. WHEN the user clicks "Create new share link" for a session with an existing share, THE Share_Modal SHALL display a note "This will replace the existing share link" and call the Share_Service to create a new share, which revokes the old link and generates a fresh snapshot with a new URL. + +--- + +### Requirement 6: Session List Menu Integration + +**User Story:** As a user, I want a "Share" option in the conversation ellipsis menu, so that I can initiate sharing from the sidebar. + +#### Acceptance Criteria + +1. THE Session_List SHALL display a "Share" menu item in the ellipsis menu for each conversation, positioned between the "Rename" and "Delete" options. +2. WHEN the user clicks the "Share" menu item, THE Session_List SHALL open the Share_Modal for the selected conversation. +3. THE "Share" menu item SHALL display a share icon (heroArrowUpOnSquare or equivalent) alongside the text label. + +--- + +### Requirement 7: Shared Conversation Read-Only View + +**User Story:** As a user viewing a shared conversation, I want a read-only view that displays the conversation content, so that I can read the shared messages without editing capabilities. + +#### Acceptance Criteria + +1. THE Shared_View SHALL be accessible at the route `/shared/{share_id}` and protected by the existing auth guard. +2. THE Shared_View SHALL display the conversation title, the share creation timestamp, and all messages from the Snapshot. +3. THE Shared_View SHALL render message content identically to the existing conversation view, including text, code blocks, images, and tool results. +4. THE Shared_View SHALL NOT display a message input field or any controls that allow modifying the conversation. +5. THE Shared_View SHALL display a visual indicator (banner or badge) that the conversation is a shared read-only snapshot. +6. IF the Share_API returns a 404 or 403 for the requested Share_ID, THEN THE Shared_View SHALL display an appropriate error message ("Conversation not found" or "Access denied"). + +--- + +### Requirement 8: Share Frontend Service + +**User Story:** As a frontend developer, I want a dedicated service for share API calls, so that share logic is encapsulated and reusable. + +#### Acceptance Criteria + +1. THE Share_Service SHALL expose a method to create a share (POST `/conversations/{session_id}/share`) that accepts a session ID, access level, and an optional list of allowed emails, and returns the share details including the shareable URL. +2. THE Share_Service SHALL expose a method to retrieve a shared conversation (GET `/shared/{share_id}`) that accepts a Share_ID and returns the Snapshot data. +3. THE Share_Service SHALL expose a method to revoke a share (DELETE `/conversations/{session_id}/share`) that accepts a session ID. +4. THE Share_Service SHALL expose a method to update share settings (PATCH `/conversations/{session_id}/share`) that accepts a session ID, an optional new access level, and an optional updated allowed emails list. +5. THE Share_Service SHALL ensure the user is authenticated before making API calls by calling `AuthService.ensureAuthenticated()`. + +--- + +### Requirement 9: Shared Conversations DynamoDB Table + +**User Story:** As a platform operator, I want a DynamoDB table for shared conversation snapshots, so that shared data is stored reliably and independently from the original conversation. + +#### Acceptance Criteria + +1. THE Shared_Conversations_Table SHALL be defined in the CDK InfrastructureStack with the table name `{projectPrefix}-shared-conversations`. +2. THE Shared_Conversations_Table SHALL use `share_id` (String) as the partition key. +3. THE Shared_Conversations_Table SHALL use PAY_PER_REQUEST billing mode. +4. THE Shared_Conversations_Table SHALL have point-in-time recovery enabled. +5. THE Shared_Conversations_Table SHALL use AWS managed encryption. +6. THE Shared_Conversations_Table SHALL have a Global Secondary Index named `SessionShareIndex` with partition key `session_id` (String) to enable lookup of shares by original session. +7. THE Shared_Conversations_Table name and ARN SHALL be exported to SSM parameters at `/{projectPrefix}/shares/shared-conversations-table-name` and `/{projectPrefix}/shares/shared-conversations-table-arn`. +8. THE Shared_Conversations_Table SHALL have a Global Secondary Index named `OwnerShareIndex` with partition key `owner_id` (String) and sort key `created_at` (String) to enable listing shares by owner. +9. THE Shared_Conversations_Table SHALL store the allowed_emails field as a List of Strings for items where access_level is "specific". + +--- + +### Requirement 10: Backend Share Data Models + +**User Story:** As a backend developer, I want Pydantic models for share requests and responses, so that API contracts are validated and documented. + +#### Acceptance Criteria + +1. THE Share_API SHALL define a `CreateShareRequest` model with a required `access_level` field accepting "public", "specific", or "private", and an optional `allowed_emails` field (List of strings) that is required when access_level is "specific". +2. THE Share_API SHALL define a `ShareResponse` model containing: share_id, session_id, owner_id, access_level, allowed_emails (optional list), created_at, and share_url. +3. THE Share_API SHALL define a `SharedConversationResponse` model containing: share_id, title, access_level, created_at, owner_id, and messages (list of MessageResponse). +4. THE Share_API SHALL define an `UpdateShareRequest` model with an optional `access_level` field accepting "public", "specific", or "private", and an optional `allowed_emails` field (List of strings) that is required when access_level is "specific". +5. FOR ALL valid ShareResponse objects, serializing to JSON then deserializing back SHALL produce an equivalent object (round-trip property). \ No newline at end of file diff --git a/.kiro/specs/share-conversations/tasks.md b/.kiro/specs/share-conversations/tasks.md new file mode 100644 index 00000000..8f88f47c --- /dev/null +++ b/.kiro/specs/share-conversations/tasks.md @@ -0,0 +1,163 @@ +# Implementation Plan: Share Conversations via Shareable URL + +## Overview + +Incremental implementation starting with infrastructure (DynamoDB table + SSM exports), then backend (models → service → routes → registration), then frontend (service → modal → session list integration → shared view → routing). Each task builds on the previous, ensuring no orphaned code. + +## Tasks + +- [x] 1. Add Shared Conversations DynamoDB table to infrastructure stack + - [x] 1.1 Define the `{projectPrefix}-shared-conversations` DynamoDB table in `infrastructure/lib/infrastructure-stack.ts` + - Partition key: `share_id` (String), PAY_PER_REQUEST billing, point-in-time recovery enabled, AWS_MANAGED encryption + - Add GSI `SessionShareIndex` with partition key `session_id` (String) + - Add GSI `OwnerShareIndex` with partition key `owner_id` (String) and sort key `created_at` (String) + - Export table name and ARN to SSM at `/{projectPrefix}/shares/shared-conversations-table-name` and `/{projectPrefix}/shares/shared-conversations-table-arn` + - Follow existing table patterns (e.g., UsersTable, AppRolesTable) in the same file + - _Requirements: 9.1, 9.2, 9.3, 9.4, 9.5, 9.6, 9.7, 9.8, 9.9_ + +- [x] 2. Implement backend share data models + - [x] 2.1 Create `backend/src/apis/app_api/shares/__init__.py` and `backend/src/apis/app_api/shares/models.py` + - Define `CreateShareRequest` with `access_level: Literal["public", "specific", "private"]` and `allowed_emails: Optional[List[str]]` + - Define `UpdateShareRequest` with optional `access_level` and optional `allowed_emails` + - Define `ShareResponse` with share_id, session_id, owner_id, access_level, allowed_emails, created_at, share_url + - Define `SharedConversationResponse` with share_id, title, access_level, created_at, owner_id, messages (List[MessageResponse]) + - Add Pydantic validator: when access_level is "specific", allowed_emails must be non-empty + - Follow patterns from `backend/src/apis/shared/sessions/models.py` (ConfigDict, Field aliases, by_alias) + - _Requirements: 10.1, 10.2, 10.3, 10.4_ + + - [ ]* 2.2 Write property test for ShareResponse serialization round-trip + - **Property 9: ShareResponse serialization round-trip** + - Generate random valid ShareResponse objects with Hypothesis, serialize to JSON, deserialize back, verify equality + - Place test in `backend/tests/apis/app_api/shares/test_share_properties.py` + - **Validates: Requirements 10.5** + +- [x] 3. Implement backend share service + - [x] 3.1 Create `backend/src/apis/app_api/shares/service.py` with `ShareService` class + - `create_share(session_id, user_id, user_email, access_level, allowed_emails)`: snapshot messages via `get_messages()` and metadata via `get_session_metadata()`, generate UUID share_id, store in DynamoDB, return ShareResponse + - `get_shared_conversation(share_id, requester_id, requester_email)`: fetch from DynamoDB, check access control, return SharedConversationResponse + - `update_share(session_id, user_id, user_email, access_level, allowed_emails)`: lookup via SessionShareIndex GSI, verify ownership, update fields in DynamoDB + - `revoke_share(session_id, user_id)`: lookup via SessionShareIndex GSI, verify ownership, delete from DynamoDB + - Auto-include owner email in allowed_emails when access_level is "specific" + - On create: check SessionShareIndex for existing share, delete old share before creating new one + - Clear allowed_emails when access_level changes to "public" or "private" + - Read table name from `SHARED_CONVERSATIONS_TABLE_NAME` environment variable + - _Requirements: 1.1, 1.2, 1.4, 1.5, 1.6, 1.7, 1.8, 2.1, 2.2, 2.3, 2.4, 2.5, 3.1, 3.2, 3.3, 3.4, 4.1, 4.2, 4.4, 4.5_ + + - [ ]* 3.2 Write property test for owner email auto-inclusion + - **Property 2: Owner email auto-inclusion invariant** + - Generate random email lists and owner emails with Hypothesis, verify owner always in resulting allowed_emails + - **Validates: Requirements 1.2, 4.2, 4.5** + + - [ ]* 3.3 Write property test for access control matrix + - **Property 5: Access control matrix** + - Generate random (access_level, requester_email, owner_id, allowed_emails) tuples, verify access decision matches the matrix + - **Validates: Requirements 2.1, 2.2, 2.3, 2.4, 2.5** + + - [ ]* 3.4 Write property test for non-specific access levels clearing allowed_emails + - **Property 8: Non-specific access levels clear allowed_emails** + - Generate random shares, update to public/private, verify allowed_emails cleared + - **Validates: Requirements 4.1, 4.4** + +- [x] 4. Implement backend share routes and register in main.py + - [x] 4.1 Create `backend/src/apis/app_api/shares/routes.py` with FastAPI router + - `POST /conversations/{session_id}/share` — create share, validate request, call service, return ShareResponse + - `GET /shared/{share_id}` — retrieve shared conversation (on a separate router with `/shared` prefix) + - `PATCH /conversations/{session_id}/share` — update access level/emails + - `DELETE /conversations/{session_id}/share` — revoke share, return 204 + - All endpoints use `Depends(get_current_user)` for authentication + - Return proper HTTP status codes: 403 for non-owner, 404 for not found, 422 for validation errors + - _Requirements: 1.1, 1.2, 1.3, 1.4, 1.5, 1.6, 1.7, 2.1, 2.2, 2.3, 2.4, 2.5, 2.6, 2.7, 3.1, 3.2, 3.3, 4.1, 4.2, 4.3, 4.4, 4.5, 4.6, 4.7, 4.8_ + + - [x] 4.2 Register share routers in `backend/src/apis/app_api/main.py` + - Import and include the conversations share router and the shared view router + - Follow existing router registration pattern + - _Requirements: 1.1, 2.1_ + + - [ ]* 4.3 Write property test for "specific" access requires non-empty allowed_emails + - **Property 3: "Specific" access requires non-empty allowed_emails** + - Generate create/update requests with access_level "specific" and empty/None allowed_emails, verify 422 response + - **Validates: Requirements 1.3, 4.3** + + - [ ]* 4.4 Write property test for non-owner operations return 403 + - **Property 4: Non-owner operations return 403** + - Generate random non-owner user IDs, verify 403 on create/update/delete operations + - **Validates: Requirements 1.5, 3.2, 4.6** + + - [ ]* 4.5 Write property test for re-share replaces existing share + - **Property 6: Re-share replaces existing share** + - Create share, then create again for same session, verify new share_id and old share_id returns 404 + - **Validates: Requirements 1.7** + + - [ ]* 4.6 Write property test for revocation removes access + - **Property 7: Revocation removes access** + - Create then delete share, verify retrieval returns 404 + - **Validates: Requirements 3.1, 3.4** + +- [x] 5. Checkpoint - Backend complete + - Ensure all tests pass, ask the user if questions arise. + +- [x] 6. Implement frontend share service + - [x] 6.1 Create `frontend/ai.client/src/app/session/services/share/share.service.ts` + - Injectable service with `HttpClient`, `ConfigService`, `AuthService` + - `createShare(sessionId, accessLevel, allowedEmails?)`: POST to `/conversations/{sessionId}/share` + - `getSharedConversation(shareId)`: GET `/shared/{shareId}` + - `updateShare(sessionId, accessLevel?, allowedEmails?)`: PATCH `/conversations/{sessionId}/share` + - `revokeShare(sessionId)`: DELETE `/conversations/{sessionId}/share` + - Call `AuthService.ensureAuthenticated()` before each request + - Define TypeScript interfaces: `ShareResponse`, `SharedConversationResponse`, `CreateShareRequest`, `UpdateShareRequest` + - Follow patterns from `session.service.ts` + - _Requirements: 8.1, 8.2, 8.3, 8.4, 8.5_ + +- [x] 7. Implement share modal component + - [x] 7.1 Create `frontend/ai.client/src/app/session/components/share-modal/` with standalone Angular component + - Dialog overlay with three radio-style access options: "Keep private" (default), "Public link", "Specific people" + - When "Specific people" selected: email input field with tag-style chips, owner email as non-removable chip + - Remove button on each added email chip + - Validation: at least one email required when "Specific people" selected + - "Create share link" button calls ShareService.createShare() + - On success: display generated URL with "Copy link" button, "Chat shared" confirmation, "Future messages aren't included" note + - On error: display error message with retry option + - When opened for session with existing share: display current settings, existing URL, "Create new share link" button with "This will replace the existing share link" note + - Modify access level/emails for existing share calls ShareService.updateShare() + - Use Tailwind CSS, ng-icons, follow existing component patterns + - _Requirements: 5.1, 5.2, 5.3, 5.4, 5.5, 5.6, 5.7, 5.8, 5.9, 5.10, 5.11, 5.12_ + +- [x] 8. Integrate share into session list menu + - [x] 8.1 Add "Share" menu item to session list ellipsis menu in `frontend/ai.client/src/app/components/sidenav/components/session-list/` + - Add "Share" button between "Rename" and "Delete" in the `sessionMenu` template + - Use `heroArrowUpOnSquare` icon + - Click handler opens the Share Modal for the selected session + - _Requirements: 6.1, 6.2, 6.3_ + +- [x] 9. Implement shared conversation read-only view and route + - [x] 9.1 Create `frontend/ai.client/src/app/shared/shared-view.page.ts` standalone page component + - Fetch shared conversation via `ShareService.getSharedConversation(shareId)` using route param + - Display conversation title, share creation timestamp, all messages from snapshot + - Reuse existing message rendering components for text, code blocks, images, tool results + - No message input field or editing controls + - Display "Shared read-only snapshot" banner/badge + - Error states: "Conversation not found" for 404, "Access denied" for 403 + - Loading state while fetching + - _Requirements: 7.1, 7.2, 7.3, 7.4, 7.5, 7.6_ + + - [x] 9.2 Add `/shared/:shareId` route to `frontend/ai.client/src/app/app.routes.ts` + - Lazy-load SharedViewPage component + - Protected by `authGuard` + - _Requirements: 7.1_ + + - [ ]* 9.3 Write property test for shared view rendering all snapshot data + - **Property 10: Shared view renders all snapshot data** + - Use fast-check to generate random SharedConversationResponse data, render component, verify title/timestamp/messages present in DOM + - **Validates: Requirements 7.2** + +- [x] 10. Final checkpoint - Ensure all tests pass + - Ensure all tests pass, ask the user if questions arise. + +## Notes + +- Tasks marked with `*` are optional and can be skipped for faster MVP +- Each task references specific requirements for traceability +- Checkpoints ensure incremental validation +- Property tests validate universal correctness properties from the design document +- Backend uses Python (FastAPI, Pydantic, Hypothesis), frontend uses TypeScript (Angular, fast-check) +- Infrastructure uses TypeScript (AWS CDK) diff --git a/.kiro/steering/versioning.md b/.kiro/steering/versioning.md index 51584ffc..69fe0dc0 100644 --- a/.kiro/steering/versioning.md +++ b/.kiro/steering/versioning.md @@ -17,7 +17,7 @@ Example: `1.0.0-beta.1`, `1.1.0` 2. Run `bash scripts/common/sync-version.sh` 3. Commit both the `VERSION` file and the updated manifests -The sync script updates `backend/pyproject.toml`, `frontend/ai.client/package.json`, `infrastructure/package.json`, and regenerates `backend/uv.lock` and `package-lock.json` for both npm projects. All updated files (including lockfiles) must be committed. +The sync script updates `backend/pyproject.toml`, `frontend/ai.client/package.json`, `infrastructure/package.json`, the `README.md` version badge and "Current release" text, and regenerates `backend/uv.lock` and `package-lock.json` for both npm projects. All updated files (including lockfiles) must be committed. ## PR Gate diff --git a/README.md b/README.md index 8e9ce824..399775d7 100644 --- a/README.md +++ b/README.md @@ -8,7 +8,7 @@ **An open-source, production-ready Generative AI platform for institutions** *Built by Boise State University, designed for everyone.* -[![Release](https://img.shields.io/badge/Release-v1.0.0--beta.19-6366f1?style=flat&logo=github&logoColor=white)](RELEASE_NOTES.md) +[![Release](https://img.shields.io/badge/Release-v1.0.0--beta.20-6366f1?style=flat&logo=github&logoColor=white)](RELEASE_NOTES.md) [![Nightly](https://github.com/Boise-State-Development/agentcore-public-stack/actions/workflows/nightly.yml/badge.svg)](https://github.com/Boise-State-Development/agentcore-public-stack/actions/workflows/nightly.yml) ![Python](https://img.shields.io/badge/Python-3.13+-3776AB?style=flat&logo=python&logoColor=white) @@ -257,7 +257,7 @@ agentcore-public-stack/ See [RELEASE_NOTES.md](RELEASE_NOTES.md) for the full changelog, including new features, bug fixes, platform upgrades, and deployment notes for each release. -**Current release:** v1.0.0-beta.19 +**Current release:** v1.0.0-beta.20 --- diff --git a/RELEASE_NOTES.md b/RELEASE_NOTES.md index e97a3627..409b42ad 100644 --- a/RELEASE_NOTES.md +++ b/RELEASE_NOTES.md @@ -1,3 +1,259 @@ +# Release Notes — v1.0.0-beta.20 + +**Release Date:** April 1, 2026 +**Previous Release:** v1.0.0-beta.19 (March 25, 2026) + +--- + +## Highlights + +This release delivers **reliable document deletion** with a soft-delete lifecycle and background cleanup, a **displayText system** that preserves original user messages when RAG augmentation or file attachments modify the prompt, a **fine-tuning cost dashboard** for admin visibility into SageMaker training spend, and a major **dependency refresh** across all three ecosystems via Dependabot. The security and code quality hardening from the initial beta.20 scope is also included — all CodeQL findings resolved, four Dependabot security vulnerabilities patched, cyclic imports eliminated, and silent exception swallowing replaced with proper logging. + +--- + +## Reliable Document Deletion + +Document deletion has been rearchitected with a soft-delete pattern and background cleanup to prevent orphaned S3 objects and vector embeddings. + +### Soft-Delete Lifecycle + +Documents now transition through a `deleting` status before removal. The delete endpoint marks the document immediately and returns, while cleanup runs asynchronously. A DynamoDB TTL field (7-day expiry) acts as a backstop for failed cleanups. + +### Cleanup Service + +A new `cleanup_service.py` handles retry logic for S3 vector deletion and source file removal. Deterministic vector key generation ensures reliable cleanup even if the original ingestion metadata is incomplete. + +### Search Filtering + +The search path now filters out non-complete documents, preventing stale results from appearing when a document is mid-deletion. The RAG service cross-checks document status during search. + +### Assistant Deletion + +When an assistant is deleted, all associated documents are batch soft-deleted with background cleanup. A new `delete_vectors_for_assistant` function removes embeddings from the vector store by assistant ID. + +### Upload Failure Reporting + +A new `POST /{document_id}/upload-failed` endpoint allows the frontend to report client-side upload errors, marking documents as failed with error details for debugging. + +### Test Coverage + +4,200+ lines of new tests across property-based tests (cleanup service, document deletion, search filtering, vector deletion) and integration tests (delete endpoints, cleanup service, document deletion flows). + +--- + +## DisplayText for RAG-Augmented and File Attachment Messages + +When RAG augmentation or file attachments modify the user's prompt before sending it to the agent, the original message text is now preserved and displayed in the UI instead of the augmented version. + +### How It Works + +- The `stream_async` and `StreamCoordinator` accept an `original_message` parameter to capture the user's input before modification +- When the original differs from the augmented version, a `displayText` metadata record (`D#` prefix) is stored in DynamoDB alongside the cost record +- The metadata retrieval path queries both cost records (`C#`) and display text records (`D#`) +- The frontend `user-message` component renders `displayText` when available, falling back to the stored message content + +### Debug Output Toggle + +A new `showDebugOutput` setting in Chat Preferences lets users toggle visibility of debug information, useful for inspecting what the agent actually received versus what the UI displays. + +--- + +## Fine-Tuning Cost Dashboard + +A new admin page provides visibility into SageMaker fine-tuning costs and usage. + +### Admin Cost Endpoint + +`GET /admin/fine-tuning/costs` returns aggregated cost data for fine-tuning jobs, with per-user breakdowns showing training hours consumed and quota utilization. + +### Default Quota Hours + +Fine-tuning access control now supports a default monthly quota for users without explicit grants, configurable via `CDK_FINE_TUNING_DEFAULT_QUOTA_HOURS` in the infrastructure config. + +### Frontend + +A dedicated `/admin/fine-tuning-costs` page displays cost summaries, per-user breakdowns, and usage statistics with period selection. + +### Fine-Tuning Dashboard Polish + +The fine-tuning dashboard also received an informational section explaining the fine-tuning workflow and updated icons for better visual clarity. + +--- + +## Assistant Simplification + +### Archive Removal + +The assistant archive functionality has been removed entirely. The `ARCHIVED` status, `archive_assistant` endpoint, and `include_archived` query parameter are gone. Assistants now have a single delete operation — simpler lifecycle, less code. + +--- + +## Conversation Sharing Fixes + +### Shared Conversation Deletion + +Deleting a session now properly cascades to associated shared conversations. The shares service cleans up all share records when the parent session is deleted, and the frontend session list reflects the deletion state correctly. + +### Message Export Fix + +The share export feature (`POST /shares/{share_id}/export`) was failing to persist messages to AgentCore Memory. Fixed by switching from the deprecated `append_message` API to `create_message` with proper `SessionMessage` wrapping and index-based ordering. + +### UI Improvements + +- Shared conversation header simplified — metadata and export button repositioned for cleaner layout +- Export button moved to a floating action bar at the bottom of the shared view +- Icon updates: share icon replaced with `heroAdjustmentsHorizontal` in session management, `heroChatBubbleLeftRight` in shared view header + +--- + +## Testing Infrastructure + +### Analog.js Migration + +Frontend testing has been migrated to Analog.js tooling (`@analogjs/vite-plugin-angular` and `@analogjs/vitest-angular` v3.0.0-alpha.18). The standalone `vitest.config.ts` has been removed in favor of Analog.js configuration. Analog.js dependencies are pinned to exact versions per the supply chain policy. + +### Property-Based Testing + +`fast-check` has been added as a dev dependency (v4.6.0, exact pin) for property-based testing in the frontend test suite. + +--- + +## Security Vulnerability Patches + +Four Dependabot-flagged vulnerabilities have been patched across all three package ecosystems: + +| Package | Version Change | Severity | Issue | +|---------|---------------|----------|-------| +| `requests` (Python) | 2.32.5 → 2.33.0 | Medium | Insecure temp file reuse in `extract_zipped_paths()` | +| `picomatch` (frontend) | 4.0.3 → 4.0.4 | High / Medium | ReDoS via extglob quantifiers; method injection in POSIX character classes | +| `picomatch` (infrastructure) | 2.3.1 → 2.3.2 | Medium | Method injection in POSIX character classes | +| `diff` (infrastructure) | patched | Low | DoS in `parsePatch` / `applyPatch` | + +Frontend and infrastructure `picomatch` fixes use npm `overrides` to force patched versions through transitive dependency trees (`@angular-devkit/core`, `@angular/build`). + +**Known unfixable:** `yaml@1.10.2` is bundled inside `aws-cdk-lib@2.244.0` (latest) — awaiting an AWS CDK update. `Pygments@2.19.2` (latest) has no patched version yet. + +--- + +## CodeQL Remediation — All Findings Resolved + +Two passes resolved every open CodeQL finding on `develop`, covering 130+ files across Python, TypeScript, and GitHub Actions. + +### Log Injection (180 fixes) + +User-controlled values removed from f-string log statements across the entire backend. All logging now uses `%s`-style parameterized formatting, preventing log injection attacks where user input could forge log entries. + +### Silent Exception Swallowing (5 fixes) + +Empty `except: pass` blocks — a recurring source of hidden bugs — have been eliminated: + +- **`event_formatter.py`** — Errors during final result extraction now log a warning instead of vanishing silently. This was masking streaming failures that were impossible to diagnose. +- **`url_fetcher.py`** — Bare `except:` (catching `BaseException` including `KeyboardInterrupt`) narrowed to `Exception` with an explanatory comment. +- **`code_interpreter_diagram_tool.py`** — Same bare `except:` fix as above. +- **`admin/users/service.py`** — Invalid pagination cursors now log a warning instead of silently resetting to page 1. +- **`tool_result_processor.py`** — `JSONDecodeError` catch annotated with intent comment. + +### Cyclic Import Eliminated + +The circular dependency between `metadata_storage.py` and `dynamodb_storage.py` has been broken by moving the `get_metadata_storage()` factory function to the package `__init__.py`. The dependency graph is now one-directional: + +``` +storage/__init__.py (factory) → dynamodb_storage.py → metadata_storage.py (ABC) +``` + +Three callers updated to import from `apis.app_api.storage` instead of `apis.app_api.storage.metadata_storage`. + +### Other Fixes + +- **Unreachable code** — Dead `if result_seen: break` removed from `stream_processor.py` (`result_seen` was initialized to `False` and never set to `True`) +- **Redundant assignment** — Unused `job =` on `create_inference_job()` call removed in fine-tuning routes +- **Print during import** — `print()` statements in `inference_api/main.py` replaced with `logging` +- **Commented-out code** — Stale `InvocationRequest` class removed from inference API models +- **Unnecessary lambdas** — `lambda v: int(v)` simplified to `int` in fine-tuning repositories +- **13 unused local variables** removed across 10 files +- **3 unused imports** removed (including dead re-exports in `bedrock_embeddings.py`) + +### False Positives Dismissed (11 alerts) + +- 9× `actions/untrusted-checkout` on nightly workflows — these are schedule/dispatch only, never triggered by PRs +- 1× `py/non-iterable-in-for-loop` — iterating over `Enum` members is valid Python +- 1× `py/unused-global-variable` — `_generic_validator_initialized` is used via `global` statement (CodeQL doesn't track this) + +--- + +## RAG Ingestion Fixes + +### Lambda Image Digest Refresh + +Fixed an issue where RAG ingestion Lambda deployments would report "no changes" even after pushing a fresh Docker image. The root cause: CDK resolves the image tag via SSM at synth time, and if the tag hasn't changed (only the underlying layers), CloudFormation sees no diff. The deploy script now explicitly calls `update-function-code` after image push to force a digest refresh, with a wait condition to ensure the update completes. + +### Shared Embeddings Module + +Added the shared embeddings package to the RAG ingestion Lambda Docker image, resolving import errors when `bedrock_embeddings.py` attempted to load re-exported functions from `apis.shared.embeddings`. + +--- + +## CI/CD Improvements + +### PR Workflow Optimization + +CDK synthesis (`synth-cdk`) is now skipped on pull requests in the app-api workflow, matching the existing pattern for Docker builds and deployments. PRs no longer require AWS credentials for the synth step. + +### GitHub Actions Updates + +- `actions/upload-artifact` upgraded from 6.0.0 to 7.0.0 +- `actions/download-artifact` upgraded from 7.0.0 to 8.0.1 +- `actions/setup-node` upgraded from 5.0.0 to 6.3.0 +- `github/codeql-action` upgraded to latest SHA + +--- + +## Dependency Upgrades + +| Component | From | To | +|---|---|---| +| uvicorn | 0.35.0 | 0.42.0 | +| boto3 | 1.42.73 | 1.42.78 | +| strands-agents | 1.32.0 | 1.33.0 | +| strands-agents-tools | 0.2.23 | 0.3.0 | +| aws-opentelemetry-distro | 0.14.2 | 0.16.0 | +| bedrock-agentcore | 1.4.7 | 1.4.8 | +| openai | 2.29.0 | 2.30.0 | +| google-genai | 1.68.0 | 1.69.0 | +| cachetools | 7.0.5 | 6.2.4 (downgraded for aws-opentelemetry-distro compatibility) | +| hypothesis | 6.151.9 | 6.151.10 | +| ruff | 0.15.7 | 0.15.8 | +| Angular packages | 21.2.5 | 21.2.6 | +| @angular/cdk | 21.2.3 | 21.2.4 | +| @angular/build | 21.2.3 | 21.2.5 | +| @angular/cli | 21.2.3 | 21.2.5 | +| ng2-charts | bumped | latest | +| aws-cdk-lib | 2.244.0 | latest | +| constructs | bumped | latest | +| jest / @types/jest | bumped | latest | +| jsdom | bumped | 29.0.1 | + +--- + +## Test Fixes + +- Removed stale `AgentCoreMemorySessionManager` mock patch from session factory tests — the previous CodeQL commit correctly removed the unused import, but the test was still patching it at the old module path +- Updated shared view page spec with expanded test coverage (254 lines rewritten) +- Updated share export tests to match the new `create_message` API + +--- + +## Deployment Notes + +This release includes new backend endpoints and frontend pages but no new infrastructure resources (no new DynamoDB tables or S3 buckets). All changes are backward-compatible. + +- **Backend:** Restart App API and Inference API containers to pick up document deletion, displayText, cost dashboard, and dependency upgrades +- **Frontend:** Rebuild and deploy to pick up Analog.js testing migration, displayText rendering, cost dashboard page, and `picomatch` security patch +- **Infrastructure:** Run `npm install` to pick up `picomatch` and `diff` patches in lockfile. Redeploy if using fine-tuning to pick up the default quota hours config. +- **RAG Ingestion:** Redeploy to pick up the Lambda image digest fix and shared embeddings module + +--- + # Release Notes — v1.0.0-beta.19 **Release Date:** March 25, 2026 diff --git a/VERSION b/VERSION index 3282efda..ec46bbaa 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -1.0.0-beta.19 +1.0.0-beta.20 diff --git a/backend/Dockerfile.rag-ingestion b/backend/Dockerfile.rag-ingestion index d6dfd664..01b56282 100644 --- a/backend/Dockerfile.rag-ingestion +++ b/backend/Dockerfile.rag-ingestion @@ -134,4 +134,11 @@ ENV DOCLING_ARTIFACTS_PATH=/opt/ml/models/docling-artifacts \ # 5. Copy Your Handler Code COPY backend/src/apis/app_api/documents/ingestion/ ${LAMBDA_TASK_ROOT} +# 6. Copy shared embeddings module (required by ingestion/embeddings/bedrock_embeddings.py) +# The ingestion embeddings re-export from apis.shared.embeddings, so we need the +# full apis.shared.embeddings package available in LAMBDA_TASK_ROOT. +COPY backend/src/apis/shared/__init__.py ${LAMBDA_TASK_ROOT}/apis/shared/__init__.py +COPY backend/src/apis/shared/embeddings/ ${LAMBDA_TASK_ROOT}/apis/shared/embeddings/ +RUN touch ${LAMBDA_TASK_ROOT}/apis/__init__.py + CMD [ "handler.lambda_handler" ] diff --git a/backend/lambda-functions/runtime-provisioner/lambda_function.py b/backend/lambda-functions/runtime-provisioner/lambda_function.py index 7efeebff..f82c891f 100644 --- a/backend/lambda-functions/runtime-provisioner/lambda_function.py +++ b/backend/lambda-functions/runtime-provisioner/lambda_function.py @@ -12,7 +12,7 @@ import json import os import logging -from typing import Dict, Any, Optional, List +from typing import Dict, Any, Optional from datetime import datetime import sys diff --git a/backend/pyproject.toml b/backend/pyproject.toml index 137b65b7..646eff15 100644 --- a/backend/pyproject.toml +++ b/backend/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "agentcore-stack" -version = "1.0.0-beta.19" +version = "1.0.0-beta.20" requires-python = ">=3.10" description = "Multi-agent conversational AI system with AWS Bedrock AgentCore" readme = "README.md" @@ -14,10 +14,10 @@ license = {text = "MIT"} dependencies = [ # FastAPI and web framework "fastapi==0.135.2", - "uvicorn[standard]==0.35.0", + "uvicorn[standard]==0.42.0", # AWS and cloud services - "boto3==1.42.73", + "boto3==1.42.78", # Utilities "python-dotenv==1.2.2", @@ -29,7 +29,7 @@ dependencies = [ # OAuth provider management "authlib==1.6.9", - "cachetools==7.0.5", + "cachetools==6.2.4", # Security: pin starlette to fix Dependabot alert (transitive via fastapi) "starlette==1.0.0", @@ -38,14 +38,14 @@ dependencies = [ [project.optional-dependencies] # AgentCore-specific dependencies (for inference_api) agentcore = [ - "strands-agents==1.32.0", - "strands-agents-tools==0.2.23", + "strands-agents==1.33.0", + "strands-agents-tools==0.3.0", - "aws-opentelemetry-distro==0.14.2", - "bedrock-agentcore==1.4.7", + "aws-opentelemetry-distro==0.16.0", + "bedrock-agentcore==1.4.8", # Multi-provider LLM support - "openai==2.29.0", # For OpenAI models - "google-genai==1.68.0", # For Google Gemini models + "openai==2.30.0", # For OpenAI models + "google-genai==1.69.0", # For Google Gemini models ] # Document ingestion pipeline dependencies (for Lambda deployment) @@ -59,10 +59,10 @@ dev = [ "pytest==9.0.2", "pytest-asyncio==1.3.0", "pytest-cov==7.1.0", - "hypothesis==6.151.9", + "hypothesis==6.151.10", "moto[dynamodb]==5.1.22", "black==26.3.1", - "ruff==0.15.7", + "ruff==0.15.8", "mypy==1.19.1", "types-aiofiles==25.1.0.20251011", "tiktoken==0.12.0", diff --git a/backend/scripts/seed_auth_provider.py b/backend/scripts/seed_auth_provider.py index ec569e8b..498a9e00 100755 --- a/backend/scripts/seed_auth_provider.py +++ b/backend/scripts/seed_auth_provider.py @@ -50,7 +50,7 @@ import re import sys from datetime import datetime, timezone -from typing import Any, Dict, Optional +from typing import Any, Dict def discover_oidc_endpoints(issuer_url: str) -> Dict[str, Any]: diff --git a/backend/src/agents/builtin_tools/code_interpreter_diagram_tool.py b/backend/src/agents/builtin_tools/code_interpreter_diagram_tool.py index 4793f1a7..d0310ab3 100644 --- a/backend/src/agents/builtin_tools/code_interpreter_diagram_tool.py +++ b/backend/src/agents/builtin_tools/code_interpreter_diagram_tool.py @@ -167,7 +167,6 @@ def generate_diagram_and_validate( "status": "error" } - execution_output = result.get("structuredContent", {}).get("stdout", "") execution_success = True if not execution_success: @@ -224,7 +223,8 @@ def generate_diagram_and_validate( filename = item.get("name", "") if filename: available_files.append(filename) - except: + except Exception: + # Best-effort file listing for error diagnostics; failure is non-critical pass return { diff --git a/backend/src/agents/local_tools/url_fetcher.py b/backend/src/agents/local_tools/url_fetcher.py index 1c4b7b6d..d087081e 100644 --- a/backend/src/agents/local_tools/url_fetcher.py +++ b/backend/src/agents/local_tools/url_fetcher.py @@ -3,9 +3,7 @@ Fetches and extracts text content from web pages """ -import json import logging -from typing import Optional from strands import tool logger = logging.getLogger(__name__) @@ -115,7 +113,8 @@ async def fetch_url_content( title_tag = soup.find('title') if title_tag: title = title_tag.get_text().strip() - except: + except Exception: + # Title extraction is best-effort; fall back to default pass # Extract text diff --git a/backend/src/agents/main_agent/core/model_config.py b/backend/src/agents/main_agent/core/model_config.py index 7313ed23..ac6a6d34 100644 --- a/backend/src/agents/main_agent/core/model_config.py +++ b/backend/src/agents/main_agent/core/model_config.py @@ -2,8 +2,8 @@ Model configuration for multi-provider LLM support (Bedrock, OpenAI, Gemini) """ import os -from typing import Dict, Any, Optional, Literal -from dataclasses import dataclass, field +from typing import Dict, Any, Optional +from dataclasses import dataclass from enum import Enum diff --git a/backend/src/agents/main_agent/integrations/external_mcp_client.py b/backend/src/agents/main_agent/integrations/external_mcp_client.py index 161f8721..403bc9ea 100644 --- a/backend/src/agents/main_agent/integrations/external_mcp_client.py +++ b/backend/src/agents/main_agent/integrations/external_mcp_client.py @@ -12,7 +12,7 @@ import logging import re -from typing import Optional, List, Any, Callable +from typing import Optional, List, Any from mcp.client.streamable_http import streamablehttp_client from strands.tools.mcp import MCPClient @@ -25,7 +25,6 @@ ) from agents.main_agent.integrations.gateway_auth import get_sigv4_auth from agents.main_agent.integrations.oauth_auth import ( - OAuthBearerAuth, CompositeAuth, create_oauth_bearer_auth, ) @@ -243,7 +242,7 @@ async def _get_oauth_token( token = await oauth_service.get_decrypted_token(user_id, provider_id) return token except Exception as e: - logger.error(f"Error getting OAuth token for user {user_id}, provider {provider_id}: {e}") + logger.error("Error getting OAuth token") return None async def load_external_tools( @@ -329,8 +328,7 @@ async def load_external_tools( if not token_to_use: logger.warning( - f"User {user_id} not connected to OAuth provider " - f"'{tool.requires_oauth_provider}' for tool {tool_id}" + "User not connected to required OAuth provider for tool" ) # Still create the client - it will fail gracefully when used # The MCP server should return an appropriate error diff --git a/backend/src/agents/main_agent/integrations/oauth_auth.py b/backend/src/agents/main_agent/integrations/oauth_auth.py index bd22dd09..9d7bc025 100644 --- a/backend/src/agents/main_agent/integrations/oauth_auth.py +++ b/backend/src/agents/main_agent/integrations/oauth_auth.py @@ -6,7 +6,7 @@ """ import logging -from typing import Generator, Optional, Callable, Awaitable +from typing import Generator, Optional, Callable import httpx diff --git a/backend/src/agents/main_agent/main_agent.py b/backend/src/agents/main_agent/main_agent.py index 32d576ba..7648716c 100644 --- a/backend/src/agents/main_agent/main_agent.py +++ b/backend/src/agents/main_agent/main_agent.py @@ -269,7 +269,7 @@ def _create_hooks(self) -> List: return hooks async def stream_async( - self, message: str, session_id: Optional[str] = None, files: Optional[List] = None, citations: Optional[List] = None + self, message: str, session_id: Optional[str] = None, files: Optional[List] = None, citations: Optional[List] = None, original_message: Optional[str] = None ) -> AsyncGenerator[str, None]: """ Stream agent responses @@ -279,6 +279,7 @@ async def stream_async( session_id: Session identifier (defaults to instance session_id) files: Optional list of FileContent objects (with base64 bytes) citations: Optional list of citation dicts from RAG retrieval + original_message: Original user message before RAG augmentation (for clean UI display) Yields: str: SSE formatted events @@ -299,6 +300,7 @@ async def stream_async( user_id=self.user_id, main_agent_wrapper=self, # Pass wrapper for metadata extraction citations=citations, # Pass citations for storage + original_message=original_message, # Pass original message for display text ): yield event diff --git a/backend/src/agents/main_agent/quota/checker.py b/backend/src/agents/main_agent/quota/checker.py index 05671c59..e1451e7e 100644 --- a/backend/src/agents/main_agent/quota/checker.py +++ b/backend/src/agents/main_agent/quota/checker.py @@ -5,7 +5,7 @@ import logging from apis.shared.auth.models import User from apis.app_api.costs.aggregator import CostAggregator -from .models import QuotaTier, QuotaCheckResult +from .models import QuotaCheckResult from .resolver import QuotaResolver from .event_recorder import QuotaEventRecorder diff --git a/backend/src/agents/main_agent/quota/models.py b/backend/src/agents/main_agent/quota/models.py index 321b8a66..614828d5 100644 --- a/backend/src/agents/main_agent/quota/models.py +++ b/backend/src/agents/main_agent/quota/models.py @@ -1,6 +1,6 @@ """Core domain models for quota management system.""" -from pydantic import BaseModel, Field, ConfigDict, field_validator, model_serializer, model_validator +from pydantic import BaseModel, Field, ConfigDict, field_validator from typing import Optional, Literal, Dict, Any from enum import Enum from decimal import Decimal diff --git a/backend/src/agents/main_agent/quota/repository.py b/backend/src/agents/main_agent/quota/repository.py index d0480878..7f7ee96c 100644 --- a/backend/src/agents/main_agent/quota/repository.py +++ b/backend/src/agents/main_agent/quota/repository.py @@ -5,7 +5,6 @@ import boto3 from botocore.exceptions import ClientError import logging -import uuid import os from .models import QuotaTier, QuotaAssignment, QuotaEvent, QuotaAssignmentType, QuotaOverride diff --git a/backend/src/agents/main_agent/session/compaction_models.py b/backend/src/agents/main_agent/session/compaction_models.py index 35fd018c..e11346ca 100644 --- a/backend/src/agents/main_agent/session/compaction_models.py +++ b/backend/src/agents/main_agent/session/compaction_models.py @@ -5,8 +5,7 @@ compaction, which helps manage token usage in long conversations. """ -from dataclasses import dataclass, field -from datetime import datetime, timezone +from dataclasses import dataclass from typing import Optional, Dict, Any import os diff --git a/backend/src/agents/main_agent/session/preview_session_manager.py b/backend/src/agents/main_agent/session/preview_session_manager.py index d3be8721..22bd4402 100644 --- a/backend/src/agents/main_agent/session/preview_session_manager.py +++ b/backend/src/agents/main_agent/session/preview_session_manager.py @@ -9,8 +9,7 @@ """ import logging -from typing import List, Optional, Any -from strands.types.content import Message +from typing import List, Any from strands.types.session import SessionMessage logger = logging.getLogger(__name__) @@ -55,9 +54,9 @@ def __init__(self, session_id: str, user_id: str): self._messages: List[SessionMessage] = [] self._message_index = 0 - logger.info(f"🔍 Preview session manager initialized: {session_id}") - logger.info(f" • In-memory storage only (no persistence)") - logger.info(f" • Multi-turn context: Enabled") + logger.info("🔍 Preview session manager initialized") + logger.info(" • In-memory storage only (no persistence)") + logger.info(" • Multi-turn context: Enabled") def read_session(self, session_id: str, window_id: str = "default") -> List[SessionMessage]: """ diff --git a/backend/src/agents/main_agent/session/session_factory.py b/backend/src/agents/main_agent/session/session_factory.py index 94d1b9b0..eca7c4ba 100644 --- a/backend/src/agents/main_agent/session/session_factory.py +++ b/backend/src/agents/main_agent/session/session_factory.py @@ -18,7 +18,6 @@ # AgentCore Memory integration (optional, only for cloud deployment) try: from bedrock_agentcore.memory.integrations.strands.config import AgentCoreMemoryConfig, RetrievalConfig - from bedrock_agentcore.memory.integrations.strands.session_manager import AgentCoreMemorySessionManager from bedrock_agentcore.memory import MemoryClient AGENTCORE_MEMORY_AVAILABLE = True except ImportError: @@ -100,7 +99,7 @@ def create_session_manager( """ # Check for preview session first - these use in-memory storage only if is_preview_session(session_id): - logger.info(f"🔍 Preview session detected: {session_id}") + logger.info("🔍 Preview session detected") return PreviewSessionManager(session_id=session_id, user_id=user_id) if not AGENTCORE_MEMORY_AVAILABLE: @@ -228,15 +227,14 @@ def _create_cloud_session_manager( summarization_strategy_id=summary_id, ) - logger.info(f"✅ AgentCore Memory initialized: user_id={user_id}") - logger.info(f" • Session: {session_id}, User: {user_id}") - logger.info(f" • Storage: AWS-managed DynamoDB") - logger.info(f" • Short-term memory: Conversation history (90 days retention)") - logger.info(f" • Long-term memory: {'Enabled' if retrieval_config else 'Disabled'} ({len(retrieval_config)} namespaces)") + logger.info("✅ AgentCore Memory initialized") + logger.info(" • Storage: AWS-managed DynamoDB") + logger.info(" • Short-term memory: Conversation history (90 days retention)") + logger.info(" • Long-term memory: %s (%d namespaces)", "Enabled" if retrieval_config else "Disabled", len(retrieval_config)) if compaction_config.enabled: - logger.info(f" • Compaction: Enabled (threshold={compaction_config.token_threshold:,})") + logger.info(" • Compaction: Enabled (threshold=%s)", f"{compaction_config.token_threshold:,}") else: - logger.info(f" • Compaction: Disabled") + logger.info(" • Compaction: Disabled") return session_manager diff --git a/backend/src/agents/main_agent/session/tests/test_compaction.py b/backend/src/agents/main_agent/session/tests/test_compaction.py index ba1e7941..99e9d679 100644 --- a/backend/src/agents/main_agent/session/tests/test_compaction.py +++ b/backend/src/agents/main_agent/session/tests/test_compaction.py @@ -8,7 +8,6 @@ import pytest import copy from agents.main_agent.session.compaction_models import CompactionState, CompactionConfig -from agents.main_agent.session.turn_based_session_manager import TurnBasedSessionManager class TestCompactionState: diff --git a/backend/src/agents/main_agent/session/tests/test_compaction_integration.py b/backend/src/agents/main_agent/session/tests/test_compaction_integration.py index 76c9737f..c99dc918 100644 --- a/backend/src/agents/main_agent/session/tests/test_compaction_integration.py +++ b/backend/src/agents/main_agent/session/tests/test_compaction_integration.py @@ -22,7 +22,6 @@ import uuid import asyncio import logging -from datetime import datetime, timezone # Configure logging logging.basicConfig( diff --git a/backend/src/agents/main_agent/streaming/event_formatter.py b/backend/src/agents/main_agent/streaming/event_formatter.py index e64a2d0b..c096ecc4 100644 --- a/backend/src/agents/main_agent/streaming/event_formatter.py +++ b/backend/src/agents/main_agent/streaming/event_formatter.py @@ -1,7 +1,10 @@ import json +import logging from typing import Dict, Any, List, Tuple from .tool_result_processor import ToolResultProcessor +logger = logging.getLogger(__name__) + class StreamEventFormatter: """Handles formatting of streaming events for SSE""" @@ -41,7 +44,7 @@ def extract_final_result_data(final_result) -> Tuple[List[Dict[str, str]], str]: result_text = " ".join(text_parts) except Exception as e: - pass + logger.warning("Failed to extract final result data: %s", e) return images, result_text diff --git a/backend/src/agents/main_agent/streaming/stream_coordinator.py b/backend/src/agents/main_agent/streaming/stream_coordinator.py index 7fa6b860..a45fe181 100644 --- a/backend/src/agents/main_agent/streaming/stream_coordinator.py +++ b/backend/src/agents/main_agent/streaming/stream_coordinator.py @@ -10,7 +10,7 @@ from datetime import datetime, timezone from typing import Any, AsyncGenerator, Dict, List, Optional, Union -from apis.shared.errors import ConversationalErrorEvent, ErrorCode, StreamErrorEvent, build_conversational_error_event +from apis.shared.errors import ErrorCode, StreamErrorEvent, build_conversational_error_event from .stream_processor import process_agent_stream @@ -38,6 +38,7 @@ async def stream_response( user_id: str, main_agent_wrapper: Any = None, citations: Optional[List] = None, + original_message: Optional[str] = None, ) -> AsyncGenerator[str, None]: """ Stream agent responses with proper lifecycle management @@ -53,6 +54,7 @@ async def stream_response( user_id: User identifier main_agent_wrapper: MainAgent wrapper instance (has model_config, enabled_tools, etc.) citations: Optional list of citation dicts from RAG retrieval to persist with metadata + original_message: Original user message before RAG augmentation (for clean UI display) Yields: str: SSE formatted events @@ -446,6 +448,21 @@ async def stream_response( logger.info(f"✅ Message metadata stored for {len(message_ids_to_store)} assistant messages (parallel)") + # Store displayText for user message if original_message differs from augmented + if original_message: + user_message_index = initial_message_count # User message is first in this turn + try: + from apis.shared.sessions.metadata import store_user_display_text + await store_user_display_text( + session_id=session_id, + user_id=user_id, + message_id=user_message_index, + display_text=original_message, + ) + logger.info(f"💾 Stored displayText for user message {user_message_index}") + except Exception as e: + logger.error(f"Failed to store user displayText: {e}", exc_info=True) + # Update compaction state if session manager supports it # This tracks input token usage and triggers compaction when threshold exceeded if hasattr(session_manager, "update_after_turn"): diff --git a/backend/src/agents/main_agent/streaming/stream_processor.py b/backend/src/agents/main_agent/streaming/stream_processor.py index 0525ff7d..c3491d10 100644 --- a/backend/src/agents/main_agent/streaming/stream_processor.py +++ b/backend/src/agents/main_agent/streaming/stream_processor.py @@ -1288,11 +1288,11 @@ async def mock_stream(): # Yield the metadata event yield processed_event - # If we've seen result, it's safe to break - # Otherwise, continue to next iteration to catch result event - if result_seen: - break - # Don't break yet - continue to catch result event in next iteration + # TODO: result_seen is never set to True — this break is currently dead code. + # When result-event tracking is implemented, set result_seen = True on result events + # and uncomment the break to exit early once both complete + result are seen. + # if result_seen: + # break # STEP 3: Process lifecycle events # NOTE: We process lifecycle events to capture the 'result' event which contains metrics diff --git a/backend/src/agents/main_agent/streaming/tool_result_processor.py b/backend/src/agents/main_agent/streaming/tool_result_processor.py index 7699e112..3060fc7b 100644 --- a/backend/src/agents/main_agent/streaming/tool_result_processor.py +++ b/backend/src/agents/main_agent/streaming/tool_result_processor.py @@ -92,6 +92,7 @@ def _extract_basic_content(tool_result: Dict[str, Any]) -> Tuple[str, List[Dict[ tool_result = tool_result.copy() tool_result["content"] = parsed_content except json.JSONDecodeError: + # Content is not valid JSON; leave it as a plain string pass if "content" in tool_result: @@ -292,8 +293,6 @@ def _handle_python_mcp_base64( # Pattern to match Base64 data URLs with optional filename attribute base64_pattern = r'data:([^;]+);base64,([A-Za-z0-9+/=\s]+?)' - matches = re.findall(base64_pattern, result_text) - def process_base64_match(match): custom_filename = match.group(1) # May be None if not provided mime_type = match.group(2) diff --git a/backend/src/agents/main_agent/tools/oauth_tool_service.py b/backend/src/agents/main_agent/tools/oauth_tool_service.py index 0e6e5ab5..f3abdb8b 100644 --- a/backend/src/agents/main_agent/tools/oauth_tool_service.py +++ b/backend/src/agents/main_agent/tools/oauth_tool_service.py @@ -34,7 +34,6 @@ async def my_oauth_tool(query: str, tool_context: ToolContext) -> dict: import os from dataclasses import dataclass from typing import Optional -from urllib.parse import urlencode logger = logging.getLogger(__name__) diff --git a/backend/src/agents/main_agent/tools/tool_catalog.py b/backend/src/agents/main_agent/tools/tool_catalog.py index c0a220c0..2057f1d8 100644 --- a/backend/src/agents/main_agent/tools/tool_catalog.py +++ b/backend/src/agents/main_agent/tools/tool_catalog.py @@ -4,7 +4,7 @@ Provides tool metadata for authorization, UI display, and discovery. Tools are identified by their function name (tool_id). """ -from dataclasses import dataclass, field +from dataclasses import dataclass from typing import List, Dict, Optional from enum import Enum diff --git a/backend/src/agents/main_agent/utils/timezone.py b/backend/src/agents/main_agent/utils/timezone.py index 618a6ef4..9b6ef363 100644 --- a/backend/src/agents/main_agent/utils/timezone.py +++ b/backend/src/agents/main_agent/utils/timezone.py @@ -6,16 +6,17 @@ logger = logging.getLogger(__name__) -# Import timezone support (zoneinfo for Python 3.9+, fallback to pytz) +# Check timezone support availability (zoneinfo for Python 3.9+, fallback to pytz) try: - from zoneinfo import ZoneInfo + from zoneinfo import ZoneInfo # noqa: F401 TIMEZONE_AVAILABLE = True except ImportError: try: - import pytz - TIMEZONE_AVAILABLE = True - except ImportError: + from importlib.util import find_spec + TIMEZONE_AVAILABLE = find_spec("pytz") is not None + except Exception: TIMEZONE_AVAILABLE = False + if not TIMEZONE_AVAILABLE: logger.warning("Neither zoneinfo nor pytz available - date will use UTC") diff --git a/backend/src/agents/utils/config.py b/backend/src/agents/utils/config.py index 94e0ff24..8a7497c0 100644 --- a/backend/src/agents/utils/config.py +++ b/backend/src/agents/utils/config.py @@ -1,7 +1,6 @@ """ Configuration management for AgentCore """ -import os from pathlib import Path diff --git a/backend/src/apis/app_api/admin/auth_providers/routes.py b/backend/src/apis/app_api/admin/auth_providers/routes.py index 9b70bffb..c0e4aec6 100644 --- a/backend/src/apis/app_api/admin/auth_providers/routes.py +++ b/backend/src/apis/app_api/admin/auth_providers/routes.py @@ -5,7 +5,6 @@ """ import logging -from typing import Optional from fastapi import APIRouter, Depends, HTTPException, Query, status @@ -36,7 +35,7 @@ async def list_auth_providers( admin_user: User = Depends(require_system_admin), ) -> AuthProviderListResponse: """List all configured OIDC authentication providers.""" - logger.info(f"Admin {admin_user.email} listing auth providers (enabled_only={enabled_only})") + logger.info("Admin listing auth providers") service = get_auth_provider_service() providers = await service.list_providers(enabled_only=enabled_only) @@ -64,7 +63,7 @@ async def get_runtime_image_tag( import boto3 from botocore.exceptions import ClientError - logger.info(f"Admin {admin_user.email} requesting runtime image tag") + logger.info("Admin requesting runtime image tag") project_prefix = os.environ.get("PROJECT_PREFIX", "agentcore") param_name = f"/{project_prefix}/inference-api/image-tag" @@ -105,7 +104,7 @@ async def discover_oidc_endpoints( Fetches the .well-known/openid-configuration document and returns the discovered endpoints, supported scopes, and claims. """ - logger.info(f"Admin {admin_user.email} discovering OIDC endpoints for: {request.issuer_url}") + logger.info("Admin discovering OIDC endpoints") service = get_auth_provider_service() return await service.discover_endpoints(request.issuer_url) @@ -121,7 +120,7 @@ async def get_auth_provider( admin_user: User = Depends(require_system_admin), ) -> AuthProviderResponse: """Get a specific authentication provider by ID.""" - logger.info(f"Admin {admin_user.email} requesting auth provider: {provider_id}") + logger.info("Admin requesting auth provider") service = get_auth_provider_service() provider = await service.get_provider(provider_id) @@ -151,7 +150,7 @@ async def create_auth_provider( If endpoints are not provided, they will be auto-discovered from the issuer URL's .well-known/openid-configuration endpoint. """ - logger.info(f"Admin {admin_user.email} creating auth provider: {data.provider_id}") + logger.info("Admin creating auth provider") try: service = get_auth_provider_service() @@ -180,7 +179,7 @@ async def update_auth_provider( Only provided fields are updated. If issuer_url is changed, endpoints are re-discovered automatically. """ - logger.info(f"Admin {admin_user.email} updating auth provider: {provider_id}") + logger.info("Admin updating auth provider") try: service = get_auth_provider_service() @@ -210,7 +209,7 @@ async def delete_auth_provider( admin_user: User = Depends(require_system_admin), ) -> None: """Delete an authentication provider and its client secret.""" - logger.info(f"Admin {admin_user.email} deleting auth provider: {provider_id}") + logger.info("Admin deleting auth provider") service = get_auth_provider_service() deleted = await service.delete_provider(provider_id) @@ -237,7 +236,7 @@ async def discover_oidc_endpoints( Fetches the .well-known/openid-configuration document and returns the discovered endpoints, supported scopes, and claims. """ - logger.info(f"Admin {admin_user.email} discovering OIDC endpoints for: {request.issuer_url}") + logger.info("Admin discovering OIDC endpoints") service = get_auth_provider_service() return await service.discover_endpoints(request.issuer_url) @@ -255,7 +254,7 @@ async def test_auth_provider( Test provider connectivity by verifying JWKS, discovery, and token endpoints are reachable. """ - logger.info(f"Admin {admin_user.email} testing auth provider: {provider_id}") + logger.info("Admin testing auth provider") service = get_auth_provider_service() return await service.test_provider(provider_id) diff --git a/backend/src/apis/app_api/admin/costs/routes.py b/backend/src/apis/app_api/admin/costs/routes.py index cdd61c04..8c451751 100644 --- a/backend/src/apis/app_api/admin/costs/routes.py +++ b/backend/src/apis/app_api/admin/costs/routes.py @@ -6,7 +6,7 @@ All endpoints require admin authentication via JWT token with Admin or SuperAdmin role. """ -from fastapi import APIRouter, Depends, Query, HTTPException, status +from fastapi import APIRouter, Depends, Query, HTTPException from fastapi.responses import StreamingResponse from typing import Optional, List import logging @@ -90,7 +90,7 @@ async def get_cost_dashboard( - 500 if server error """ logger.info( - f"Admin {admin_user.email} requesting cost dashboard for period={period}" + "Admin requesting cost dashboard" ) try: @@ -155,8 +155,7 @@ async def get_top_users( - 500 if server error """ logger.info( - f"Admin {admin_user.email} requesting top {limit} users " - f"for period={period}, min_cost={min_cost}" + "Admin requesting top users" ) try: @@ -211,8 +210,7 @@ async def get_system_summary( - 500 if server error """ logger.info( - f"Admin {admin_user.email} requesting system summary " - f"for {period_type} period={period}" + "Admin requesting system summary" ) try: @@ -260,7 +258,7 @@ async def get_usage_by_model( - 500 if server error """ logger.info( - f"Admin {admin_user.email} requesting model usage for period={period}" + "Admin requesting model usage" ) try: @@ -304,7 +302,7 @@ async def get_usage_by_tier( - 500 if server error """ logger.info( - f"Admin {admin_user.email} requesting tier usage for period={period}" + "Admin requesting tier usage" ) try: @@ -357,8 +355,7 @@ async def get_cost_trends( - 500 if server error """ logger.info( - f"Admin {admin_user.email} requesting trends " - f"from {start_date} to {end_date}" + "Admin requesting cost trends" ) try: @@ -413,7 +410,7 @@ async def export_cost_data( - 500 if server error """ logger.info( - f"Admin {admin_user.email} exporting {format} data for period={period}" + "Admin exporting cost data" ) try: diff --git a/backend/src/apis/app_api/admin/costs/service.py b/backend/src/apis/app_api/admin/costs/service.py index e24dff8c..b6f37cb9 100644 --- a/backend/src/apis/app_api/admin/costs/service.py +++ b/backend/src/apis/app_api/admin/costs/service.py @@ -91,7 +91,7 @@ async def get_top_users( List of TopUserCost sorted by cost descending. """ period = period or self._get_current_period() - logger.info(f"Getting top {limit} users by cost for period {period}") + logger.info("Getting top users by cost for period") try: users_data = await self.storage.get_top_users_by_cost( @@ -114,7 +114,7 @@ async def get_top_users( quota_percentage=None )) - logger.info(f"Retrieved {len(result)} top users for period {period}") + logger.info("Retrieved top users for period") return result except Exception as e: @@ -144,7 +144,7 @@ async def get_system_summary( else: period = period or self._get_current_period() - logger.info(f"Getting system summary for {period_type} period {period}") + logger.info("Getting system summary for period") try: summary_data = await self.storage.get_system_summary( @@ -154,7 +154,7 @@ async def get_system_summary( if not summary_data: # Return empty summary if no data exists - logger.warning(f"No system summary found for {period}") + logger.warning("No system summary found for period") return SystemCostSummary( period=period, period_type=period_type, @@ -201,7 +201,7 @@ async def get_usage_by_model( List of ModelUsageSummary sorted by cost descending. """ period = period or self._get_current_period() - logger.info(f"Getting model usage for period {period}") + logger.info("Getting model usage for period") try: model_data = await self.storage.get_model_usage(period=period) @@ -248,8 +248,8 @@ async def get_usage_by_tier( Returns: List of TierUsageSummary (currently empty, placeholder). """ - period = period or self._get_current_period() - logger.info(f"Getting tier usage for period {period} (placeholder)") + _period = period or self._get_current_period() # TODO: use once tier aggregation is implemented + logger.info("Getting tier usage for period") # TODO: Implement tier usage aggregation # This requires: @@ -277,7 +277,7 @@ async def get_daily_trends( Returns: List of CostTrend sorted by date ascending. """ - logger.info(f"Getting daily trends from {start_date} to {end_date}") + logger.info("Getting daily trends for date range") # Validate date range (max 90 days) try: @@ -338,8 +338,7 @@ async def get_dashboard( """ period = period or self._get_current_period() logger.info( - f"Building admin cost dashboard for period {period} " - f"(top_users={top_users_limit}, include_trends={include_trends})" + "Building admin cost dashboard for period" ) # Get system summary diff --git a/backend/src/apis/app_api/admin/fine_tuning/models.py b/backend/src/apis/app_api/admin/fine_tuning/models.py index 599e5d96..ccca8511 100644 --- a/backend/src/apis/app_api/admin/fine_tuning/models.py +++ b/backend/src/apis/app_api/admin/fine_tuning/models.py @@ -20,3 +20,26 @@ class AccessListResponse(BaseModel): """Response for listing all access grants.""" grants: List[FineTuningAccessGrant] total_count: int + + +# ========== Cost Dashboard Models ========== + + +class UserCostBreakdown(BaseModel): + """Per-user cost breakdown for a billing period.""" + email: str + total_cost_usd: float + total_gpu_hours: float + training_job_count: int + inference_job_count: int + + +class FineTuningCostDashboard(BaseModel): + """Aggregated cost dashboard for admin fine-tuning cost view.""" + period: str = Field(description="YYYY-MM billing period") + total_cost_usd: float + total_gpu_hours: float + active_user_count: int + training_job_count: int + inference_job_count: int + users: List[UserCostBreakdown] diff --git a/backend/src/apis/app_api/admin/fine_tuning/routes.py b/backend/src/apis/app_api/admin/fine_tuning/routes.py index dbedfdf6..33551762 100644 --- a/backend/src/apis/app_api/admin/fine_tuning/routes.py +++ b/backend/src/apis/app_api/admin/fine_tuning/routes.py @@ -1,5 +1,7 @@ """Admin API routes for fine-tuning access management.""" +from collections import defaultdict +from datetime import datetime, timezone from typing import Optional from fastapi import APIRouter, Depends, HTTPException, Query, status import logging @@ -23,7 +25,13 @@ InferenceJobResponse, InferenceJobListResponse, ) -from .models import GrantAccessRequest, UpdateQuotaRequest, AccessListResponse +from .models import ( + GrantAccessRequest, + UpdateQuotaRequest, + AccessListResponse, + UserCostBreakdown, + FineTuningCostDashboard, +) logger = logging.getLogger(__name__) @@ -52,7 +60,7 @@ async def list_access( repo: FineTuningAccessRepository = Depends(get_repository), ): """List all users with fine-tuning access (admin only).""" - logger.info(f"Admin {admin_user.email} listing fine-tuning access grants") + logger.info("Admin listing fine-tuning access grants") try: grants = repo.list_access() @@ -72,7 +80,7 @@ async def grant_access( repo: FineTuningAccessRepository = Depends(get_repository), ): """Grant fine-tuning access to a user by email (admin only).""" - logger.info(f"Admin {admin_user.email} granting fine-tuning access to {request.email}") + logger.info("Admin granting fine-tuning access") try: grant = repo.grant_access( @@ -95,7 +103,7 @@ async def get_access( repo: FineTuningAccessRepository = Depends(get_repository), ): """Get fine-tuning access info for a specific user (admin only).""" - logger.info(f"Admin {admin_user.email} getting fine-tuning access for {email}") + logger.info("Admin getting fine-tuning access") grant = repo.get_access(email) if not grant: @@ -114,10 +122,7 @@ async def update_quota( repo: FineTuningAccessRepository = Depends(get_repository), ): """Update GPU-hour quota for a user (admin only).""" - logger.info( - f"Admin {admin_user.email} updating quota for {email} " - f"to {request.monthly_quota_hours} hours" - ) + logger.info("Admin updating fine-tuning quota") try: result = repo.update_quota(email, request.monthly_quota_hours) @@ -141,7 +146,7 @@ async def revoke_access( repo: FineTuningAccessRepository = Depends(get_repository), ): """Revoke fine-tuning access for a user (admin only).""" - logger.info(f"Admin {admin_user.email} revoking fine-tuning access for {email}") + logger.info("Admin revoking fine-tuning access") try: success = repo.revoke_access(email) @@ -166,7 +171,7 @@ async def list_all_jobs( jobs_repo: FineTuningJobsRepository = Depends(get_jobs_repository), ): """List all fine-tuning jobs across all users (admin only).""" - logger.info(f"Admin {admin_user.email} listing all fine-tuning jobs (status={status_filter})") + logger.info("Admin listing all fine-tuning jobs") try: jobs = jobs_repo.list_all_jobs(status_filter=status_filter) @@ -188,7 +193,7 @@ async def list_all_inference_jobs( inf_repo: InferenceRepository = Depends(get_inf_repository), ): """List all inference jobs across all users (admin only).""" - logger.info(f"Admin {admin_user.email} listing all inference jobs (status={status_filter})") + logger.info("Admin listing all inference jobs") try: jobs = inf_repo.list_all_inference_jobs(status_filter=status_filter) @@ -199,3 +204,110 @@ async def list_all_inference_jobs( except Exception as e: logger.error(f"Error listing all inference jobs: {e}") raise HTTPException(status_code=500, detail="Internal server error") + + +# ========== Cost Dashboard ========== + +def _date_range_for_period(period: str) -> tuple[str, str]: + """Return (start_iso, end_iso) for a YYYY-MM period string.""" + year, month = int(period[:4]), int(period[5:7]) + start = datetime(year, month, 1, tzinfo=timezone.utc) + if month == 12: + end = datetime(year + 1, 1, 1, tzinfo=timezone.utc) + else: + end = datetime(year, month + 1, 1, tzinfo=timezone.utc) + return start.isoformat(), end.isoformat() + + +@router.get("/costs", response_model=FineTuningCostDashboard) +async def get_cost_dashboard( + month: Optional[str] = Query( + None, + description="Billing period in YYYY-MM format. Defaults to current month.", + regex=r"^\d{4}-\d{2}$", + ), + admin_user: User = Depends(require_admin), + jobs_repo: FineTuningJobsRepository = Depends(get_jobs_repository), + inf_repo: InferenceRepository = Depends(get_inf_repository), +): + """Get aggregated fine-tuning cost dashboard for a billing period. + + Queries the StatusIndex GSI for Completed and Stopped jobs within + the requested month, then aggregates costs by user in application code. + """ + period = month or datetime.now(timezone.utc).strftime("%Y-%m") + safe_period = period.replace("\n", "").replace("\r", "") + logger.info("Admin requesting fine-tuning cost dashboard for %s", safe_period) + + try: + start_iso, end_iso = _date_range_for_period(period) + + # Query training jobs (Completed + Stopped) via StatusIndex GSI + training_completed = jobs_repo.query_jobs_by_status_and_date("Completed", start_iso, end_iso) + training_stopped = jobs_repo.query_jobs_by_status_and_date("Stopped", start_iso, end_iso) + all_training = training_completed + training_stopped + + # Query inference jobs (Completed + Stopped) via StatusIndex GSI + inf_completed = inf_repo.query_jobs_by_status_and_date("Completed", start_iso, end_iso) + inf_stopped = inf_repo.query_jobs_by_status_and_date("Stopped", start_iso, end_iso) + all_inference = inf_completed + inf_stopped + + # Aggregate by user email + user_data: dict[str, dict] = defaultdict( + lambda: { + "total_cost_usd": 0.0, + "total_gpu_hours": 0.0, + "training_job_count": 0, + "inference_job_count": 0, + } + ) + + for job in all_training: + email = job.get("email", "unknown") + cost = job.get("estimated_cost_usd") or 0.0 + billable = job.get("billable_seconds") or 0 + user_data[email]["total_cost_usd"] += cost + user_data[email]["total_gpu_hours"] += billable / 3600 + user_data[email]["training_job_count"] += 1 + + for job in all_inference: + email = job.get("email", "unknown") + cost = job.get("estimated_cost_usd") or 0.0 + billable = job.get("billable_seconds") or 0 + user_data[email]["total_cost_usd"] += cost + user_data[email]["total_gpu_hours"] += billable / 3600 + user_data[email]["inference_job_count"] += 1 + + # Build per-user breakdowns sorted by cost descending + users = sorted( + [ + UserCostBreakdown( + email=email, + total_cost_usd=round(data["total_cost_usd"], 4), + total_gpu_hours=round(data["total_gpu_hours"], 2), + training_job_count=data["training_job_count"], + inference_job_count=data["inference_job_count"], + ) + for email, data in user_data.items() + ], + key=lambda u: u.total_cost_usd, + reverse=True, + ) + + total_cost = sum(u.total_cost_usd for u in users) + total_hours = sum(u.total_gpu_hours for u in users) + total_training = sum(u.training_job_count for u in users) + total_inference = sum(u.inference_job_count for u in users) + + return FineTuningCostDashboard( + period=period, + total_cost_usd=round(total_cost, 4), + total_gpu_hours=round(total_hours, 2), + active_user_count=len(users), + training_job_count=total_training, + inference_job_count=total_inference, + users=users, + ) + except Exception as e: + logger.error(f"Error building fine-tuning cost dashboard: {e}") + raise HTTPException(status_code=500, detail="Internal server error") diff --git a/backend/src/apis/app_api/admin/oauth/routes.py b/backend/src/apis/app_api/admin/oauth/routes.py index 4e6d3b2c..e5dee9b1 100644 --- a/backend/src/apis/app_api/admin/oauth/routes.py +++ b/backend/src/apis/app_api/admin/oauth/routes.py @@ -49,7 +49,7 @@ async def list_providers( Returns: OAuthProviderListResponse with all providers """ - logger.info(f"Admin {admin.email} listing OAuth providers") + logger.info("Admin listing OAuth providers") providers = await provider_repo.list_providers(enabled_only=enabled_only) @@ -80,7 +80,7 @@ async def get_provider( Raises: HTTPException: 404 if provider not found """ - logger.info(f"Admin {admin.email} getting OAuth provider: {provider_id}") + logger.info("Admin getting OAuth provider") provider = await provider_repo.get_provider(provider_id) @@ -114,7 +114,7 @@ async def create_provider( Raises: HTTPException: 400 if provider already exists or validation fails """ - logger.info(f"Admin {admin.email} creating OAuth provider: {provider_data.provider_id}") + logger.info("Admin creating OAuth provider") try: provider = await provider_repo.create_provider(provider_data) @@ -156,7 +156,7 @@ async def update_provider( - 400 if validation fails - 404 if provider not found """ - logger.info(f"Admin {admin.email} updating OAuth provider: {provider_id}") + logger.info("Admin updating OAuth provider") # Track if scopes changed (will invalidate cached tokens) old_provider = await provider_repo.get_provider(provider_id) @@ -179,7 +179,7 @@ async def update_provider( cache = get_token_cache() evicted = cache.delete_for_provider(provider_id) logger.info( - f"Scopes changed for provider {provider_id}, " + "Scopes changed for provider, " f"evicted {evicted} cached tokens" ) @@ -215,7 +215,7 @@ async def delete_provider( - 400 if users are connected and force=False - 404 if provider not found """ - logger.info(f"Admin {admin.email} deleting OAuth provider: {provider_id}") + logger.info("Admin deleting OAuth provider") # Check for connected users connected_tokens = await token_repo.list_provider_tokens(provider_id) @@ -232,7 +232,7 @@ async def delete_provider( # Delete user tokens if any if connected_tokens: deleted_count = await token_repo.delete_provider_tokens(provider_id) - logger.info(f"Deleted {deleted_count} user tokens for provider {provider_id}") + logger.info(f"Deleted {deleted_count} user tokens for provider") # Delete provider deleted = await provider_repo.delete_provider(provider_id) @@ -277,7 +277,7 @@ async def get_provider_connection_count( Raises: HTTPException: 404 if provider not found """ - logger.info(f"Admin {admin.email} getting connection count for: {provider_id}") + logger.info("Admin getting connection count for provider") # Verify provider exists provider = await provider_repo.get_provider(provider_id) diff --git a/backend/src/apis/app_api/admin/quota/models.py b/backend/src/apis/app_api/admin/quota/models.py index f40edc4a..52b48a4c 100644 --- a/backend/src/apis/app_api/admin/quota/models.py +++ b/backend/src/apis/app_api/admin/quota/models.py @@ -5,9 +5,7 @@ from agents.main_agent.quota.models import ( QuotaTier, QuotaAssignment, - QuotaEvent, QuotaAssignmentType, - QuotaOverride ) diff --git a/backend/src/apis/app_api/admin/quota/routes.py b/backend/src/apis/app_api/admin/quota/routes.py index 5f0e5edb..7802f4d2 100644 --- a/backend/src/apis/app_api/admin/quota/routes.py +++ b/backend/src/apis/app_api/admin/quota/routes.py @@ -81,7 +81,7 @@ async def create_tier( - 401 if not authenticated - 403 if user lacks admin role """ - logger.info(f"Admin {admin_user.email} creating tier {tier_data.tier_id}") + logger.info("Admin creating tier") try: tier = await service.create_tier(tier_data, admin_user) @@ -89,7 +89,7 @@ async def create_tier( except ValueError as e: raise HTTPException(status_code=400, detail=str(e)) except Exception as e: - logger.error(f"Error creating tier: {e}") + logger.error("Error creating tier") raise HTTPException(status_code=500, detail="Internal server error") @@ -110,13 +110,13 @@ async def list_tiers( Returns: List of quota tiers """ - logger.info(f"Admin {admin_user.email} listing tiers (enabled_only={enabled_only})") + logger.info("Admin listing tiers") try: tiers = await service.list_tiers(enabled_only=enabled_only) return tiers except Exception as e: - logger.error(f"Error listing tiers: {e}") + logger.error("Error listing tiers") raise HTTPException(status_code=500, detail="Internal server error") @@ -141,7 +141,7 @@ async def get_tier( HTTPException: - 404 if tier not found """ - logger.info(f"Admin {admin_user.email} getting tier {tier_id}") + logger.info("Admin getting tier") tier = await service.get_tier(tier_id) if not tier: @@ -173,7 +173,7 @@ async def update_tier( HTTPException: - 404 if tier not found """ - logger.info(f"Admin {admin_user.email} updating tier {tier_id}") + logger.info("Admin updating tier") try: tier = await service.update_tier(tier_id, updates, admin_user) @@ -183,7 +183,7 @@ async def update_tier( except ValueError as e: raise HTTPException(status_code=400, detail=str(e)) except Exception as e: - logger.error(f"Error updating tier: {e}") + logger.error("Error updating tier") raise HTTPException(status_code=500, detail="Internal server error") @@ -208,7 +208,7 @@ async def delete_tier( - 400 if tier is in use - 404 if tier not found """ - logger.info(f"Admin {admin_user.email} deleting tier {tier_id}") + logger.info("Admin deleting tier") try: success = await service.delete_tier(tier_id, admin_user) @@ -217,7 +217,7 @@ async def delete_tier( except ValueError as e: raise HTTPException(status_code=400, detail=str(e)) except Exception as e: - logger.error(f"Error deleting tier: {e}") + logger.error("Error deleting tier") raise HTTPException(status_code=500, detail="Internal server error") @@ -246,10 +246,7 @@ async def create_assignment( - 401 if not authenticated - 403 if user lacks admin role """ - logger.info( - f"Admin {admin_user.email} creating {assignment_data.assignment_type.value} " - f"assignment for tier {assignment_data.tier_id}" - ) + logger.info("Admin creating assignment") try: assignment = await service.create_assignment(assignment_data, admin_user) @@ -257,7 +254,7 @@ async def create_assignment( except ValueError as e: raise HTTPException(status_code=400, detail=str(e)) except Exception as e: - logger.error(f"Error creating assignment: {e}") + logger.error("Error creating assignment") raise HTTPException(status_code=500, detail="Internal server error") @@ -280,10 +277,7 @@ async def list_assignments( Returns: List of quota assignments """ - logger.info( - f"Admin {admin_user.email} listing assignments " - f"(type={assignment_type}, enabled_only={enabled_only})" - ) + logger.info("Admin listing assignments") try: assignments = await service.list_assignments( @@ -292,7 +286,7 @@ async def list_assignments( ) return assignments except Exception as e: - logger.error(f"Error listing assignments: {e}") + logger.error("Error listing assignments") raise HTTPException(status_code=500, detail="Internal server error") @@ -317,7 +311,7 @@ async def get_assignment( HTTPException: - 404 if assignment not found """ - logger.info(f"Admin {admin_user.email} getting assignment {assignment_id}") + logger.info("Admin getting assignment") assignment = await service.get_assignment(assignment_id) if not assignment: @@ -350,7 +344,7 @@ async def update_assignment( - 400 if validation fails - 404 if assignment not found """ - logger.info(f"Admin {admin_user.email} updating assignment {assignment_id}") + logger.info("Admin updating assignment") try: assignment = await service.update_assignment(assignment_id, updates, admin_user) @@ -360,7 +354,7 @@ async def update_assignment( except ValueError as e: raise HTTPException(status_code=400, detail=str(e)) except Exception as e: - logger.error(f"Error updating assignment: {e}") + logger.error("Error updating assignment") raise HTTPException(status_code=500, detail="Internal server error") @@ -382,14 +376,14 @@ async def delete_assignment( HTTPException: - 404 if assignment not found """ - logger.info(f"Admin {admin_user.email} deleting assignment {assignment_id}") + logger.info("Admin deleting assignment") try: success = await service.delete_assignment(assignment_id, admin_user) if not success: raise HTTPException(status_code=404, detail=f"Assignment '{assignment_id}' not found") except Exception as e: - logger.error(f"Error deleting assignment: {e}") + logger.error("Error deleting assignment") raise HTTPException(status_code=500, detail="Internal server error") @@ -419,7 +413,7 @@ async def get_user_quota_info( Returns: Comprehensive user quota information """ - logger.info(f"Admin {admin_user.email} inspecting quota for user {user_id}") + logger.info("Admin inspecting user quota") try: roles_list = [r.strip() for r in roles.split(",")] if roles else [] @@ -431,7 +425,7 @@ async def get_user_quota_info( ) return info except Exception as e: - logger.error(f"Error getting user quota info: {e}") + logger.error("Error getting user quota info") raise HTTPException(status_code=500, detail="Internal server error") @@ -463,7 +457,7 @@ async def create_override( - 401 if not authenticated - 403 if user lacks admin role """ - logger.info(f"Admin {admin_user.email} creating override for user {override_data.user_id}") + logger.info("Admin creating override") try: override = await service.create_override(override_data, admin_user) @@ -471,7 +465,7 @@ async def create_override( except ValueError as e: raise HTTPException(status_code=400, detail=str(e)) except Exception as e: - logger.error(f"Error creating override: {e}") + logger.error("Error creating override") raise HTTPException(status_code=500, detail="Internal server error") @@ -494,7 +488,7 @@ async def list_overrides( Returns: List of quota overrides """ - logger.info(f"Admin {admin_user.email} listing overrides (user_id={user_id}, active_only={active_only})") + logger.info("Admin listing overrides") try: overrides = await service.list_overrides( @@ -503,7 +497,7 @@ async def list_overrides( ) return overrides except Exception as e: - logger.error(f"Error listing overrides: {e}") + logger.error("Error listing overrides") raise HTTPException(status_code=500, detail="Internal server error") @@ -527,7 +521,7 @@ async def get_override( Raises: HTTPException: 404 if override not found """ - logger.info(f"Admin {admin_user.email} getting override {override_id}") + logger.info("Admin getting override") try: override = await service.get_override(override_id) @@ -537,7 +531,7 @@ async def get_override( except HTTPException: raise except Exception as e: - logger.error(f"Error getting override: {e}") + logger.error("Error getting override") raise HTTPException(status_code=500, detail="Internal server error") @@ -563,7 +557,7 @@ async def update_override( Raises: HTTPException: 404 if override not found """ - logger.info(f"Admin {admin_user.email} updating override {override_id}") + logger.info("Admin updating override") try: override = await service.update_override(override_id, updates) @@ -573,7 +567,7 @@ async def update_override( except HTTPException: raise except Exception as e: - logger.error(f"Error updating override: {e}") + logger.error("Error updating override") raise HTTPException(status_code=500, detail="Internal server error") @@ -594,7 +588,7 @@ async def delete_override( Raises: HTTPException: 404 if override not found """ - logger.info(f"Admin {admin_user.email} deleting override {override_id}") + logger.info("Admin deleting override") try: success = await service.delete_override(override_id) @@ -603,7 +597,7 @@ async def delete_override( except HTTPException: raise except Exception as e: - logger.error(f"Error deleting override: {e}") + logger.error("Error deleting override") raise HTTPException(status_code=500, detail="Internal server error") @@ -632,10 +626,7 @@ async def get_events( Returns: List of quota events """ - logger.info( - f"Admin {admin_user.email} getting events " - f"(user_id={user_id}, tier_id={tier_id}, type={event_type})" - ) + logger.info("Admin getting events") try: events = await service.get_events( @@ -646,5 +637,5 @@ async def get_events( ) return events except Exception as e: - logger.error(f"Error getting events: {e}") + logger.error("Error getting events") raise HTTPException(status_code=500, detail="Internal server error") diff --git a/backend/src/apis/app_api/admin/quota/service.py b/backend/src/apis/app_api/admin/quota/service.py index 2ed6f761..f7e38e9c 100644 --- a/backend/src/apis/app_api/admin/quota/service.py +++ b/backend/src/apis/app_api/admin/quota/service.py @@ -107,7 +107,7 @@ async def update_tier( updated = await self.repository.update_tier(tier_id, update_dict) if updated: - logger.info(f"Updated tier {tier_id} by {admin_user.user_id}") + logger.info("Updated tier") # Invalidate cache self.resolver.invalidate_cache() @@ -137,7 +137,7 @@ async def delete_tier( success = await self.repository.delete_tier(tier_id) if success: - logger.info(f"Deleted tier {tier_id} by {admin_user.user_id}") + logger.info("Deleted tier") # Invalidate cache self.resolver.invalidate_cache() @@ -256,7 +256,7 @@ async def update_assignment( updated = await self.repository.update_assignment(assignment_id, update_dict) if updated: - logger.info(f"Updated assignment {assignment_id} by {admin_user.user_id}") + logger.info("Updated assignment") # Invalidate cache for affected users if existing.assignment_type.value == "direct_user" and existing.user_id: @@ -279,7 +279,7 @@ async def delete_assignment( success = await self.repository.delete_assignment(assignment_id) if success: - logger.info(f"Deleted assignment {assignment_id} by {admin_user.user_id}") + logger.info("Deleted assignment") # Invalidate cache if assignment.assignment_type.value == "direct_user" and assignment.user_id: @@ -326,18 +326,17 @@ async def get_user_quota_info( # Calculate quota info if resolved: tier = resolved.tier - limit = ( + _limit = ( tier.daily_cost_limit if tier.period_type == "daily" and tier.daily_cost_limit else tier.monthly_cost_limit ) # Convert Decimal limit to float for calculations - limit_float = float(limit) if limit else 0 + limit_float = float(_limit) if _limit else 0 percentage_used = (current_usage / limit_float * 100) if limit_float > 0 else 0 remaining = max(0, limit_float - current_usage) else: tier = None - limit = None percentage_used = 0 remaining = None @@ -393,7 +392,7 @@ async def create_override(self, override_data, admin_user: User): ) created = await self.repository.create_override(override) - logger.info(f"Created override {override_id} for user {override_data.user_id}") + logger.info("Created override") # Invalidate cache for this user self.resolver.invalidate_cache(user_id=override_data.user_id) @@ -433,7 +432,7 @@ async def update_override(self, override_id: str, updates): updated = await self.repository.update_override(override_id, updates_dict) if updated: - logger.info(f"Updated override {override_id}") + logger.info("Updated override") # Invalidate cache for affected user self.resolver.invalidate_cache(user_id=existing.user_id) @@ -449,7 +448,7 @@ async def delete_override(self, override_id: str) -> bool: success = await self.repository.delete_override(override_id) if success: - logger.info(f"Deleted override {override_id}") + logger.info("Deleted override") # Invalidate cache for affected user self.resolver.invalidate_cache(user_id=existing.user_id) diff --git a/backend/src/apis/app_api/admin/roles/routes.py b/backend/src/apis/app_api/admin/roles/routes.py index eef36a2c..76e6dabb 100644 --- a/backend/src/apis/app_api/admin/roles/routes.py +++ b/backend/src/apis/app_api/admin/roles/routes.py @@ -1,16 +1,10 @@ """Admin API routes for AppRole management.""" import logging -from typing import Optional from fastapi import APIRouter, Depends, HTTPException, status, Query from apis.shared.auth import User, require_admin -from apis.shared.rbac import ( - AppRoleService, - AppRoleAdminService, - AppRoleCache, -) from apis.shared.rbac.models import ( AppRoleCreate, AppRoleUpdate, @@ -19,7 +13,6 @@ CacheStatsResponse, ) from apis.shared.rbac.system_admin import require_system_admin -from apis.shared.rbac.service import get_app_role_service from apis.shared.rbac.admin_service import get_app_role_admin_service from apis.shared.rbac.cache import get_app_role_cache @@ -47,7 +40,7 @@ async def list_roles( Returns: AppRoleListResponse with list of all roles """ - logger.info(f"Admin {admin.email} listing roles") + logger.info("Admin listing roles") service = get_app_role_admin_service() roles = await service.list_roles(enabled_only=enabled_only) @@ -78,7 +71,7 @@ async def get_role( Raises: HTTPException: 404 if role not found """ - logger.info(f"Admin {admin.email} getting role: {role_id}") + logger.info("Admin getting role") service = get_app_role_admin_service() role = await service.get_role(role_id) @@ -112,7 +105,7 @@ async def create_role( Raises: HTTPException: 400 if role already exists or validation fails """ - logger.info(f"Admin {admin.email} creating role: {role_data.role_id}") + logger.info("Admin creating role") try: service = get_app_role_admin_service() @@ -152,7 +145,7 @@ async def update_role( - 400 if validation fails - 404 if role not found """ - logger.info(f"Admin {admin.email} updating role: {role_id}") + logger.info("Admin updating role") try: service = get_app_role_admin_service() @@ -194,7 +187,7 @@ async def delete_role( - 400 if trying to delete a system role - 404 if role not found """ - logger.info(f"Admin {admin.email} deleting role: {role_id}") + logger.info("Admin deleting role") try: service = get_app_role_admin_service() @@ -236,7 +229,7 @@ async def sync_role_permissions( Raises: HTTPException: 404 if role not found """ - logger.info(f"Admin {admin.email} syncing permissions for role: {role_id}") + logger.info("Admin syncing permissions for role") service = get_app_role_admin_service() role = await service.sync_effective_permissions(role_id, admin) @@ -265,7 +258,7 @@ async def get_cache_stats( Returns: CacheStatsResponse with cache statistics """ - logger.info(f"Admin {admin.email} getting cache stats") + logger.info("Admin getting cache stats") cache = get_app_role_cache() stats = cache.get_stats() @@ -285,7 +278,7 @@ async def invalidate_cache( Args: admin: Authenticated system admin user (injected) """ - logger.info(f"Admin {admin.email} invalidating all caches") + logger.info("Admin invalidating all caches") cache = get_app_role_cache() await cache.invalidate_all() diff --git a/backend/src/apis/app_api/admin/routes.py b/backend/src/apis/app_api/admin/routes.py index c1bcf679..1fb82cd0 100644 --- a/backend/src/apis/app_api/admin/routes.py +++ b/backend/src/apis/app_api/admin/routes.py @@ -9,14 +9,9 @@ import logging import os import boto3 -from datetime import datetime from botocore.exceptions import ClientError, BotoCoreError from .models import ( - UserInfo, - AllSessionsResponse, - SessionDeleteResponse, - SystemStatsResponse, BedrockModelsResponse, FoundationModelSummary, GeminiModelsResponse, @@ -30,9 +25,7 @@ ManagedModelUpdate, ManagedModel, ) -from apis.shared.auth import User, require_admin, require_roles, has_any_role, get_current_user -from apis.shared.sessions.metadata import list_user_sessions, get_session_metadata -from apis.shared.sessions.messages import get_messages +from apis.shared.auth import User, require_admin from apis.shared.models.managed_models import ( create_managed_model, get_managed_model, @@ -40,10 +33,6 @@ update_managed_model, delete_managed_model, ) -from apis.app_api.admin.services.model_access import ( - ModelAccessService, - get_model_access_service, -) from apis.shared.rbac.system_admin import require_system_admin logger = logging.getLogger(__name__) @@ -90,7 +79,7 @@ async def list_bedrock_models( - 403 if user lacks admin role - 500 if AWS API error or server error """ - logger.info(f"Admin {admin_user.email} listing Bedrock foundation models") + logger.info("Admin listing Bedrock foundation models") try: # Initialize Bedrock control plane client (not bedrock-runtime) @@ -111,7 +100,7 @@ async def list_bedrock_models( request_params['byCustomizationType'] = by_customization_type # Call AWS Bedrock API - logger.debug(f"Calling list_foundation_models with params: {request_params}") + logger.debug("Calling list_foundation_models") response = bedrock_client.list_foundation_models(**request_params) # Transform AWS response to our response model @@ -120,7 +109,7 @@ async def list_bedrock_models( # Apply client-side limiting if requested if max_results and len(all_models) > max_results: all_models = all_models[:max_results] - logger.debug(f"Limited results to {max_results} models (client-side)") + logger.debug("Limited results to max_results models (client-side)") model_summaries = [] for model in all_models: @@ -146,7 +135,7 @@ async def list_bedrock_models( # Sort models by ID in reverse order (newest versions typically have higher version numbers/dates) model_summaries.sort(key=lambda m: m.model_id, reverse=True) - logger.info(f"✅ Retrieved {len(model_summaries)} Bedrock foundation models") + logger.info("✅ Retrieved Bedrock foundation models") return BedrockModelsResponse( models=model_summaries, @@ -157,19 +146,19 @@ async def list_bedrock_models( except ClientError as e: error_code = e.response.get('Error', {}).get('Code', 'Unknown') error_message = e.response.get('Error', {}).get('Message', str(e)) - logger.error(f"AWS Bedrock API error: {error_code} - {error_message}") + logger.error("AWS Bedrock API error", exc_info=True) raise HTTPException( status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=f"AWS Bedrock API error: {error_code} - {error_message}" ) except BotoCoreError as e: - logger.error(f"Boto3 error calling Bedrock API: {e}", exc_info=True) + logger.error("Boto3 error calling Bedrock API", exc_info=True) raise HTTPException( status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=f"Error connecting to AWS Bedrock: {str(e)}" ) except Exception as e: - logger.error(f"Unexpected error listing Bedrock models: {e}", exc_info=True) + logger.error("Unexpected error listing Bedrock models", exc_info=True) raise HTTPException( status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=f"Unexpected error: {str(e)}" @@ -200,7 +189,7 @@ async def list_gemini_models( - 403 if user lacks admin role - 500 if Google API error or server error """ - logger.info(f"Admin {admin_user.email} listing Gemini models") + logger.info("Admin listing Gemini models") try: # Check if Google API key is configured @@ -265,9 +254,9 @@ async def list_gemini_models( # Apply client-side limiting if requested if max_results and len(all_models) > max_results: all_models = all_models[:max_results] - logger.debug(f"Limited results to {max_results} models") + logger.debug("Limited results to max_results models") - logger.info(f"✅ Retrieved {len(all_models)} Gemini models") + logger.info("✅ Retrieved Gemini models") return GeminiModelsResponse( models=all_models, @@ -278,7 +267,7 @@ async def list_gemini_models( # Re-raise HTTP exceptions raise except Exception as e: - logger.error(f"Unexpected error listing Gemini models: {e}", exc_info=True) + logger.error("Unexpected error listing Gemini models", exc_info=True) raise HTTPException( status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=f"Error fetching Gemini models: {str(e)}" @@ -313,7 +302,7 @@ async def list_openai_models( - 403 if user lacks admin role - 500 if OpenAI API error or server error """ - logger.info(f"Admin {admin_user.email} listing OpenAI models") + logger.info("Admin listing OpenAI models") try: # Check if OpenAI API key is configured @@ -358,9 +347,9 @@ async def list_openai_models( # Apply client-side limiting if requested if max_results and len(all_models) > max_results: all_models = all_models[:max_results] - logger.debug(f"Limited results to {max_results} models") + logger.debug("Limited results to max_results models") - logger.info(f"✅ Retrieved {len(all_models)} OpenAI models") + logger.info("✅ Retrieved OpenAI models") return OpenAIModelsResponse( models=all_models, @@ -371,7 +360,7 @@ async def list_openai_models( # Re-raise HTTP exceptions raise except Exception as e: - logger.error(f"Unexpected error listing OpenAI models: {e}", exc_info=True) + logger.error("Unexpected error listing OpenAI models", exc_info=True) raise HTTPException( status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=f"Error fetching OpenAI models: {str(e)}" @@ -405,7 +394,7 @@ async def list_managed_models_endpoint( - 403 if user lacks admin role - 500 if server error """ - logger.info(f"Admin {admin_user.email} listing all enabled models") + logger.info("Admin listing all enabled models") try: models = await list_managed_models(user_roles=None) # None = no role filtering @@ -419,7 +408,7 @@ async def list_managed_models_endpoint( ) except Exception as e: - logger.error(f"Unexpected error listing enabled models: {e}", exc_info=True) + logger.error("Unexpected error listing enabled models", exc_info=True) raise HTTPException( status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=f"Error listing enabled models: {str(e)}" @@ -451,7 +440,7 @@ async def create_managed_model_endpoint( - 400 if model with same modelId already exists - 500 if server error """ - logger.info(f"Admin {admin_user.email} creating enabled model: {model_data.model_name}") + logger.info("Admin creating enabled model") try: model = await create_managed_model(model_data) @@ -459,13 +448,13 @@ async def create_managed_model_endpoint( except ValueError as e: # Model already exists - logger.warning(f"Model creation failed: {e}") + logger.warning("Model creation failed") raise HTTPException( status_code=status.HTTP_400_BAD_REQUEST, detail=str(e) ) except Exception as e: - logger.error(f"Unexpected error creating enabled model: {e}", exc_info=True) + logger.error("Unexpected error creating enabled model", exc_info=True) raise HTTPException( status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=f"Error creating enabled model: {str(e)}" @@ -494,7 +483,7 @@ async def get_managed_model_endpoint( - 404 if model not found - 500 if server error """ - logger.info(f"Admin {admin_user.email} requesting enabled model: {model_id}") + logger.info("Admin requesting enabled model") try: model = await get_managed_model(model_id) @@ -510,7 +499,7 @@ async def get_managed_model_endpoint( except HTTPException: raise except Exception as e: - logger.error(f"Unexpected error getting enabled model: {e}", exc_info=True) + logger.error("Unexpected error getting enabled model", exc_info=True) raise HTTPException( status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=f"Error getting enabled model: {str(e)}" @@ -544,7 +533,7 @@ async def update_managed_model_endpoint( - 404 if model not found - 500 if server error """ - logger.info(f"Admin {admin_user.email} updating enabled model: {model_id}") + logger.info("Admin updating enabled model") try: model = await update_managed_model(model_id, updates) @@ -559,7 +548,7 @@ async def update_managed_model_endpoint( except ValueError as e: # Duplicate modelId or other validation error - logger.warning(f"Model update failed: {e}") + logger.warning("Model update failed") raise HTTPException( status_code=status.HTTP_400_BAD_REQUEST, detail=str(e) @@ -567,7 +556,7 @@ async def update_managed_model_endpoint( except HTTPException: raise except Exception as e: - logger.error(f"Unexpected error updating enabled model: {e}", exc_info=True) + logger.error("Unexpected error updating enabled model", exc_info=True) raise HTTPException( status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=f"Error updating enabled model: {str(e)}" @@ -595,7 +584,7 @@ async def delete_managed_model_endpoint( - 404 if model not found - 500 if server error """ - logger.info(f"Admin {admin_user.email} deleting enabled model: {model_id}") + logger.info("Admin deleting enabled model") try: deleted = await delete_managed_model(model_id) @@ -611,7 +600,7 @@ async def delete_managed_model_endpoint( except HTTPException: raise except Exception as e: - logger.error(f"Unexpected error deleting enabled model: {e}", exc_info=True) + logger.error("Unexpected error deleting enabled model", exc_info=True) raise HTTPException( status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=f"Error deleting enabled model: {str(e)}" @@ -646,7 +635,7 @@ async def sync_model_roles( - 404 if model not found - 500 if server error """ - logger.info(f"Admin {admin_user.email} syncing roles for model: {model_id}") + logger.info("Admin syncing roles for model") try: # Get the model @@ -686,7 +675,7 @@ async def sync_model_roles( ) logger.info( - f"✅ Synced model {model_id}: allowedAppRoles = {granting_roles}" + "✅ Synced model allowedAppRoles" ) return updated_model @@ -694,7 +683,7 @@ async def sync_model_roles( except HTTPException: raise except Exception as e: - logger.error(f"Unexpected error syncing model roles: {e}", exc_info=True) + logger.error("Unexpected error syncing model roles", exc_info=True) raise HTTPException( status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=f"Error syncing model roles: {str(e)}" diff --git a/backend/src/apis/app_api/admin/services/tests/test_model_access.py b/backend/src/apis/app_api/admin/services/tests/test_model_access.py index 2420ef8a..11affeb4 100644 --- a/backend/src/apis/app_api/admin/services/tests/test_model_access.py +++ b/backend/src/apis/app_api/admin/services/tests/test_model_access.py @@ -6,7 +6,7 @@ """ import pytest -from unittest.mock import AsyncMock, MagicMock, patch +from unittest.mock import AsyncMock from datetime import datetime, timezone from apis.app_api.admin.services.model_access import ModelAccessService diff --git a/backend/src/apis/app_api/admin/services/tool_access.py b/backend/src/apis/app_api/admin/services/tool_access.py index 395f9402..bdce7e7e 100644 --- a/backend/src/apis/app_api/admin/services/tool_access.py +++ b/backend/src/apis/app_api/admin/services/tool_access.py @@ -92,8 +92,6 @@ async def filter_allowed_tools( return list(allowed_tools & all_tool_ids) # Filter requested tools to only allowed ones - requested_set = set(requested_tools) - if has_wildcard: # Wildcard: allow all requested tools that exist # (allow gateway tools even if not in catalog) diff --git a/backend/src/apis/app_api/admin/tools/routes.py b/backend/src/apis/app_api/admin/tools/routes.py index 30677a88..89e0a477 100644 --- a/backend/src/apis/app_api/admin/tools/routes.py +++ b/backend/src/apis/app_api/admin/tools/routes.py @@ -6,7 +6,7 @@ from fastapi import APIRouter, Depends, HTTPException, Query from apis.shared.auth import User, require_admin -from apis.app_api.tools.service import ToolCatalogService, get_tool_catalog_service +from apis.app_api.tools.service import get_tool_catalog_service from apis.app_api.tools.models import ( ToolCreateRequest, ToolUpdateRequest, @@ -41,7 +41,7 @@ async def admin_list_all_tools( Returns: AdminToolListResponse with all tools """ - logger.info(f"Admin {admin.email} listing full tool catalog") + logger.info("Admin listing full tool catalog") service = get_tool_catalog_service() tools = await service.get_all_tools(status=status, include_roles=True) @@ -69,7 +69,7 @@ async def admin_get_tool( Returns: AdminToolResponse for the tool """ - logger.info(f"Admin {admin.email} getting tool: {tool_id}") + logger.info("Admin getting tool") service = get_tool_catalog_service() tool = await service.get_tool(tool_id) @@ -105,7 +105,7 @@ async def admin_create_tool( Returns: Created AdminToolResponse """ - logger.info(f"Admin {admin.email} creating tool: {request.tool_id}") + logger.info("Admin creating tool") service = get_tool_catalog_service() @@ -157,7 +157,7 @@ async def admin_update_tool( Returns: Updated AdminToolResponse """ - logger.info(f"Admin {admin.email} updating tool: {tool_id}") + logger.info("Admin updating tool") service = get_tool_catalog_service() @@ -202,7 +202,7 @@ async def admin_delete_tool( Returns: Success message """ - logger.info(f"Admin {admin.email} deleting tool: {tool_id} (hard={hard})") + logger.info("Admin deleting tool") service = get_tool_catalog_service() deleted = await service.delete_tool(tool_id, admin, soft=not hard) @@ -236,7 +236,7 @@ async def get_tool_roles( Returns: ToolRolesResponse with role assignments """ - logger.info(f"Admin {admin.email} getting roles for tool: {tool_id}") + logger.info("Admin getting roles for tool") service = get_tool_catalog_service() @@ -272,7 +272,7 @@ async def set_tool_roles( Returns: Success message """ - logger.info(f"Admin {admin.email} setting roles for tool: {tool_id}") + logger.info("Admin setting roles for tool") service = get_tool_catalog_service() @@ -302,7 +302,7 @@ async def add_roles_to_tool( Returns: Success message """ - logger.info(f"Admin {admin.email} adding roles to tool: {tool_id}") + logger.info("Admin adding roles to tool") service = get_tool_catalog_service() @@ -332,7 +332,7 @@ async def remove_roles_from_tool( Returns: Success message """ - logger.info(f"Admin {admin.email} removing roles from tool: {tool_id}") + logger.info("Admin removing roles from tool") service = get_tool_catalog_service() @@ -369,7 +369,7 @@ async def sync_from_registry( Returns: SyncResult with discovered, orphaned, and unchanged tools """ - logger.info(f"Admin {admin.email} syncing tool catalog (dry_run={dry_run})") + logger.info("Admin syncing tool catalog") service = get_tool_catalog_service() result = await service.sync_catalog_from_registry(admin, dry_run=dry_run) diff --git a/backend/src/apis/app_api/admin/users/routes.py b/backend/src/apis/app_api/admin/users/routes.py index 0e1e50e0..ad4864a7 100644 --- a/backend/src/apis/app_api/admin/users/routes.py +++ b/backend/src/apis/app_api/admin/users/routes.py @@ -82,7 +82,7 @@ async def list_users( Returns: UserListResponse with paginated list of users """ - logger.info(f"Admin {admin_user.email} listing users (status={status}, domain={domain})") + logger.info("Admin listing users") if not service.enabled: logger.warning("User admin service is disabled - no table configured") @@ -111,7 +111,7 @@ async def search_users( Returns: UserListResponse with matching user (or empty if not found) """ - logger.info(f"Admin {admin_user.email} searching for user by email: {email}") + logger.info("Admin searching for user by email") if not service.enabled: logger.warning("User admin service is disabled - no table configured") @@ -136,7 +136,7 @@ async def list_email_domains( Note: Currently returns empty list. Full implementation requires maintaining a separate domain aggregation. """ - logger.info(f"Admin {admin_user.email} listing email domains") + logger.info("Admin listing email domains") return await service.list_domains(limit=limit) @@ -163,7 +163,7 @@ async def get_user_detail( Raises: HTTPException: 404 if user not found """ - logger.info(f"Admin {admin_user.email} requesting user detail: {user_id}") + logger.info("Admin requesting user detail") if not service.enabled: raise HTTPException( diff --git a/backend/src/apis/app_api/admin/users/service.py b/backend/src/apis/app_api/admin/users/service.py index 99e2cc05..820ac86e 100644 --- a/backend/src/apis/app_api/admin/users/service.py +++ b/backend/src/apis/app_api/admin/users/service.py @@ -8,7 +8,6 @@ from datetime import datetime, timezone from apis.shared.users.repository import UserRepository -from apis.shared.users.models import UserProfile, UserListItem from apis.app_api.costs.aggregator import CostAggregator from agents.main_agent.quota.resolver import QuotaResolver from agents.main_agent.quota.repository import QuotaRepository @@ -64,7 +63,8 @@ async def list_users( try: last_key = json.loads(base64.b64decode(cursor).decode()) except Exception: - pass + # Invalid cursor format; start from the beginning + logger.warning("Invalid pagination cursor, ignoring") # Query based on filters if domain: diff --git a/backend/src/apis/app_api/assistants/TESTING.md b/backend/src/apis/app_api/assistants/TESTING.md index 2c796a3b..1fce15fe 100644 --- a/backend/src/apis/app_api/assistants/TESTING.md +++ b/backend/src/apis/app_api/assistants/TESTING.md @@ -119,16 +119,7 @@ curl http://localhost:8000/api/assistants/AST-abc123def456 \ -H "Authorization: Bearer YOUR_JWT_TOKEN" ``` -### 5. Archive Assistant (Soft Delete) - -```bash -curl -X POST http://localhost:8000/api/assistants/AST-abc123def456/archive \ - -H "Authorization: Bearer YOUR_JWT_TOKEN" -``` - -**Result:** Status changes to `ARCHIVED`, file remains on disk but hidden from default listings - -### 6. Delete Assistant (Hard Delete) +### 5. Delete Assistant ```bash curl -X DELETE http://localhost:8000/api/assistants/AST-abc123def456 \ @@ -167,10 +158,6 @@ curl http://localhost:8000/api/assistants \ # Include DRAFT assistants curl "http://localhost:8000/api/assistants?include_drafts=true" \ -H "Authorization: Bearer YOUR_JWT_TOKEN" - -# Include ARCHIVED assistants -curl "http://localhost:8000/api/assistants?include_archived=true" \ - -H "Authorization: Bearer YOUR_JWT_TOKEN" ``` ### Scenario 3: Pagination diff --git a/backend/src/apis/app_api/assistants/routes.py b/backend/src/apis/app_api/assistants/routes.py index 983bf9d6..3ad1a0a2 100644 --- a/backend/src/apis/app_api/assistants/routes.py +++ b/backend/src/apis/app_api/assistants/routes.py @@ -30,7 +30,6 @@ UpdateAssistantRequest, ) from apis.shared.assistants.service import ( - archive_assistant, assistant_exists, create_assistant, create_assistant_draft, @@ -147,7 +146,6 @@ async def create_assistant_endpoint(request: CreateAssistantRequest, current_use async def list_assistants_endpoint( limit: Optional[int] = Query(None, ge=1, le=1000, description="Maximum number of assistants to return"), next_token: Optional[str] = Query(None, description="Pagination token for retrieving the next page"), - include_archived: bool = Query(False, description="Include archived assistants"), include_drafts: bool = Query(False, description="Include draft assistants"), include_public: bool = Query(False, description="Include public assistants (in addition to user's own)"), current_user: User = Depends(get_current_user), @@ -161,13 +159,11 @@ async def list_assistants_endpoint( When include_public=True, returns both the user's own assistants AND all public assistants (excluding those owned by the user to avoid duplicates). - By default, excludes draft and archived assistants. Use query parameters - to include them. + By default, excludes draft assistants. Use query parameters to include them. Args: limit: Maximum number of assistants to return (optional, 1-1000) next_token: Pagination token for retrieving next page (optional) - include_archived: Whether to include archived assistants (default: False) include_drafts: Whether to include draft assistants (default: False) include_public: Whether to include public assistants (default: False) current_user: Authenticated user from JWT token (injected by dependency) @@ -182,10 +178,7 @@ async def list_assistants_endpoint( """ user_id = current_user.user_id - logger.info( - f"GET /assistants - User: {user_id}, Limit: {limit}, NextToken: {next_token}, " - f"IncludeArchived: {include_archived}, IncludeDrafts: {include_drafts}, IncludePublic: {include_public}" - ) + logger.info("GET /assistants") try: # Retrieve assistants for the user with pagination @@ -193,7 +186,6 @@ async def list_assistants_endpoint( owner_id=user_id, limit=limit, next_token=next_token, - include_archived=include_archived, include_drafts=include_drafts, include_public=include_public, ) @@ -266,7 +258,7 @@ async def get_assistant_endpoint(assistant_id: str, current_user: User = Depends """ user_id = current_user.user_id - logger.info(f"GET /assistants/{assistant_id} - User: {user_id}") + logger.info("GET /assistants/{assistant_id}") try: # First check if assistant exists (without access check) @@ -321,7 +313,7 @@ async def update_assistant_endpoint(assistant_id: str, request: UpdateAssistantR """ user_id = current_user.user_id - logger.info(f"PUT /assistants/{assistant_id} - User: {user_id}") + logger.info("PUT /assistants/{assistant_id}") try: # Update assistant @@ -353,87 +345,38 @@ async def update_assistant_endpoint(assistant_id: str, request: UpdateAssistantR raise HTTPException(status_code=500, detail=f"Failed to update assistant: {str(e)}") -@router.post("/{assistant_id}/archive", response_model=AssistantResponse, response_model_exclude_none=True) -async def archive_assistant_endpoint(assistant_id: str, current_user: User = Depends(get_current_user)): - """ - Archive an assistant (soft delete). - - Sets the assistant status to ARCHIVED. The assistant will not appear - in default listings but can still be retrieved by ID and can be - un-archived by setting status back to COMPLETE. - - Requires JWT authentication. Users can only archive their own assistants. - - Args: - assistant_id: Assistant identifier from URL path - current_user: Authenticated user from JWT token (injected by dependency) - - Returns: - AssistantResponse with status=ARCHIVED - - Raises: - HTTPException: - - 401 if not authenticated - - 404 if assistant not found or not owned by user - - 500 if server error - """ - user_id = current_user.user_id - - logger.info(f"POST /assistants/{assistant_id}/archive - User: {user_id}") - - try: - # Archive assistant (soft delete) - archived_assistant = await archive_assistant(assistant_id=assistant_id, owner_id=user_id) - - if not archived_assistant: - raise HTTPException(status_code=404, detail=f"Assistant not found: {assistant_id}") - - # Convert to response model - return AssistantResponse.model_validate(archived_assistant.model_dump(by_alias=True)) - - except HTTPException: - raise - except Exception as e: - logger.error(f"Error archiving assistant: {e}", exc_info=True) - raise HTTPException(status_code=500, detail=f"Failed to archive assistant: {str(e)}") - - @router.delete("/{assistant_id}", status_code=204) async def delete_assistant_endpoint(assistant_id: str, current_user: User = Depends(get_current_user)): - """ - Delete an assistant permanently (hard delete). - - This is irreversible. The assistant and all associated data will be - permanently removed. Consider using POST /assistants/{id}/archive for - soft deletion instead. - - Requires JWT authentication. Users can only delete their own assistants. - - Args: - assistant_id: Assistant identifier from URL path - current_user: Authenticated user from JWT token (injected by dependency) - - Returns: - 204 No Content on successful deletion - - Raises: - HTTPException: - - 401 if not authenticated - - 404 if assistant not found or not owned by user - - 500 if server error - """ + """Delete an assistant and all associated documents using soft-delete + background cleanup.""" user_id = current_user.user_id - - logger.info(f"DELETE /assistants/{assistant_id} - User: {user_id}") + logger.info("DELETE /assistants/{assistant_id}") try: - # Delete assistant permanently (hard delete) - success = await delete_assistant(assistant_id=assistant_id, owner_id=user_id) + # 1. List all documents for the assistant + docs, _ = await list_assistant_documents( + assistant_id=assistant_id, + owner_id=user_id, + limit=1000, + ) + # 2. Batch soft-delete all documents with TTL + if docs: + from apis.app_api.documents.services.document_service import batch_soft_delete_documents + await batch_soft_delete_documents( + assistant_id=assistant_id, + document_ids=[doc.document_id for doc in docs], + ) + + # 3. Hard-delete assistant record + success = await delete_assistant(assistant_id=assistant_id, owner_id=user_id) if not success: raise HTTPException(status_code=404, detail=f"Assistant not found: {assistant_id}") - # Return 204 No Content (no response body) + # 4. Fire-and-forget background cleanup for all documents + if docs: + from apis.app_api.documents.services.cleanup_service import cleanup_assistant_documents + asyncio.ensure_future(cleanup_assistant_documents(assistant_id, docs)) + return None except HTTPException: @@ -473,7 +416,7 @@ async def test_chat_endpoint(assistant_id: str, request: AssistantTestChatReques """ user_id = current_user.user_id - logger.info(f"POST /assistants/{assistant_id}/test-chat - User: {user_id}, Message: {request.message[:50]}...") + logger.info("POST /assistants/{assistant_id}/test-chat") try: # 1. Get assistant and verify ownership @@ -603,7 +546,7 @@ async def share_assistant_endpoint(assistant_id: str, request: ShareAssistantReq """ user_id = current_user.user_id - logger.info(f"POST /assistants/{assistant_id}/shares - User: {user_id}, Emails: {len(request.emails)}") + logger.info("POST /assistants/{assistant_id}/shares") try: # Share assistant with emails @@ -647,7 +590,7 @@ async def unshare_assistant_endpoint(assistant_id: str, request: UnshareAssistan """ user_id = current_user.user_id - logger.info(f"DELETE /assistants/{assistant_id}/shares - User: {user_id}, Emails: {len(request.emails)}") + logger.info("DELETE /assistants/{assistant_id}/shares") try: # Unshare assistant from emails @@ -690,7 +633,7 @@ async def get_assistant_shares_endpoint(assistant_id: str, current_user: User = """ user_id = current_user.user_id - logger.info(f"GET /assistants/{assistant_id}/shares - User: {user_id}") + logger.info("GET /assistants/{assistant_id}/shares") try: # Get share list diff --git a/backend/src/apis/app_api/auth/api_keys/models.py b/backend/src/apis/app_api/auth/api_keys/models.py index 06aa9467..9f932984 100644 --- a/backend/src/apis/app_api/auth/api_keys/models.py +++ b/backend/src/apis/app_api/auth/api_keys/models.py @@ -1,7 +1,6 @@ """API Key request/response models.""" -from datetime import datetime -from typing import List, Optional +from typing import Optional from pydantic import BaseModel, Field diff --git a/backend/src/apis/app_api/auth/api_keys/repository.py b/backend/src/apis/app_api/auth/api_keys/repository.py index 5b840818..db74ebf8 100644 --- a/backend/src/apis/app_api/auth/api_keys/repository.py +++ b/backend/src/apis/app_api/auth/api_keys/repository.py @@ -17,7 +17,7 @@ import logging import os from datetime import datetime, timezone -from typing import Any, Dict, List, Optional +from typing import Any, Dict, Optional import boto3 from boto3.dynamodb.conditions import Key diff --git a/backend/src/apis/app_api/auth/api_keys/routes.py b/backend/src/apis/app_api/auth/api_keys/routes.py index 5b5e2216..57023ea8 100644 --- a/backend/src/apis/app_api/auth/api_keys/routes.py +++ b/backend/src/apis/app_api/auth/api_keys/routes.py @@ -95,7 +95,7 @@ async def delete_api_key( except HTTPException: raise except Exception as e: - logger.error(f"Failed to delete API key {key_id}: {e}", exc_info=True) + logger.error("Failed to delete API key", exc_info=True) raise HTTPException( status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail="Failed to delete API key", diff --git a/backend/src/apis/app_api/auth/api_keys/service.py b/backend/src/apis/app_api/auth/api_keys/service.py index 1301bb3b..937bca0a 100644 --- a/backend/src/apis/app_api/auth/api_keys/service.py +++ b/backend/src/apis/app_api/auth/api_keys/service.py @@ -73,9 +73,9 @@ async def delete_key(self, user_id: str, key_id: str) -> bool: """Delete a key belonging to the requesting user.""" deleted = await self.repo.delete_key(user_id, key_id) if deleted: - logger.info(f"API key {key_id} deleted by user {user_id}") + logger.info("API key deleted") else: - logger.info(f"API key {key_id} not found for user {user_id} (no-op)") + logger.info("API key not found (no-op)") return deleted # ------------------------------------------------------------------ diff --git a/backend/src/apis/app_api/auth/routes.py b/backend/src/apis/app_api/auth/routes.py index 1d7f2a1a..7fe3641d 100644 --- a/backend/src/apis/app_api/auth/routes.py +++ b/backend/src/apis/app_api/auth/routes.py @@ -1,6 +1,7 @@ """OIDC authentication routes with multi-provider support.""" import logging +import os from typing import Optional from fastapi import APIRouter, Depends, HTTPException, Query, Request, status @@ -109,7 +110,7 @@ async def login( prompt=prompt ) - logger.info(f"Generated authorization URL for OIDC login (provider: {provider_id})") + logger.info("Generated authorization URL for OIDC login") return LoginResponse( authorization_url=authorization_url, @@ -241,7 +242,7 @@ async def logout( post_logout_redirect_uri=post_logout_redirect_uri ) - logger.info(f"Generated logout URL (provider: {provider_id})") + logger.info("Generated logout URL") return LogoutResponse(logout_url=logout_url) @@ -389,8 +390,14 @@ async def get_runtime_endpoint_impl( f"(provider: {provider.provider_id}): {provider.agentcore_runtime_endpoint_url}" ) + # Allow local override for development (bypass cloud runtime) + runtime_url = os.environ.get( + "LOCAL_RUNTIME_ENDPOINT_URL", + provider.agentcore_runtime_endpoint_url, + ) + return RuntimeEndpointResponse( - runtime_endpoint_url=provider.agentcore_runtime_endpoint_url, + runtime_endpoint_url=runtime_url, provider_id=provider.provider_id, runtime_status=provider.agentcore_runtime_status, ) diff --git a/backend/src/apis/app_api/chat/routes.py b/backend/src/apis/app_api/chat/routes.py index 222cabd1..839e24f9 100644 --- a/backend/src/apis/app_api/chat/routes.py +++ b/backend/src/apis/app_api/chat/routes.py @@ -10,9 +10,8 @@ import asyncio import json import logging -from typing import AsyncGenerator -from fastapi import APIRouter, Depends, HTTPException, status +from fastapi import APIRouter, Depends, HTTPException from fastapi.responses import StreamingResponse from agents.main_agent.session.session_factory import SessionFactory @@ -20,7 +19,7 @@ from apis.app_api.admin.services import get_tool_access_service from apis.shared.assistants.service import assistant_exists, get_assistant_with_access_check, mark_share_as_interacted from apis.shared.assistants.rag_service import augment_prompt_with_context, search_assistant_knowledgebase_with_formatting -from apis.shared.files.file_resolver import ResolvedFileContent, get_file_resolver +from apis.shared.files.file_resolver import get_file_resolver from apis.shared.sessions.models import SessionMetadata, SessionPreferences from apis.shared.sessions.messages import get_messages from apis.shared.sessions.metadata import get_session_metadata, store_session_metadata @@ -32,9 +31,7 @@ from apis.shared.auth.dependencies import get_current_user from apis.shared.auth.models import User from apis.shared.errors import ( - ConversationalErrorEvent, ErrorCode, - StreamErrorEvent, build_conversational_error_event, ) from apis.shared.quota import ( @@ -394,10 +391,22 @@ async def stream_with_cleanup(): # Pass resolved files (from S3) merged with any direct file content # Use augmented message if assistant RAG was applied + # + # Always store the original user message as displayText when the prompt + # will be modified before reaching the model. This happens when: + # 1. RAG augmentation prepends context chunks to the message + # 2. File attachments cause PromptBuilder to rewrite into ContentBlocks + # The original text becomes the single source of truth for UI display, + # while the full augmented prompt stays in AgentCore Memory for the LLM. + message_will_be_modified = ( + augmented_message != request.message # RAG augmentation + or bool(files_to_send) # File attachments + ) stream_iterator = agent.stream_async( - augmented_message, # Use augmented message if assistant RAG was applied + augmented_message, session_id=request.session_id, files=files_to_send if files_to_send else None, + original_message=request.message if message_will_be_modified else None, ) try: diff --git a/backend/src/apis/app_api/costs/aggregator.py b/backend/src/apis/app_api/costs/aggregator.py index 9c3473d8..6068ad3c 100644 --- a/backend/src/apis/app_api/costs/aggregator.py +++ b/backend/src/apis/app_api/costs/aggregator.py @@ -3,10 +3,9 @@ import logging from datetime import datetime, timezone, timedelta from typing import Optional, Dict, Tuple -from decimal import Decimal from .models import UserCostSummary, ModelCostSummary, CostBreakdown -from apis.app_api.storage.metadata_storage import get_metadata_storage +from apis.app_api.storage import get_metadata_storage logger = logging.getLogger(__name__) @@ -34,7 +33,7 @@ def _get_cached(self, user_id: str, period: str) -> Optional[UserCostSummary]: if cache_key in self._cache: summary, cached_at = self._cache[cache_key] if datetime.now(timezone.utc) - cached_at < timedelta(seconds=self.cache_ttl): - logger.debug(f"Cost summary cache hit for user {user_id}, period {period}") + logger.debug("Cost summary cache hit") return summary else: # Expired, remove from cache @@ -45,7 +44,7 @@ def _set_cached(self, user_id: str, period: str, summary: UserCostSummary) -> No """Cache a summary""" cache_key = self._get_cache_key(user_id, period) self._cache[cache_key] = (summary, datetime.now(timezone.utc)) - logger.debug(f"Cost summary cached for user {user_id}, period {period}") + logger.debug("Cost summary cached") def invalidate_cache(self, user_id: Optional[str] = None, period: Optional[str] = None) -> None: """Invalidate cache for specific user/period or all entries. @@ -280,15 +279,7 @@ def _create_empty_summary(self, user_id: str, period: str) -> UserCostSummary: # Parse period to get date range try: year, month = period.split('-') - # Calculate last day of month - if month == '12': - next_month = 1 - next_year = int(year) + 1 - else: - next_month = int(month) + 1 - next_year = int(year) - - # Get last day by going to first day of next month and subtracting a day + # Get last day of month from calendar import monthrange last_day = monthrange(int(year), int(month))[1] diff --git a/backend/src/apis/app_api/costs/calculator.py b/backend/src/apis/app_api/costs/calculator.py index 4de8d625..37f09b83 100644 --- a/backend/src/apis/app_api/costs/calculator.py +++ b/backend/src/apis/app_api/costs/calculator.py @@ -6,7 +6,7 @@ - Multi-provider cost support (Bedrock, OpenAI, Gemini) """ -from typing import Dict, Optional, Tuple +from typing import Dict, Tuple from .models import CostBreakdown diff --git a/backend/src/apis/app_api/costs/models.py b/backend/src/apis/app_api/costs/models.py index 8d63ab36..2a8bd93d 100644 --- a/backend/src/apis/app_api/costs/models.py +++ b/backend/src/apis/app_api/costs/models.py @@ -1,7 +1,6 @@ """Cost tracking data models for user cost aggregation and reporting.""" from pydantic import BaseModel, Field, ConfigDict -from typing import Optional class CostBreakdown(BaseModel): diff --git a/backend/src/apis/app_api/costs/routes.py b/backend/src/apis/app_api/costs/routes.py index d654d6f2..92ac9aa4 100644 --- a/backend/src/apis/app_api/costs/routes.py +++ b/backend/src/apis/app_api/costs/routes.py @@ -49,7 +49,7 @@ async def get_cost_summary( if not period: period = datetime.now(timezone.utc).strftime("%Y-%m") - logger.info(f"GET /costs/summary - User: {user_id}, Period: {period}") + logger.info("GET /costs/summary") try: # Get pre-aggregated summary (O(1) lookup) @@ -59,7 +59,7 @@ async def get_cost_summary( period=period ) - logger.info(f"Successfully retrieved cost summary for user {user_id}, period {period}") + logger.info("Successfully retrieved cost summary") return summary @@ -102,7 +102,7 @@ async def get_detailed_report( """ user_id = current_user.user_id - logger.info(f"GET /costs/detailed-report - User: {user_id}, Start: {start_date}, End: {end_date}") + logger.info("GET /costs/detailed-report") try: # Parse dates diff --git a/backend/src/apis/app_api/costs/tests/test_calculator.py b/backend/src/apis/app_api/costs/tests/test_calculator.py index aa1a1231..f109e279 100644 --- a/backend/src/apis/app_api/costs/tests/test_calculator.py +++ b/backend/src/apis/app_api/costs/tests/test_calculator.py @@ -2,7 +2,6 @@ import pytest from apis.app_api.costs.calculator import CostCalculator -from apis.app_api.costs.models import CostBreakdown class TestCostCalculator: diff --git a/backend/src/apis/app_api/documents/ingestion/embeddings/__init__.py b/backend/src/apis/app_api/documents/ingestion/embeddings/__init__.py index 9fb41c7f..b71f9668 100644 --- a/backend/src/apis/app_api/documents/ingestion/embeddings/__init__.py +++ b/backend/src/apis/app_api/documents/ingestion/embeddings/__init__.py @@ -2,6 +2,7 @@ Token validation and chunk splitting for the document ingestion pipeline. Core embedding/vector operations are in apis.shared.embeddings. +Re-exports are provided for Lambda handler compatibility. """ from .bedrock_embeddings import ( diff --git a/backend/src/apis/app_api/documents/ingestion/embeddings/bedrock_embeddings.py b/backend/src/apis/app_api/documents/ingestion/embeddings/bedrock_embeddings.py index 17cea7dc..7795273f 100644 --- a/backend/src/apis/app_api/documents/ingestion/embeddings/bedrock_embeddings.py +++ b/backend/src/apis/app_api/documents/ingestion/embeddings/bedrock_embeddings.py @@ -4,25 +4,33 @@ ingestion pipeline. The core embedding generation and vector store operations live in apis.shared.embeddings. -Re-exports shared functions for backward compatibility with existing -ingestion code that imports from this module. +Re-exports shared functions so the Lambda handler (which imports from +embeddings.bedrock_embeddings) can access them without knowing the +full apis.shared.embeddings path. """ import logging import re from typing import List -# Re-export shared functions so existing ingestion imports still work +# Re-export shared functions for Lambda handler compatibility. +# The handler imports from embeddings.bedrock_embeddings (Lambda task root path). from apis.shared.embeddings.bedrock_embeddings import ( # noqa: F401 - BEDROCK_EMBEDDING_CONFIG, - delete_vectors_for_document, generate_embeddings, - search_assistant_knowledgebase, store_embeddings_in_s3, + search_assistant_knowledgebase, ) logger = logging.getLogger(__name__) +__all__ = [ + "generate_embeddings", + "store_embeddings_in_s3", + "search_assistant_knowledgebase", + "validate_and_split_chunks", + "_validate_and_split_chunks", +] + # --- Token validation safety net (tiktoken-based, ingestion only) --- _tiktoken_encoder = None diff --git a/backend/src/apis/app_api/documents/ingestion/processors/docling_processor.py b/backend/src/apis/app_api/documents/ingestion/processors/docling_processor.py index 5af041af..250e6ac2 100644 --- a/backend/src/apis/app_api/documents/ingestion/processors/docling_processor.py +++ b/backend/src/apis/app_api/documents/ingestion/processors/docling_processor.py @@ -1,11 +1,10 @@ -import asyncio import logging import os import shutil import tempfile import time from pathlib import Path -from typing import Any, Callable, Coroutine, List, Optional, Union +from typing import Any, Callable, Coroutine, List, Optional logger = logging.getLogger(__name__) diff --git a/backend/src/apis/app_api/documents/ingestion/status.py b/backend/src/apis/app_api/documents/ingestion/status.py index e3447f27..a88bdbfa 100644 --- a/backend/src/apis/app_api/documents/ingestion/status.py +++ b/backend/src/apis/app_api/documents/ingestion/status.py @@ -6,7 +6,6 @@ import logging import os import traceback -import uuid from datetime import datetime, timezone from typing import Optional, Tuple, Literal @@ -118,7 +117,6 @@ def _format_error_message(exception: Exception) -> Tuple[str, str]: Returns: Tuple of (user_friendly_message, full_traceback_string) """ - exception_type = type(exception).__name__ exception_message = str(exception) # Get full traceback for technical details diff --git a/backend/src/apis/app_api/documents/models.py b/backend/src/apis/app_api/documents/models.py index 6da3340b..5a973e6c 100644 --- a/backend/src/apis/app_api/documents/models.py +++ b/backend/src/apis/app_api/documents/models.py @@ -5,7 +5,7 @@ from pydantic import BaseModel, ConfigDict, Field # Type alias for document processing status -DocumentStatus = Literal["uploading", "chunking", "embedding", "complete", "failed"] +DocumentStatus = Literal["uploading", "chunking", "embedding", "complete", "failed", "deleting"] class Document(BaseModel): @@ -31,6 +31,7 @@ class Document(BaseModel): chunk_count: Optional[int] = Field(None, alias="chunkCount", description="Number of chunks created") created_at: str = Field(..., alias="createdAt", description="ISO 8601 timestamp of creation") updated_at: str = Field(..., alias="updatedAt", description="ISO 8601 timestamp of last update") + ttl: Optional[int] = Field(None, alias="ttl", description="DynamoDB TTL epoch timestamp for auto-expiry") class CreateDocumentRequest(BaseModel): @@ -88,3 +89,12 @@ class DownloadUrlResponse(BaseModel): download_url: str = Field(..., alias="downloadUrl", description="Presigned S3 URL for download") filename: str = Field(..., description="Original filename") expires_in: int = Field(..., alias="expiresIn", description="URL expiration in seconds") + + +class ReportUploadFailureRequest(BaseModel): + """Request body for reporting a client-side upload failure""" + + model_config = ConfigDict(populate_by_name=True) + + error: str = Field(..., description="User-friendly error message") + details: Optional[str] = Field(None, description="Technical error details") diff --git a/backend/src/apis/app_api/documents/routes.py b/backend/src/apis/app_api/documents/routes.py index bbab64b3..a23a1041 100644 --- a/backend/src/apis/app_api/documents/routes.py +++ b/backend/src/apis/app_api/documents/routes.py @@ -6,9 +6,8 @@ from fastapi import APIRouter, Depends, HTTPException, status from apis.shared.assistants.service import get_assistant -from apis.app_api.documents.models import CreateDocumentRequest, DocumentResponse, DocumentsListResponse, DownloadUrlResponse, UploadUrlResponse -from apis.app_api.documents.services.document_service import _generate_document_id, create_document, list_assistant_documents -from apis.app_api.documents.services.document_service import delete_document as delete_document_service +from apis.app_api.documents.models import CreateDocumentRequest, DocumentResponse, DocumentsListResponse, DownloadUrlResponse, UploadUrlResponse, ReportUploadFailureRequest +from apis.app_api.documents.services.document_service import _generate_document_id, create_document, list_assistant_documents, update_document_status from apis.app_api.documents.services.document_service import get_document as get_document_service from apis.app_api.documents.services.storage_service import generate_download_url, generate_upload_url from apis.shared.auth.dependencies import get_current_user_id @@ -79,6 +78,57 @@ async def generate_upload_url_endpoint( raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=f"Failed to generate upload URL: {str(e)}") +@router.post("/{document_id}/upload-failed", response_model=DocumentResponse, status_code=status.HTTP_200_OK) +async def report_upload_failure( + assistant_id: str, document_id: str, request: ReportUploadFailureRequest, user_id: str = Depends(get_current_user_id) +) -> DocumentResponse: + """ + Report that a client-side S3 upload failed. + + Marks the document as 'failed' in DynamoDB so the frontend stops polling + and displays the error. Called by the client when the presigned URL upload + to S3 fails (network error, permission error, etc.). + + Args: + assistant_id: Parent assistant identifier + document_id: Document identifier + request: Error details from the client + user_id: Authenticated user ID from JWT + """ + try: + # Verify document exists and user owns the assistant + document = await get_document_service(assistant_id, document_id, user_id) + if not document: + raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=f"Document not found: {document_id}") + + # Only allow marking as failed if still in 'uploading' state + if document.status != "uploading": + raise HTTPException( + status_code=status.HTTP_409_CONFLICT, + detail=f"Document is in '{document.status}' state, not 'uploading'. Cannot mark as upload failed.", + ) + + error_message = request.error or "Upload to S3 failed" + updated = await update_document_status( + assistant_id=assistant_id, + document_id=document_id, + status="failed", + error_message=error_message, + error_details=request.details, + ) + + if not updated: + raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail="Failed to update document status") + + return DocumentResponse.model_validate(updated.model_dump(by_alias=True)) + + except HTTPException: + raise + except Exception as e: + logger.error(f"Error reporting upload failure: {e}", exc_info=True) + raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=f"Failed to report upload failure: {str(e)}") + + @router.get("", response_model=DocumentsListResponse, status_code=status.HTTP_200_OK) async def list_documents( assistant_id: str, limit: Optional[int] = None, next_token: Optional[str] = None, user_id: str = Depends(get_current_user_id) @@ -190,52 +240,25 @@ async def get_download_url(assistant_id: str, document_id: str, user_id: str = D @router.delete("/{document_id}", status_code=status.HTTP_204_NO_CONTENT) async def delete_document(assistant_id: str, document_id: str, user_id: str = Depends(get_current_user_id)) -> None: - """ - Delete document from DynamoDB, S3, and vector store - - Args: - assistant_id: Parent assistant identifier - document_id: Document identifier - user_id: Authenticated user ID from JWT - """ + """Delete document using soft-delete + background cleanup pattern.""" try: - # Get document first to get S3 key - document = await get_document_service(assistant_id, document_id, user_id) + from apis.app_api.documents.services.document_service import soft_delete_document + from apis.app_api.documents.services.cleanup_service import cleanup_document_resources + document = await soft_delete_document(assistant_id, document_id, user_id) if not document: raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=f"Document not found: {document_id}") - # Delete from DynamoDB - success = await delete_document_service(assistant_id, document_id, user_id) - - if not success: - raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail="Failed to delete document") - - # Delete S3 object - try: - import boto3 - - from apis.app_api.documents.services.storage_service import _get_documents_bucket - - s3_client = boto3.client("s3") - bucket = _get_documents_bucket() - - s3_client.delete_object(Bucket=bucket, Key=document.s3_key) - - logger.info(f"Deleted S3 object: {document.s3_key}") - except Exception as s3_error: - # Log but don't fail - DynamoDB deletion succeeded - logger.warning(f"Failed to delete S3 object {document.s3_key}: {s3_error}") - - # Delete vector store objects - try: - from apis.shared.embeddings.bedrock_embeddings import delete_vectors_for_document - - deleted_count = await delete_vectors_for_document(document_id) - logger.info(f"Deleted {deleted_count} vectors for document {document_id}") - except Exception as vector_error: - # Log but don't fail - DynamoDB and S3 deletion succeeded - logger.warning(f"Failed to delete vectors for document {document_id}: {vector_error}") + # Fire-and-forget cleanup (response already sent as 204) + import asyncio + asyncio.ensure_future( + cleanup_document_resources( + document_id=document.document_id, + assistant_id=assistant_id, + s3_key=document.s3_key, + chunk_count=document.chunk_count, + ) + ) return None diff --git a/backend/src/apis/app_api/documents/services/cleanup_service.py b/backend/src/apis/app_api/documents/services/cleanup_service.py new file mode 100644 index 00000000..8a4cf22d --- /dev/null +++ b/backend/src/apis/app_api/documents/services/cleanup_service.py @@ -0,0 +1,218 @@ +"""Cleanup service for document resource deletion with retries. + +Orchestrates deletion of vectors and S3 objects with exponential backoff +and jitter. Phases (vector deletion, S3 deletion) are independent — failure +of one does not prevent attempting the other. + +Never raises exceptions — all failures are logged and swallowed. +""" + +import asyncio +import logging +import os +import random +from typing import Optional + +import boto3 + +logger = logging.getLogger(__name__) + + +def _get_documents_bucket() -> str: + """Get documents S3 bucket name from environment.""" + bucket = os.environ.get("S3_ASSISTANTS_DOCUMENTS_BUCKET_NAME") + if not bucket: + raise ValueError("S3_ASSISTANTS_DOCUMENTS_BUCKET_NAME environment variable not set") + return bucket + + +async def cleanup_document_resources( + document_id: str, + assistant_id: str, + s3_key: str, + chunk_count: Optional[int], + max_retries: int = 3, + base_delay: float = 0.5, +) -> bool: + """ + Delete vectors and S3 source file with exponential backoff retries. + + Phase 1: Delete vectors (deterministic if chunk_count available, else probe-and-scan). + Phase 2: Delete S3 source file. + Phases are independent — failure of one does not prevent the other. + + Returns True only if both phases succeed. On True, hard-deletes the + DynamoDB record. On failure, logs and leaves the record for TTL auto-expiry. + + Never raises exceptions. + + Args: + document_id: The document identifier + assistant_id: Parent assistant identifier + s3_key: S3 object key for the source file + chunk_count: Number of vector chunks (None triggers probe-and-scan fallback) + max_retries: Maximum retry attempts per phase + base_delay: Base delay in seconds for exponential backoff + + Returns: + True if all resources were cleaned up successfully, False otherwise + """ + try: + vectors_deleted = await _delete_vectors_with_retries( + document_id, chunk_count, max_retries, base_delay + ) + except Exception as e: + logger.error(f"Unexpected error in vector deletion for {document_id}: {e}", exc_info=True) + vectors_deleted = False + + try: + s3_deleted = await _delete_s3_with_retries( + s3_key, max_retries, base_delay + ) + except Exception as e: + logger.error(f"Unexpected error in S3 deletion for {document_id}: {e}", exc_info=True) + s3_deleted = False + + all_succeeded = vectors_deleted and s3_deleted + + if all_succeeded: + try: + from apis.app_api.documents.services.document_service import hard_delete_document + + await hard_delete_document(assistant_id, document_id) + except Exception as e: + logger.error(f"Failed to hard-delete document {document_id}: {e}", exc_info=True) + else: + logger.warning( + f"Cleanup incomplete for {document_id}: vectors={vectors_deleted}, " + f"s3={s3_deleted}. TTL will auto-expire." + ) + + return all_succeeded + + +async def _delete_vectors_with_retries( + document_id: str, + chunk_count: Optional[int], + max_retries: int, + base_delay: float, +) -> bool: + """Delete vectors with exponential backoff + jitter retries. + + Uses deterministic deletion when chunk_count is available, + falls back to probe-and-scan otherwise. + """ + from apis.shared.embeddings.bedrock_embeddings import ( + delete_vectors_for_document, + delete_vectors_for_document_deterministic, + ) + + for attempt in range(max_retries): + try: + if chunk_count is not None: + await delete_vectors_for_document_deterministic(document_id, chunk_count) + else: + await delete_vectors_for_document(document_id) + return True + except Exception as e: + delay = base_delay * (2 ** attempt) + random.uniform(0, 0.1) + logger.warning( + f"Vector deletion attempt {attempt + 1}/{max_retries} failed for " + f"{document_id}: {e}, retrying in {delay:.2f}s" + ) + if attempt < max_retries - 1: + await asyncio.sleep(delay) + + logger.error(f"Vector deletion failed after {max_retries} attempts for {document_id}") + return False + + +async def _delete_s3_with_retries( + s3_key: str, + max_retries: int, + base_delay: float, +) -> bool: + """Delete S3 source file with exponential backoff + jitter retries.""" + bucket = _get_documents_bucket() + + for attempt in range(max_retries): + try: + loop = asyncio.get_event_loop() + s3_client = boto3.client("s3") + await loop.run_in_executor( + None, + lambda: s3_client.delete_object(Bucket=bucket, Key=s3_key), + ) + return True + except Exception as e: + delay = base_delay * (2 ** attempt) + random.uniform(0, 0.1) + logger.warning( + f"S3 deletion attempt {attempt + 1}/{max_retries} failed for " + f"{s3_key}: {e}, retrying in {delay:.2f}s" + ) + if attempt < max_retries - 1: + await asyncio.sleep(delay) + + logger.error(f"S3 deletion failed after {max_retries} attempts for {s3_key}") + return False + + +async def cleanup_assistant_documents( + assistant_id: str, + documents: list, + max_retries: int = 3, +) -> tuple[int, int]: + """ + Bulk cleanup for assistant deletion. Processes documents concurrently. + Returns (success_count, failure_count). + + Each document is cleaned up via cleanup_document_resources, which + hard-deletes the DynamoDB record on success. Never raises exceptions. + + Args: + assistant_id: The assistant whose documents are being cleaned up + documents: List of Document objects to clean up + max_retries: Maximum retry attempts per document per phase + + Returns: + Tuple of (success_count, failure_count) + """ + if not documents: + return (0, 0) + + try: + results = await asyncio.gather( + *( + cleanup_document_resources( + document_id=doc.document_id, + assistant_id=assistant_id, + s3_key=doc.s3_key, + chunk_count=doc.chunk_count, + max_retries=max_retries, + ) + for doc in documents + ), + return_exceptions=True, + ) + except Exception as e: + logger.error( + f"Unexpected error in bulk cleanup for assistant {assistant_id}: {e}", + exc_info=True, + ) + return (0, len(documents)) + + success_count = 0 + failure_count = 0 + for result in results: + if result is True: + success_count += 1 + else: + failure_count += 1 + + logger.info( + f"Bulk cleanup for assistant {assistant_id}: " + f"{success_count} succeeded, {failure_count} failed " + f"out of {len(documents)} documents" + ) + + return (success_count, failure_count) diff --git a/backend/src/apis/app_api/documents/services/document_service.py b/backend/src/apis/app_api/documents/services/document_service.py index 64a5cf88..fc645642 100644 --- a/backend/src/apis/app_api/documents/services/document_service.py +++ b/backend/src/apis/app_api/documents/services/document_service.py @@ -444,6 +444,10 @@ async def list_assistant_documents( except Exception as e: logger.error(f"Failed to auto-fail stale document {doc.document_id}: {e}") + # Skip documents in "deleting" status (soft-deleted, invisible to users) + if doc.status == "deleting": + continue + documents.append(doc) # Generate next_token from LastEvaluatedKey @@ -525,3 +529,218 @@ async def delete_document( except Exception as e: logger.error(f"Failed to delete document: {e}", exc_info=True) return False + + +async def soft_delete_document( + assistant_id: str, + document_id: str, + owner_id: str, + ttl_days: int = 7, +) -> Optional[Document]: + """ + Atomically mark a document as 'deleting' and set a TTL for auto-expiry. + + Returns the document (with chunk_count, s3_key) needed for cleanup. + Returns None if document not found or not owned by user. + Re-deleting a document already in "deleting" status is idempotent. + + Args: + assistant_id: Parent assistant identifier + document_id: Document identifier + owner_id: User identifier (for ownership verification) + ttl_days: Number of days until DynamoDB TTL auto-expires the record + + Returns: + Updated Document object if found and owned, None otherwise + """ + import time + + try: + import boto3 + from botocore.exceptions import ClientError + from apis.shared.assistants.service import get_assistant + except ImportError: + logger.error("boto3 is required for DynamoDB operations") + return None + + # Verify assistant ownership first + assistant = await get_assistant(assistant_id, owner_id) + if not assistant: + logger.warning(f"Access denied: assistant {assistant_id} not owned by user {owner_id}") + return None + + table_name = os.environ.get('DYNAMODB_ASSISTANTS_TABLE_NAME') + if not table_name: + logger.error("DYNAMODB_ASSISTANTS_TABLE_NAME environment variable not set") + return None + + dynamodb = boto3.resource('dynamodb') + table = dynamodb.Table(table_name) + + now = _get_current_timestamp() + ttl_value = int(time.time()) + ttl_days * 86400 + + try: + response = table.update_item( + Key={ + 'PK': f'AST#{assistant_id}', + 'SK': f'DOC#{document_id}' + }, + UpdateExpression='SET #status = :deleting, updatedAt = :now, #ttl = :ttl_value', + ExpressionAttributeNames={ + '#status': 'status', + '#ttl': 'ttl' + }, + ExpressionAttributeValues={ + ':deleting': 'deleting', + ':now': now, + ':ttl_value': ttl_value + }, + ConditionExpression='attribute_exists(PK)', + ReturnValues='ALL_NEW' + ) + + if 'Attributes' in response: + try: + return Document.model_validate(response['Attributes']) + except Exception as e: + logger.warning(f"Failed to parse document from DynamoDB response: {e}") + return None + + return None + + except ClientError as e: + error_code = e.response.get('Error', {}).get('Code', 'Unknown') + if error_code == 'ConditionalCheckFailedException': + logger.info(f"Document {document_id} not found for assistant {assistant_id}") + return None + logger.error(f"Failed to soft-delete document in DynamoDB: {error_code} - {e}") + return None + except Exception as e: + logger.error(f"Failed to soft-delete document: {e}", exc_info=True) + return None + + +async def hard_delete_document( + assistant_id: str, + document_id: str, +) -> bool: + """ + Unconditionally remove the DynamoDB record. Called after successful + cleanup of S3 and vectors. No ownership check needed — caller has + already verified ownership during soft-delete. + + Args: + assistant_id: Parent assistant identifier + document_id: Document identifier + + Returns: + True if deleted successfully, False otherwise + """ + try: + import boto3 + from botocore.exceptions import ClientError + except ImportError: + logger.error("boto3 is required for DynamoDB operations") + return False + + table_name = os.environ.get('DYNAMODB_ASSISTANTS_TABLE_NAME') + if not table_name: + logger.error("DYNAMODB_ASSISTANTS_TABLE_NAME environment variable not set") + return False + + dynamodb = boto3.resource('dynamodb') + table = dynamodb.Table(table_name) + + try: + table.delete_item( + Key={ + 'PK': f'AST#{assistant_id}', + 'SK': f'DOC#{document_id}' + } + ) + + logger.info(f"Hard-deleted document {document_id} for assistant {assistant_id}") + return True + + except ClientError as e: + error_code = e.response.get('Error', {}).get('Code', 'Unknown') + logger.error(f"Failed to hard-delete document from DynamoDB: {error_code} - {e}") + return False + except Exception as e: + logger.error(f"Failed to hard-delete document: {e}", exc_info=True) + return False + + +async def batch_soft_delete_documents( + assistant_id: str, + document_ids: list[str], + ttl_days: int = 7, +) -> int: + """ + Batch soft-delete multiple documents for an assistant. + Used during assistant deletion. Returns count of documents marked. + + No ownership check — caller (assistant delete endpoint) has already + verified ownership. + + Args: + assistant_id: Parent assistant identifier + document_ids: List of document IDs to soft-delete + ttl_days: Number of days until DynamoDB TTL auto-expires the records + + Returns: + Count of documents successfully marked as "deleting" + """ + import time + + try: + import boto3 + from botocore.exceptions import ClientError + except ImportError: + logger.error("boto3 is required for DynamoDB operations") + return 0 + + table_name = os.environ.get('DYNAMODB_ASSISTANTS_TABLE_NAME') + if not table_name: + logger.error("DYNAMODB_ASSISTANTS_TABLE_NAME environment variable not set") + return 0 + + dynamodb = boto3.resource('dynamodb') + table = dynamodb.Table(table_name) + + now = _get_current_timestamp() + ttl_value = int(time.time()) + ttl_days * 86400 + + marked_count = 0 + for document_id in document_ids: + try: + table.update_item( + Key={ + 'PK': f'AST#{assistant_id}', + 'SK': f'DOC#{document_id}' + }, + UpdateExpression='SET #status = :deleting, updatedAt = :now, #ttl = :ttl_value', + ExpressionAttributeNames={ + '#status': 'status', + '#ttl': 'ttl' + }, + ExpressionAttributeValues={ + ':deleting': 'deleting', + ':now': now, + ':ttl_value': ttl_value + }, + ConditionExpression='attribute_exists(PK)', + ) + marked_count += 1 + except ClientError as e: + error_code = e.response.get('Error', {}).get('Code', 'Unknown') + if error_code == 'ConditionalCheckFailedException': + logger.info(f"Document {document_id} not found for assistant {assistant_id}, skipping") + continue + logger.error(f"Failed to soft-delete document {document_id}: {error_code} - {e}") + except Exception as e: + logger.error(f"Unexpected error soft-deleting document {document_id}: {e}", exc_info=True) + + logger.info(f"Batch soft-deleted {marked_count}/{len(document_ids)} documents for assistant {assistant_id}") + return marked_count diff --git a/backend/src/apis/app_api/files/routes.py b/backend/src/apis/app_api/files/routes.py index 3e9476f5..f7e9fa54 100644 --- a/backend/src/apis/app_api/files/routes.py +++ b/backend/src/apis/app_api/files/routes.py @@ -116,7 +116,7 @@ async def complete_upload( Call this after successfully uploading the file using the pre-signed URL. This verifies the S3 object exists and updates the file status to 'ready'. """ - logger.info(f"User {user.email} completing upload {upload_id}") + logger.info("User completing upload") try: response = await service.complete_upload(user.user_id, upload_id) @@ -129,7 +129,7 @@ async def complete_upload( ) except FileUploadError as e: - logger.warning(f"Upload completion error for {upload_id}: {e}") + logger.warning("Upload completion error") raise HTTPException( status_code=status.HTTP_409_CONFLICT, detail=str(e), @@ -176,11 +176,7 @@ async def list_files( Optionally filter by session/conversation. Returns only files with 'ready' status. Supports sorting by date (default), size, or type. """ - logger.info( - f"User {user.email} listing files" - + (f" for session {session_id}" if session_id else "") - + f" (sort: {sort_by.value} {sort_order.value})" - ) + logger.info("User listing files") response = await service.list_user_files( user_id=user.user_id, @@ -206,7 +202,7 @@ async def delete_file( Use this when a user removes an attached file before sending, or when manually deleting from the file browser. """ - logger.info(f"User {user.email} deleting file {upload_id}") + logger.info("User deleting file") deleted = await service.delete_file(user.user_id, upload_id) diff --git a/backend/src/apis/app_api/files/service.py b/backend/src/apis/app_api/files/service.py index 53f23b40..eeec0935 100644 --- a/backend/src/apis/app_api/files/service.py +++ b/backend/src/apis/app_api/files/service.py @@ -8,7 +8,7 @@ import logging import uuid from datetime import datetime, timedelta, timezone -from typing import List, Optional, Tuple +from typing import List, Optional import boto3 from botocore.config import Config @@ -17,7 +17,6 @@ from apis.shared.files.models import ( FileMetadata, FileStatus, - UserFileQuota, PresignRequest, PresignResponse, CompleteUploadResponse, @@ -285,14 +284,14 @@ async def complete_upload( raise # Update status to READY - updated = await self.repository.update_file_status( + await self.repository.update_file_status( user_id, upload_id, FileStatus.READY ) # Increment quota await self.repository.increment_quota(user_id, file_meta.size_bytes) - logger.info(f"Completed upload {upload_id} for user {user_id}") + logger.info("Completed file upload") return CompleteUploadResponse( upload_id=upload_id, @@ -342,7 +341,7 @@ async def delete_file(self, user_id: str, upload_id: str) -> bool: Key=file_meta.s3_key, ) except ClientError as e: - logger.warning(f"Failed to delete S3 object for {upload_id}: {e}") + logger.warning("Failed to delete S3 object", exc_info=True) # Continue with metadata deletion even if S3 fails # Delete metadata @@ -352,7 +351,7 @@ async def delete_file(self, user_id: str, upload_id: str) -> bool: if deleted and file_meta.status == FileStatus.READY: await self.repository.decrement_quota(user_id, file_meta.size_bytes) - logger.info(f"Deleted file {upload_id} for user {user_id}") + logger.info("Deleted file") return True async def list_user_files( diff --git a/backend/src/apis/app_api/fine_tuning/dependencies.py b/backend/src/apis/app_api/fine_tuning/dependencies.py index 6d11627a..268894ca 100644 --- a/backend/src/apis/app_api/fine_tuning/dependencies.py +++ b/backend/src/apis/app_api/fine_tuning/dependencies.py @@ -1,18 +1,20 @@ """FastAPI dependencies for fine-tuning access control and services.""" +import os import logging from fastapi import Depends, HTTPException, status from apis.shared.auth import User from apis.shared.auth.dependencies import get_current_user from .repository import FineTuningAccessRepository, get_fine_tuning_access_repository -from .job_repository import FineTuningJobsRepository, get_fine_tuning_jobs_repository -from .s3_service import FineTuningS3Service, get_fine_tuning_s3_service -from .sagemaker_service import SageMakerService, get_sagemaker_service -from .inference_repository import InferenceRepository, get_inference_repository -from .script_packaging_service import ScriptPackagingService, get_script_packaging_service logger = logging.getLogger(__name__) +# Default monthly GPU-hour quota for users without an explicit grant. +# Set to 0 to revert to whitelist-only mode (original behaviour). +DEFAULT_MONTHLY_QUOTA_HOURS = float( + os.environ.get("FINE_TUNING_DEFAULT_QUOTA_HOURS", "0") +) + async def require_fine_tuning_access( user: User = Depends(get_current_user), @@ -20,19 +22,46 @@ async def require_fine_tuning_access( ) -> dict: """FastAPI dependency that enforces fine-tuning access. - Checks the user's email against the fine-tuning-access table. + Behaviour depends on ``FINE_TUNING_DEFAULT_QUOTA_HOURS``: + + * **0 (default / whitelist mode):** Only users with an explicit grant + in the ``fine-tuning-access`` table are allowed. Anyone else + receives a 403. + * **> 0 (open-access mode):** All authenticated users are allowed. + On first use, a grant is auto-created with the configured default + quota so that the existing quota-tracking machinery keeps working. + Also performs lazy quota period reset if a new month has started. - Returns the access grant dict if the user has access. - Raises HTTPException 403 if the user is not whitelisted. + Returns the access grant dict. """ grant = repo.check_and_reset_quota(user.email) - if grant is None: - logger.warning(f"Fine-tuning access denied for {user.email}") - raise HTTPException( - status_code=status.HTTP_403_FORBIDDEN, - detail="You do not have access to fine-tuning features. Contact an administrator to request access.", + if grant is not None: + return grant + + # No explicit grant exists for this user. + if DEFAULT_MONTHLY_QUOTA_HOURS > 0: + # Open-access mode: auto-provision a grant with the default quota. + logger.info( + f"Auto-provisioning fine-tuning access for {user.email} " + f"with {DEFAULT_MONTHLY_QUOTA_HOURS}h default quota" ) + try: + new_grant = repo.grant_access( + email=user.email, + granted_by="system-default", + monthly_quota_hours=DEFAULT_MONTHLY_QUOTA_HOURS, + ) + return new_grant + except ValueError: + # Race condition: another request created the grant first. + grant = repo.check_and_reset_quota(user.email) + if grant is not None: + return grant - return grant + logger.warning(f"Fine-tuning access denied for {user.email}") + raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail="You do not have access to fine-tuning features. Contact an administrator to request access.", + ) diff --git a/backend/src/apis/app_api/fine_tuning/inference_repository.py b/backend/src/apis/app_api/fine_tuning/inference_repository.py index 8f328eb6..94306dc4 100644 --- a/backend/src/apis/app_api/fine_tuning/inference_repository.py +++ b/backend/src/apis/app_api/fine_tuning/inference_repository.py @@ -125,7 +125,7 @@ def get_inference_job(self, user_id: str, job_id: str) -> Optional[dict]: return None return self._item_to_dict(item) except ClientError as e: - logger.error(f"Error getting inference job {job_id}: {e}") + logger.error("Error getting inference job") raise def list_user_inference_jobs(self, user_id: str) -> List[dict]: @@ -209,6 +209,61 @@ def list_all_inference_jobs(self, status_filter: Optional[str] = None) -> List[d logger.error(f"Error listing all inference jobs: {e}") raise + def query_jobs_by_status_and_date( + self, + status_value: str, + start_date: str, + end_date: str, + ) -> List[dict]: + """Query the StatusIndex GSI for inference jobs with a given status in a date range. + + Args: + status_value: Job status (e.g. "Completed", "Stopped"). + start_date: ISO date string (inclusive lower bound on createdAt). + end_date: ISO date string (inclusive upper bound on createdAt). + + Returns: + List of inference job dicts. + """ + try: + items: List[dict] = [] + response = self._table.query( + IndexName="StatusIndex", + KeyConditionExpression="#s = :status AND createdAt BETWEEN :start AND :end", + FilterExpression="job_type = :jt", + ExpressionAttributeNames={"#s": "status"}, + ExpressionAttributeValues={ + ":status": status_value, + ":start": start_date, + ":end": end_date, + ":jt": "inference", + }, + ScanIndexForward=False, + ) + items.extend(response.get("Items", [])) + + while "LastEvaluatedKey" in response: + response = self._table.query( + IndexName="StatusIndex", + KeyConditionExpression="#s = :status AND createdAt BETWEEN :start AND :end", + FilterExpression="job_type = :jt", + ExpressionAttributeNames={"#s": "status"}, + ExpressionAttributeValues={ + ":status": status_value, + ":start": start_date, + ":end": end_date, + ":jt": "inference", + }, + ScanIndexForward=False, + ExclusiveStartKey=response["LastEvaluatedKey"], + ) + items.extend(response.get("Items", [])) + + return [self._item_to_dict(item) for item in items] + except ClientError as e: + logger.error(f"Error querying inference jobs by status={status_value}: {e}") + raise + def update_inference_status( self, user_id: str, @@ -230,7 +285,7 @@ def update_inference_status( field_map = { "transform_start_time": ("transform_start_time", None), "transform_end_time": ("transform_end_time", None), - "billable_seconds": ("billable_seconds", lambda v: int(v)), + "billable_seconds": ("billable_seconds", int), "estimated_cost_usd": ("estimated_cost_usd", lambda v: Decimal(str(v))), "error_message": ("error_message", None), "result_s3_key": ("result_s3_key", None), diff --git a/backend/src/apis/app_api/fine_tuning/job_repository.py b/backend/src/apis/app_api/fine_tuning/job_repository.py index 3fff9c3f..7c428ea2 100644 --- a/backend/src/apis/app_api/fine_tuning/job_repository.py +++ b/backend/src/apis/app_api/fine_tuning/job_repository.py @@ -2,7 +2,6 @@ import os import logging -import uuid from datetime import datetime, timezone from decimal import Decimal from typing import Optional, List, Dict, Any @@ -125,7 +124,7 @@ def get_job(self, user_id: str, job_id: str) -> Optional[dict]: return None return self._item_to_dict(item) except ClientError as e: - logger.error(f"Error getting job {job_id}: {e}") + logger.error("Error getting job") raise def list_user_jobs(self, user_id: str) -> List[dict]: @@ -218,7 +217,7 @@ def update_job_status( field_map = { "training_start_time": ("training_start_time", None), "training_end_time": ("training_end_time", None), - "billable_seconds": ("billable_seconds", lambda v: int(v)), + "billable_seconds": ("billable_seconds", int), "estimated_cost_usd": ("estimated_cost_usd", lambda v: Decimal(str(v))), "error_message": ("error_message", None), "training_progress": ("training_progress", lambda v: Decimal(str(v))), @@ -250,6 +249,57 @@ def update_job_status( return None raise + def query_jobs_by_status_and_date( + self, + status_value: str, + start_date: str, + end_date: str, + ) -> List[dict]: + """Query the StatusIndex GSI for jobs with a given status in a date range. + + Args: + status_value: Job status (e.g. "Completed", "Stopped"). + start_date: ISO date string (inclusive lower bound on createdAt). + end_date: ISO date string (inclusive upper bound on createdAt). + + Returns: + List of job dicts. + """ + try: + items: List[dict] = [] + response = self._table.query( + IndexName="StatusIndex", + KeyConditionExpression="#s = :status AND createdAt BETWEEN :start AND :end", + ExpressionAttributeNames={"#s": "status"}, + ExpressionAttributeValues={ + ":status": status_value, + ":start": start_date, + ":end": end_date, + }, + ScanIndexForward=False, + ) + items.extend(response.get("Items", [])) + + while "LastEvaluatedKey" in response: + response = self._table.query( + IndexName="StatusIndex", + KeyConditionExpression="#s = :status AND createdAt BETWEEN :start AND :end", + ExpressionAttributeNames={"#s": "status"}, + ExpressionAttributeValues={ + ":status": status_value, + ":start": start_date, + ":end": end_date, + }, + ScanIndexForward=False, + ExclusiveStartKey=response["LastEvaluatedKey"], + ) + items.extend(response.get("Items", [])) + + return [self._item_to_dict(item) for item in items] + except ClientError as e: + logger.error(f"Error querying jobs by status={status_value}: {e}") + raise + def delete_job(self, user_id: str, job_id: str) -> bool: """Delete a job record. Returns False if not found.""" try: diff --git a/backend/src/apis/app_api/fine_tuning/routes.py b/backend/src/apis/app_api/fine_tuning/routes.py index 87dfe87c..4daf6b54 100644 --- a/backend/src/apis/app_api/fine_tuning/routes.py +++ b/backend/src/apis/app_api/fine_tuning/routes.py @@ -20,7 +20,6 @@ from .job_models import ( AVAILABLE_MODELS, MODEL_CATALOG, - AvailableModel, PresignRequest, PresignResponse, CreateJobRequest, @@ -59,17 +58,28 @@ async def check_access( This endpoint does NOT require fine-tuning access — it is used by the frontend to decide whether to show the fine-tuning UI. """ + from .dependencies import DEFAULT_MONTHLY_QUOTA_HOURS + grant = repo.check_and_reset_quota(user.email) - if grant is None: - return FineTuningAccessResponse(has_access=False) + if grant is not None: + return FineTuningAccessResponse( + has_access=True, + monthly_quota_hours=grant["monthly_quota_hours"], + current_month_usage_hours=grant["current_month_usage_hours"], + quota_period=grant["quota_period"], + ) - return FineTuningAccessResponse( - has_access=True, - monthly_quota_hours=grant["monthly_quota_hours"], - current_month_usage_hours=grant["current_month_usage_hours"], - quota_period=grant["quota_period"], - ) + # No explicit grant — check if open-access mode is enabled. + if DEFAULT_MONTHLY_QUOTA_HOURS > 0: + return FineTuningAccessResponse( + has_access=True, + monthly_quota_hours=DEFAULT_MONTHLY_QUOTA_HOURS, + current_month_usage_hours=0.0, + quota_period=None, + ) + + return FineTuningAccessResponse(has_access=False) # ========================================================================= @@ -720,7 +730,7 @@ async def create_inference_job( input_s3_uri = f"s3://{s3_service.bucket_name}/{request.input_s3_key}" # Create DynamoDB record - job = inf_repo.create_inference_job( + inf_repo.create_inference_job( user_id=user.user_id, email=user.email, job_id=job_id, diff --git a/backend/src/apis/app_api/fine_tuning/s3_service.py b/backend/src/apis/app_api/fine_tuning/s3_service.py index 22db7844..35de9898 100644 --- a/backend/src/apis/app_api/fine_tuning/s3_service.py +++ b/backend/src/apis/app_api/fine_tuning/s3_service.py @@ -3,7 +3,6 @@ import os import logging import uuid -from datetime import datetime, timedelta from typing import Optional, Tuple import boto3 diff --git a/backend/src/apis/app_api/fine_tuning/sagemaker_scripts/inference.py b/backend/src/apis/app_api/fine_tuning/sagemaker_scripts/inference.py index 75fcfc93..fda1ed3e 100644 --- a/backend/src/apis/app_api/fine_tuning/sagemaker_scripts/inference.py +++ b/backend/src/apis/app_api/fine_tuning/sagemaker_scripts/inference.py @@ -19,10 +19,6 @@ # available in the SageMaker DLC container. input_fn, output_fn, and # _sanitize_label must remain importable without torch/transformers so they # can be unit-tested locally. -try: - import numpy as np -except ImportError: # pragma: no cover - np = None logger = logging.getLogger(__name__) diff --git a/backend/src/apis/app_api/main.py b/backend/src/apis/app_api/main.py index 97f6efc3..a533c6cd 100644 --- a/backend/src/apis/app_api/main.py +++ b/backend/src/apis/app_api/main.py @@ -28,8 +28,6 @@ format='%(asctime)s - %(name)s - %(levelname)s - %(message)s' ) logger = logging.getLogger(__name__) - -# Lifespan event handler (replaces on_event) @asynccontextmanager async def lifespan(app: FastAPI): # Startup diff --git a/backend/src/apis/app_api/memory/routes.py b/backend/src/apis/app_api/memory/routes.py index 1ce4d54f..4d19bb15 100644 --- a/backend/src/apis/app_api/memory/routes.py +++ b/backend/src/apis/app_api/memory/routes.py @@ -46,7 +46,7 @@ async def get_memory_status( Requires JWT authentication. """ - logger.info(f"GET /memory/status - User: {current_user.user_id}") + logger.info("GET /memory/status") config_info = get_memory_config_info() @@ -81,7 +81,7 @@ async def get_preferences_endpoint( """ user_id = current_user.user_id - logger.info(f"GET /memory/preferences - User: {user_id}, Query: {query}, TopK: {top_k}") + logger.info("GET /memory/preferences") if not is_memory_available(): raise HTTPException( @@ -146,7 +146,7 @@ async def get_facts_endpoint( """ user_id = current_user.user_id - logger.info(f"GET /memory/facts - User: {user_id}, Query: {query}, TopK: {top_k}") + logger.info("GET /memory/facts") if not is_memory_available(): raise HTTPException( @@ -217,7 +217,7 @@ async def get_summaries_endpoint( """ user_id = current_user.user_id - logger.info(f"GET /memory/summaries/{session_id} - User: {user_id}, Query: {query}, TopK: {top_k}") + logger.info("GET /memory/summaries") if not is_memory_available(): raise HTTPException( @@ -277,7 +277,7 @@ async def get_all_memories_endpoint( """ user_id = current_user.user_id - logger.info(f"GET /memory - User: {user_id}, TopK: {top_k}") + logger.info("GET /memory") if not is_memory_available(): raise HTTPException( @@ -355,7 +355,7 @@ async def search_memories_endpoint( """ user_id = current_user.user_id - logger.info(f"POST /memory/search - User: {user_id}, Query: {request.query}") + logger.info("POST /memory/search") if not is_memory_available(): raise HTTPException( @@ -420,9 +420,7 @@ async def get_strategies_endpoint( Returns: StrategiesResponse with list of strategies """ - user_id = current_user.user_id - - logger.info(f"GET /memory/strategies - User: {user_id}") + logger.info("GET /memory/strategies") if not is_memory_available(): raise HTTPException( @@ -479,7 +477,7 @@ async def delete_memory_endpoint( """ user_id = current_user.user_id - logger.info(f"DELETE /memory/{record_id} - User: {user_id}") + logger.info("DELETE /memory") if not is_memory_available(): raise HTTPException( diff --git a/backend/src/apis/app_api/memory/services/memory_service.py b/backend/src/apis/app_api/memory/services/memory_service.py index 728f0ba2..4eee5040 100644 --- a/backend/src/apis/app_api/memory/services/memory_service.py +++ b/backend/src/apis/app_api/memory/services/memory_service.py @@ -148,7 +148,7 @@ async def get_user_preferences( search_query = query or "user preferences settings behavior" try: - logger.info(f"Retrieving preferences for user {user_id} from namespace {namespace}") + logger.info("Retrieving preferences from memory") memories = client.retrieve_memories( memory_id=config.memory_id, namespace=namespace, @@ -193,7 +193,7 @@ async def get_user_facts( search_query = query or "information facts knowledge" try: - logger.info(f"Retrieving facts for user {user_id} from namespace {namespace}") + logger.info("Retrieving facts from memory") memories = client.retrieve_memories( memory_id=config.memory_id, namespace=namespace, @@ -244,7 +244,7 @@ async def get_session_summaries( search_query = query or "conversation summary topics decisions" try: - logger.info(f"Retrieving summaries for user {user_id}, session {session_id} from namespace {namespace}") + logger.info("Retrieving session summaries from memory") memories = client.retrieve_memories( memory_id=config.memory_id, namespace=namespace, @@ -295,7 +295,7 @@ async def search_memories( return [] try: - logger.info(f"Searching memories for user {user_id} in namespace {namespace}") + logger.info("Searching memories") memories = client.retrieve_memories( memory_id=config.memory_id, namespace=namespace, @@ -323,7 +323,7 @@ async def get_memory_strategies() -> List[Dict[str, Any]]: config = load_memory_config() try: - logger.info(f"Getting memory strategies for memory {config.memory_id}") + logger.info("Getting memory strategies") strategies = client.get_memory_strategies(memory_id=config.memory_id) logger.info(f"Retrieved {len(strategies)} strategies") return strategies @@ -445,7 +445,7 @@ async def delete_memory( # Use boto3 directly since MemoryClient doesn't expose delete methods client = boto3.client('bedrock-agentcore', region_name=config.region) - logger.info(f"Attempting to delete memory record {record_id} from memory {config.memory_id}") + logger.info("Attempting to delete memory record") # Use batch_delete_memory_records API response = client.batch_delete_memory_records( @@ -458,16 +458,15 @@ async def delete_memory( failed = response.get('failedRecords', []) if successful: - logger.info(f"Successfully deleted memory record {record_id}") + logger.info("Successfully deleted memory record") return True elif failed: - error_msg = failed[0].get('errorMessage', 'Unknown error') - logger.warning(f"Failed to delete memory record {record_id}: {error_msg}") + logger.warning("Failed to delete memory record: %s", failed[0].get('errorMessage', 'Unknown error')) return False else: - logger.info(f"Delete request processed for {record_id}") + logger.info("Delete request processed for memory record") return True except Exception as e: - logger.error(f"Failed to delete memory {record_id}: {e}", exc_info=True) + logger.error("Failed to delete memory record", exc_info=True) return False diff --git a/backend/src/apis/app_api/messages/models.py b/backend/src/apis/app_api/messages/models.py index 4fbb45da..8269a233 100644 --- a/backend/src/apis/app_api/messages/models.py +++ b/backend/src/apis/app_api/messages/models.py @@ -117,6 +117,7 @@ class MessageMetadata(BaseModel): attribution: Optional[Attribution] = Field(None, description="Attribution for cost tracking and billing") cost: Optional[float] = Field(None, description="Total cost in USD for this message (computed from token usage and pricing)") citations: Optional[List[Dict[str, str]]] = Field(None, description="RAG citations for this message (stored as dicts for flexible JSON storage)") + display_text: Optional[str] = Field(None, alias="displayText", description="Original user message text before RAG augmentation (for clean UI display)") # Note: Feedback will be added in future implementation # feedback: Optional[Feedback] = None diff --git a/backend/src/apis/app_api/sessions/routes.py b/backend/src/apis/app_api/sessions/routes.py index 9c5d71fa..d540b797 100644 --- a/backend/src/apis/app_api/sessions/routes.py +++ b/backend/src/apis/app_api/sessions/routes.py @@ -21,6 +21,7 @@ from apis.shared.sessions.messages import get_messages from apis.shared.sessions.metadata import store_session_metadata, get_session_metadata, list_user_sessions from .services.session_service import SessionService +from apis.app_api.shares.service import get_share_service from apis.shared.auth.dependencies import get_current_user from apis.shared.auth.models import User @@ -56,7 +57,7 @@ async def list_user_sessions_endpoint( """ user_id = current_user.user_id - logger.info(f"GET /sessions - User: {user_id}, Limit: {limit}, NextToken: {next_token}") + logger.info("GET /sessions - listing user sessions") try: # Retrieve sessions for the user with pagination @@ -80,7 +81,7 @@ async def list_user_sessions_endpoint( ) except Exception as e: - logger.error(f"Error listing user sessions: {e}", exc_info=True) + logger.error("Error listing user sessions", exc_info=True) raise HTTPException( status_code=500, detail=f"Failed to list user sessions: {str(e)}" @@ -112,7 +113,7 @@ async def get_session_metadata_endpoint( """ user_id = current_user.user_id - logger.info(f"GET /sessions/{session_id}/metadata - User: {user_id}") + logger.info("GET /sessions/metadata - retrieving session metadata") try: # Retrieve session metadata @@ -135,7 +136,7 @@ async def get_session_metadata_endpoint( except HTTPException: raise except Exception as e: - logger.error(f"Error retrieving session metadata: {e}", exc_info=True) + logger.error("Error retrieving session metadata", exc_info=True) raise HTTPException( status_code=500, detail=f"Failed to retrieve session metadata: {str(e)}" @@ -170,7 +171,7 @@ async def update_session_metadata_endpoint( """ user_id = current_user.user_id - logger.info(f"PUT /sessions/{session_id}/metadata - User: {user_id}") + logger.info("PUT /sessions/metadata - updating session metadata") try: # Get existing metadata or create new @@ -280,7 +281,7 @@ async def update_session_metadata_endpoint( except HTTPException: raise except Exception as e: - logger.error(f"Error updating session metadata: {e}", exc_info=True) + logger.error("Error updating session metadata", exc_info=True) raise HTTPException( status_code=500, detail=f"Failed to update session metadata: {str(e)}" @@ -321,7 +322,7 @@ async def delete_session_endpoint( """ user_id = current_user.user_id - logger.info(f"DELETE /sessions/{session_id} - User: {user_id}") + logger.info("DELETE /sessions - deleting session") try: service = SessionService() @@ -352,14 +353,21 @@ async def delete_session_endpoint( session_id ) - logger.info(f"Successfully deleted session {session_id} for user {user_id}") + # 3. Delete share snapshots so share links stop working + share_service = get_share_service() + background_tasks.add_task( + share_service.delete_shares_for_session, + session_id + ) + + logger.info("Successfully deleted session") return Response(status_code=204) except HTTPException: raise except Exception as e: - logger.error(f"Error deleting session: {e}", exc_info=True) + logger.error("Error deleting session", exc_info=True) raise HTTPException( status_code=500, detail=f"Failed to delete session: {str(e)}" @@ -403,7 +411,7 @@ async def bulk_delete_sessions_endpoint( user_id = current_user.user_id session_ids = request.session_ids - logger.info(f"POST /sessions/bulk-delete - User: {user_id}, Count: {len(session_ids)}") + logger.info("POST /sessions/bulk-delete - bulk deleting sessions") results = [] deleted_count = 0 @@ -411,6 +419,7 @@ async def bulk_delete_sessions_endpoint( try: service = SessionService() + share_service = get_share_service() for session_id in session_ids: try: @@ -430,6 +439,10 @@ async def bulk_delete_sessions_endpoint( service.delete_session_files, session_id ) + background_tasks.add_task( + share_service.delete_shares_for_session, + session_id + ) results.append(BulkDeleteSessionResult( session_id=session_id, success=True, @@ -445,7 +458,7 @@ async def bulk_delete_sessions_endpoint( failed_count += 1 except Exception as e: - logger.warning(f"Failed to delete session {session_id}: {e}") + logger.warning("Failed to delete session in bulk operation") results.append(BulkDeleteSessionResult( session_id=session_id, success=False, @@ -453,10 +466,7 @@ async def bulk_delete_sessions_endpoint( )) failed_count += 1 - logger.info( - f"Bulk delete completed for user {user_id}: " - f"{deleted_count} deleted, {failed_count} failed" - ) + logger.info("Bulk delete completed") return BulkDeleteSessionsResponse( deleted_count=deleted_count, @@ -465,7 +475,7 @@ async def bulk_delete_sessions_endpoint( ) except Exception as e: - logger.error(f"Error in bulk delete sessions: {e}", exc_info=True) + logger.error("Error in bulk delete sessions", exc_info=True) raise HTTPException( status_code=500, detail=f"Failed to bulk delete sessions: {str(e)}" @@ -503,7 +513,7 @@ async def get_session_messages_endpoint( """ user_id = current_user.user_id - logger.info(f"GET /sessions/{session_id}/messages - User: {user_id}, Limit: {limit}, NextToken: {next_token}") + logger.info("GET /sessions/messages - retrieving session messages") try: # Retrieve messages from storage (cloud or local) with pagination @@ -514,24 +524,24 @@ async def get_session_messages_endpoint( next_token=next_token ) - logger.info(f"Successfully retrieved {len(response.messages)} messages for session {session_id}") + logger.info("Successfully retrieved session messages") return response except ValueError as e: - logger.error(f"Configuration error: {e}") + logger.error("Configuration error retrieving messages") raise HTTPException( status_code=500, detail=f"Server configuration error: {str(e)}" ) except FileNotFoundError as e: - logger.warning(f"Session not found: {session_id}") + logger.warning("Session not found") raise HTTPException( status_code=404, detail=f"Session not found: {session_id}" ) except Exception as e: - logger.error(f"Error retrieving messages: {e}", exc_info=True) + logger.error("Error retrieving messages", exc_info=True) raise HTTPException( status_code=500, detail=f"Failed to retrieve messages: {str(e)}" diff --git a/backend/src/apis/app_api/sessions/services/metadata.py b/backend/src/apis/app_api/sessions/services/metadata.py index c180db83..7c372a03 100644 --- a/backend/src/apis/app_api/sessions/services/metadata.py +++ b/backend/src/apis/app_api/sessions/services/metadata.py @@ -296,7 +296,7 @@ async def _update_cost_summary_async( date = now.strftime('%Y-%m-%d') # Use storage abstraction for the atomic update - from apis.app_api.storage.metadata_storage import get_metadata_storage + from apis.app_api.storage import get_metadata_storage storage = get_metadata_storage() await storage.update_user_cost_summary( @@ -685,11 +685,11 @@ async def get_all_message_metadata(session_id: str, user_id: str) -> Dict[str, A async def _get_all_message_metadata_cloud(session_id: str, user_id: str, table_name: str) -> Dict[str, Any]: """ - Retrieve all message metadata (cost records) for a session from DynamoDB + Retrieve all message metadata (cost records + display text) for a session from DynamoDB - Uses the SessionLookupIndex GSI to query cost records by session ID. - Cost records have SK pattern: C#{timestamp}#{uuid} - GSI pattern: GSI_PK=SESSION#{session_id}, GSI_SK=C#{timestamp} + Uses the SessionLookupIndex GSI to query records by session ID. + Cost records have SK pattern: C#{timestamp}#{uuid}, GSI_SK: C#{timestamp} + Display text records have SK pattern: D#{session_id}#{message_id}, GSI_SK: D#{message_id} Args: session_id: Session identifier @@ -708,18 +708,21 @@ async def _get_all_message_metadata_cloud(session_id: str, user_id: str, table_n logger.info(f"🔍 Querying cost records via GSI for session {session_id}") - # Query cost records for this session using GSI - # GSI_PK: SESSION#{session_id} - # GSI_SK: begins_with C# for cost records - response = table.query( + # Query cost records (C#) and display text records (D#) in parallel + cost_response = table.query( IndexName='SessionLookupIndex', KeyConditionExpression=Key('GSI_PK').eq(f'SESSION#{session_id}') & Key('GSI_SK').begins_with('C#') ) + display_response = table.query( + IndexName='SessionLookupIndex', + KeyConditionExpression=Key('GSI_PK').eq(f'SESSION#{session_id}') & Key('GSI_SK').begins_with('D#') + ) - items = response.get("Items", []) + items = cost_response.get("Items", []) + display_items = display_response.get("Items", []) metadata_index = {} - logger.info(f"📦 DynamoDB returned {len(items)} cost record items") + logger.info(f"📦 DynamoDB returned {len(items)} cost record items, {len(display_items)} display text items") for item in items: # Verify user ownership @@ -744,6 +747,22 @@ async def _get_all_message_metadata_cloud(session_id: str, user_id: str, table_n metadata_index[message_id] = item_float logger.info(f"📂 Retrieved {len(metadata_index)} cost records from DynamoDB") + + # Merge displayText from D# records into metadata index + for item in display_items: + if item.get('userId') != user_id: + continue + item_float = _convert_decimal_to_float(item) + message_id_raw = item_float.get("messageId") + message_id = str(int(message_id_raw)) if isinstance(message_id_raw, (int, float)) else str(message_id_raw) + display_text = item_float.get("displayText") + if display_text: + if message_id in metadata_index: + metadata_index[message_id]["displayText"] = display_text + else: + metadata_index[message_id] = {"displayText": display_text} + logger.debug(f"🔗 Merged displayText for user message {message_id}") + logger.info(f"📋 Metadata keys: {sorted(metadata_index.keys())}") return metadata_index diff --git a/backend/src/apis/app_api/sessions/services/session_service.py b/backend/src/apis/app_api/sessions/services/session_service.py index 9f5364cd..bb0a791b 100644 --- a/backend/src/apis/app_api/sessions/services/session_service.py +++ b/backend/src/apis/app_api/sessions/services/session_service.py @@ -178,14 +178,14 @@ async def get_session(self, user_id: str, session_id: str) -> Optional[SessionMe items = response.get('Items', []) if not items: - logger.info(f"Session not found: {session_id}") + logger.info("Session not found via GSI") return None item = _convert_decimal_to_float(items[0]) # Verify user ownership if item.get('userId') != user_id: - logger.warning(f"Session {session_id} belongs to different user") + logger.warning("Session belongs to different user") return None # Remove DynamoDB keys @@ -195,7 +195,7 @@ async def get_session(self, user_id: str, session_id: str) -> Optional[SessionMe return SessionMetadata.model_validate(item) except Exception as e: - logger.error(f"Failed to get session {session_id}: {e}", exc_info=True) + logger.error("Failed to get session", exc_info=True) return None async def delete_session(self, user_id: str, session_id: str) -> bool: @@ -224,11 +224,11 @@ async def delete_session(self, user_id: str, session_id: str) -> bool: # Get current session via GSI to find its SK session = await self.get_session(user_id, session_id) if not session: - logger.info(f"Session not found for deletion: {session_id}") + logger.info("Session not found for deletion") return False if session.deleted: - logger.info(f"Session {session_id} already deleted") + logger.info("Session already deleted") return True now = datetime.now(timezone.utc) @@ -272,7 +272,7 @@ async def delete_session(self, user_id: str, session_id: str) -> bool: Key={'PK': pk, 'SK': old_sk} ) - logger.info(f"Soft-deleted session {session_id} for user {user_id}") + logger.info("Soft-deleted session") # Note: AgentCore Memory cleanup is now handled via BackgroundTasks # in the route handler for true fire-and-forget behavior @@ -284,10 +284,10 @@ async def delete_session(self, user_id: str, session_id: str) -> bool: except self.dynamodb.meta.client.exceptions.TransactionCanceledException as e: # Transaction failed - likely the session was already deleted or modified - logger.warning(f"Transaction cancelled for session {session_id}: {e}") + logger.warning("Transaction cancelled for session deletion") return False except Exception as e: - logger.error(f"Failed to delete session {session_id}: {e}", exc_info=True) + logger.error("Failed to delete session", exc_info=True) return False def delete_agentcore_memory(self, session_id: str, user_id: str) -> None: @@ -361,14 +361,14 @@ def delete_agentcore_memory(self, session_id: str, user_id: str) -> None: except client.exceptions.ResourceNotFoundException: # Session doesn't exist in AgentCore Memory - nothing to delete - logger.debug(f"Session {session_id} not found in AgentCore Memory") + logger.debug("Session not found in AgentCore Memory") return except Exception as e: - logger.warning(f"Failed to list events for session {session_id}: {e}") + logger.warning("Failed to list events for session") return if not all_event_ids: - logger.debug(f"No events found for session {session_id} in AgentCore Memory") + logger.debug("No events found for session in AgentCore Memory") return # Delete events sequentially - this runs in background so no need @@ -384,18 +384,15 @@ def delete_agentcore_memory(self, session_id: str, user_id: str) -> None: ) deleted_count += 1 except Exception as e: - logger.warning(f"Failed to delete event {event_id}: {e}") + logger.warning("Failed to delete event from AgentCore Memory") - logger.info( - f"Deleted {deleted_count}/{len(all_event_ids)} events from AgentCore Memory " - f"for session {session_id}" - ) + logger.info("Deleted events from AgentCore Memory") except ImportError: logger.debug("AgentCore Memory SDK not available, skipping content deletion") except Exception as e: # Log but don't raise - content deletion failures shouldn't block session deletion - logger.error(f"Failed to delete AgentCore Memory content for session {session_id}: {e}") + logger.error("Failed to delete AgentCore Memory content for session") def delete_session_files(self, session_id: str) -> None: """ @@ -424,12 +421,10 @@ def delete_session_files(self, session_id: str) -> None: file_service.delete_session_files(session_id) ) if deleted_count > 0: - logger.info( - f"Background task deleted {deleted_count} files for session {session_id}" - ) + logger.info("Background task deleted files for session") finally: loop.close() except Exception as e: # Log but don't raise - file deletion failures shouldn't affect session deletion - logger.error(f"Failed to delete files for session {session_id}: {e}") + logger.error("Failed to delete files for session") diff --git a/backend/src/apis/app_api/sessions/tests/test_cache_savings.py b/backend/src/apis/app_api/sessions/tests/test_cache_savings.py index 9f514a15..519084db 100644 --- a/backend/src/apis/app_api/sessions/tests/test_cache_savings.py +++ b/backend/src/apis/app_api/sessions/tests/test_cache_savings.py @@ -1,7 +1,7 @@ """Unit tests for cache savings calculation in metadata service""" import pytest -from unittest.mock import AsyncMock, MagicMock, patch +from unittest.mock import AsyncMock, patch from datetime import datetime, timezone from apis.app_api.messages.models import ( diff --git a/backend/src/apis/app_api/shares/service.py b/backend/src/apis/app_api/shares/service.py index 0d12c7ca..f9a12f91 100644 --- a/backend/src/apis/app_api/shares/service.py +++ b/backend/src/apis/app_api/shares/service.py @@ -4,7 +4,6 @@ conversation share snapshots. Supports multiple shares per session. """ -import json import logging import os import re @@ -197,6 +196,44 @@ async def revoke_share(self, share_id: str, user: User) -> None: self._table.delete_item(Key={"share_id": item["share_id"]}) logger.info(f"Revoked share {item['share_id']}") + async def delete_shares_for_session(self, session_id: str) -> int: + """Delete all share snapshots for a session. + + Called as a background task when the session owner deletes a conversation. + Removes share records so that existing share links stop working. + Exported conversations (copied into recipients' own sessions) are unaffected. + + Returns: + Number of shares deleted. + """ + if not self._enabled: + logger.debug("ShareService disabled - skipping share cleanup") + return 0 + + try: + items = self._find_shares_by_session(session_id) + if not items: + return 0 + + # Batch delete all shares for this session + with self._table.batch_writer() as batch: + for item in items: + batch.delete_item(Key={"share_id": item["share_id"]}) + + logger.info( + f"Deleted {len(items)} share(s) for session " + f"{self._sanitize_id(session_id)}" + ) + return len(items) + + except Exception: + logger.error( + "Failed to delete shares for session " + f"{self._sanitize_id(session_id)}", + exc_info=True, + ) + return 0 + async def get_shares_for_session(self, session_id: str, user_id: str) -> ShareListResponse: """Return all shares for a session owned by the user.""" self._ensure_enabled() @@ -280,8 +317,8 @@ async def _copy_messages_to_memory( ) -> int: """Write snapshot messages into AgentCore Memory for a new session. - Converts each MessageResponse dict back to Bedrock Converse format - and appends it via AgentCoreMemorySessionManager. + Converts each MessageResponse dict to SessionMessage format and + persists via create_message to the "default" namespace. Returns: Number of messages successfully written. @@ -298,6 +335,7 @@ async def _copy_messages_to_memory( from bedrock_agentcore.memory.integrations.strands.session_manager import ( AgentCoreMemorySessionManager, ) + from strands.types.session import SessionMessage except ImportError: logger.error("AgentCore Memory SDK not available — cannot copy messages") return 0 @@ -319,15 +357,18 @@ async def _copy_messages_to_memory( ) count = 0 - for msg_dict in snapshot_messages: + for idx, msg_dict in enumerate(snapshot_messages): converse_msg = self._snapshot_msg_to_converse(msg_dict) if converse_msg is None: continue try: - await asyncio.to_thread(mgr.append_message, converse_msg, None) + # Create SessionMessage with proper index for ordering + session_msg = SessionMessage.from_message(converse_msg, index=idx) + # Use create_message with "default" namespace (same as list_messages uses) + await asyncio.to_thread(mgr.create_message, session_id, "default", session_msg) count += 1 except Exception as e: - logger.warning(f"Failed to copy message {count}: {e}") + logger.warning(f"Failed to copy message {idx}: {e}") logger.info(f"Copied {count}/{len(snapshot_messages)} messages to AgentCore Memory") return count diff --git a/backend/src/apis/app_api/storage/__init__.py b/backend/src/apis/app_api/storage/__init__.py index 71bb7e66..26fb7d65 100644 --- a/backend/src/apis/app_api/storage/__init__.py +++ b/backend/src/apis/app_api/storage/__init__.py @@ -1,10 +1,34 @@ """Storage utilities for DynamoDB-backed persistence""" -from .metadata_storage import MetadataStorage, get_metadata_storage +import logging +import os + +from .metadata_storage import MetadataStorage from .dynamodb_storage import DynamoDBStorage +logger = logging.getLogger(__name__) + + +def get_metadata_storage() -> MetadataStorage: + """ + Get DynamoDB storage backend. + + Environment Variables: + DYNAMODB_SESSIONS_METADATA_TABLE_NAME: DynamoDB table name for message metadata + DYNAMODB_COST_SUMMARY_TABLE_NAME: DynamoDB table for cost summaries + """ + sessions_table = os.environ.get("DYNAMODB_SESSIONS_METADATA_TABLE_NAME") + cost_summary_table = os.environ.get("DYNAMODB_COST_SUMMARY_TABLE_NAME") + + logger.info( + "Using DynamoDB metadata storage - " + "sessions_table=%s, cost_summary_table=%s", + sessions_table, cost_summary_table, + ) + return DynamoDBStorage() + + __all__ = [ - # Metadata storage "MetadataStorage", "get_metadata_storage", "DynamoDBStorage", diff --git a/backend/src/apis/app_api/storage/metadata_storage.py b/backend/src/apis/app_api/storage/metadata_storage.py index 166080d4..df640649 100644 --- a/backend/src/apis/app_api/storage/metadata_storage.py +++ b/backend/src/apis/app_api/storage/metadata_storage.py @@ -151,24 +151,3 @@ async def get_user_messages_in_range( List of metadata dictionaries matching the date range """ pass - - -def get_metadata_storage() -> MetadataStorage: - """ - Get DynamoDB storage backend. - - Environment Variables: - DYNAMODB_SESSIONS_METADATA_TABLE_NAME: DynamoDB table name for message metadata - DYNAMODB_COST_SUMMARY_TABLE_NAME: DynamoDB table for cost summaries - """ - import os - - sessions_table = os.environ.get("DYNAMODB_SESSIONS_METADATA_TABLE_NAME") - cost_summary_table = os.environ.get("DYNAMODB_COST_SUMMARY_TABLE_NAME") - - logger.info( - f"Using DynamoDB metadata storage - " - f"sessions_table={sessions_table}, cost_summary_table={cost_summary_table}" - ) - from .dynamodb_storage import DynamoDBStorage - return DynamoDBStorage() diff --git a/backend/src/apis/app_api/tools/service.py b/backend/src/apis/app_api/tools/service.py index ad635d63..510eded4 100644 --- a/backend/src/apis/app_api/tools/service.py +++ b/backend/src/apis/app_api/tools/service.py @@ -6,8 +6,7 @@ """ import logging -from typing import Dict, List, Optional, Set -from datetime import datetime +from typing import Dict, List, Optional from apis.shared.auth.models import User from apis.shared.rbac.models import UserEffectivePermissions @@ -22,7 +21,6 @@ ToolProtocol, ToolStatus, ToolRoleAssignment, - AdminToolResponse, SyncResult, ) from .repository import ToolCatalogRepository, get_tool_catalog_repository diff --git a/backend/src/apis/app_api/users/routes.py b/backend/src/apis/app_api/users/routes.py index 78010e3b..35e8ad54 100644 --- a/backend/src/apis/app_api/users/routes.py +++ b/backend/src/apis/app_api/users/routes.py @@ -1,14 +1,13 @@ """Users API routes for non-admin user operations.""" from fastapi import APIRouter, HTTPException, Depends, Query -from typing import List import logging from apis.shared.auth.dependencies import get_current_user from apis.shared.auth.models import User from apis.shared.rbac.service import get_app_role_service from apis.shared.users.repository import UserRepository -from apis.shared.users.models import UserProfile, UserListItem, UserStatus +from apis.shared.users.models import UserStatus from .models import UserSearchResult, UserSearchResponse, UserPermissionsResponse logger = logging.getLogger(__name__) @@ -81,7 +80,7 @@ async def search_users( - 401 if not authenticated - 500 if server error """ - logger.info(f"GET /users/search - User: {current_user.user_id}, Query: {q}, Limit: {limit}") + logger.info("GET /users/search") if not user_repo.enabled: logger.debug("User repository not enabled - returning empty results") @@ -166,11 +165,11 @@ async def search_users( # Limit results results = results[:limit] - logger.info(f"Found {len(results)} users matching query '{q}'") + logger.info("User search completed") return UserSearchResponse(users=results) except Exception as e: - logger.error(f"Error searching users: {e}", exc_info=True) + logger.error("Error searching users", exc_info=True) raise HTTPException( status_code=500, detail=f"Failed to search users: {str(e)}" diff --git a/backend/src/apis/inference_api/chat/converse_routes.py b/backend/src/apis/inference_api/chat/converse_routes.py index 640a6730..ea7536fd 100644 --- a/backend/src/apis/inference_api/chat/converse_routes.py +++ b/backend/src/apis/inference_api/chat/converse_routes.py @@ -84,7 +84,7 @@ async def _record_cost(user_id: str, model_id: str, usage: dict, key_id: str) -> pricing = await create_pricing_snapshot(model_id) if pricing is None: logger.warning( - f"No pricing snapshot for model {model_id}; skipping cost recording" + "No pricing snapshot for model; skipping cost recording" ) return @@ -125,7 +125,7 @@ async def _record_cost(user_id: str, model_id: str, usage: dict, key_id: str) -> ) except Exception as exc: logger.error( - f"Failed to record cost for user {user_id}, model {model_id}: {exc}", + "Failed to record cost", exc_info=True, ) @@ -336,11 +336,7 @@ async def api_converse( """ # 1. Validate API key validated_key = await _validate_api_key(x_api_key) - logger.info( - f"api-converse request from user={validated_key.user_id} " - f"key={validated_key.key_id} model={request.model_id} " - f"messages={len(request.messages)} stream={request.stream}" - ) + logger.info("api-converse request received") # 1.5 Per-key rate limit (fail-open) from apis.shared.rate_limit import get_rate_limiter diff --git a/backend/src/apis/inference_api/chat/models.py b/backend/src/apis/inference_api/chat/models.py index d7a39a0b..f2b4a5d9 100644 --- a/backend/src/apis/inference_api/chat/models.py +++ b/backend/src/apis/inference_api/chat/models.py @@ -38,11 +38,6 @@ class InvocationRequest(BaseModel): rag_assistant_id: Optional[str] = None -# class InvocationRequest(BaseModel): -# """AgentCore Runtime standard request format""" -# input: InvocationInput - - class InvocationResponse(BaseModel): """AgentCore Runtime standard response format""" diff --git a/backend/src/apis/inference_api/chat/routes.py b/backend/src/apis/inference_api/chat/routes.py index 881fb74a..ae4d6893 100644 --- a/backend/src/apis/inference_api/chat/routes.py +++ b/backend/src/apis/inference_api/chat/routes.py @@ -10,22 +10,20 @@ import json import logging import os -from datetime import datetime, timezone from typing import AsyncGenerator, Union from fastapi import APIRouter, Depends, HTTPException, status from fastapi.responses import StreamingResponse from agents.main_agent.session.session_factory import SessionFactory -from apis.shared.auth.dependencies import get_current_user, get_current_user_trusted +from apis.shared.auth.dependencies import get_current_user_trusted from apis.shared.auth.models import User from apis.shared.errors import ( ConversationalErrorEvent, ErrorCode, build_conversational_error_event, - create_error_response, ) -from apis.shared.files.file_resolver import ResolvedFileContent, get_file_resolver +from apis.shared.files.file_resolver import get_file_resolver from apis.shared.models.managed_models import list_managed_models from apis.shared.quota import ( QuotaExceededEvent, @@ -92,15 +90,15 @@ async def _resolve_caching_enabled(model_id: str | None, explicit_caching_enable managed_models = await list_managed_models() for model in managed_models: if model.model_id == model_id: - logger.debug(f"Found managed model {model_id}, supports_caching={model.supports_caching}") + logger.debug("Found managed model, checking supports_caching") return model.supports_caching # Model not found in managed models - use default - logger.debug(f"Model {model_id} not found in managed models, using default caching behavior") + logger.debug("Model not found in managed models, using default caching behavior") return None except Exception as e: - logger.warning(f"Failed to look up managed model {model_id}: {e}") + logger.warning("Failed to look up managed model for caching") return None @@ -154,7 +152,7 @@ async def stream_conversational_message( # Skip persistence for preview sessions if is_preview_session(session_id): - logger.info(f"🔍 Preview session {session_id} - skipping message persistence") + logger.info("Preview session - skipping message persistence") return # Save messages to session for persistence @@ -177,10 +175,10 @@ async def stream_conversational_message( session_manager.base_manager.create_message(session_id, "default", user_session_msg) session_manager.base_manager.create_message(session_id, "default", assistant_session_msg) - logger.info(f"💾 Saved {stop_reason} messages to session {session_id}") + logger.info("Saved messages to session") except Exception as e: - logger.error(f"Failed to save {stop_reason} messages to session: {e}", exc_info=True) + logger.error("Failed to save messages to session", exc_info=True) # ============================================================ @@ -211,16 +209,16 @@ async def invocations(request: InvocationRequest, current_user: User = Depends(g input_data = request user_id = current_user.user_id auth_token = current_user.raw_token - logger.info(f"Invocation request - Session: {input_data.session_id}, User: {user_id}") - logger.info(f"Message: {input_data.message[:50]}...") + logger.info("Invocation request received") + logger.info("Message received") if input_data.enabled_tools: - logger.info(f"Enabled tools ({len(input_data.enabled_tools)}): {input_data.enabled_tools}") + logger.info(f"Enabled tools ({len(input_data.enabled_tools)})") if input_data.files: logger.info(f"Files attached: {len(input_data.files)} files") for file in input_data.files: - logger.info(f" - {file.filename} ({file.content_type})") + logger.info(" - File attached") if input_data.file_upload_ids: logger.info(f"File upload IDs: {len(input_data.file_upload_ids)} IDs to resolve") @@ -241,7 +239,7 @@ async def invocations(request: InvocationRequest, current_user: User = Depends(g files_to_send.append(FileContent(filename=rf.filename, content_type=rf.content_type, bytes=rf.bytes)) logger.info(f"Resolved {len(resolved_files)} files from upload IDs") except Exception as e: - logger.warning(f"Failed to resolve file upload IDs: {e}") + logger.warning("Failed to resolve file upload IDs") # Continue without files rather than failing the request # Check quota if enforcement is enabled @@ -254,7 +252,7 @@ async def invocations(request: InvocationRequest, current_user: User = Depends(g if not quota_result.allowed: # Quota blocked - stream as SSE instead of 429 for better UX - logger.warning(f"Quota blocked for user {user_id}: {quota_result.message}") + logger.warning("Quota blocked for user") if quota_result.tier is None: # No quota tier configured for this user quota_exceeded_event = build_no_quota_configured_event(quota_result) @@ -265,11 +263,11 @@ async def invocations(request: InvocationRequest, current_user: User = Depends(g # Check for warning level quota_warning_event = build_quota_warning_event(quota_result) if quota_warning_event: - logger.info(f"Quota warning for user {user_id}: {quota_result.warning_level}") + logger.info("Quota warning for user") except Exception as e: # Log error but don't block request - fail open for quota errors - logger.error(f"Error checking quota for user {user_id}: {e}", exc_info=True) + logger.error("Error checking quota for user", exc_info=True) # If quota exceeded, stream the quota exceeded message instead of agent response if quota_exceeded_event: @@ -303,7 +301,7 @@ async def invocations(request: InvocationRequest, current_user: User = Depends(g system_prompt = input_data.system_prompt # Start with provided system prompt logger.info( - f"Invocation request - Session: {input_data.session_id}, Assistant ID: {input_data.rag_assistant_id}, Message: {input_data.message[:50]}..." + "Invocation request - processing with assistant context" ) if input_data.rag_assistant_id: @@ -326,8 +324,8 @@ async def invocations(request: InvocationRequest, current_user: User = Depends(g SessionPreferences, ) - logger.info(f"🔍 DEBUG: Assistant RAG requested - Assistant: {input_data.rag_assistant_id}, Session: {input_data.session_id}") - logger.info(f"🔍 DEBUG: User ID: {user_id}, User Email: {current_user.email}") + logger.info("Assistant RAG requested") + logger.info("Processing for authenticated user") # 1. Check if session already has an assistant attached # If it does, verify it's the same assistant (can't change assistants mid-session) @@ -342,13 +340,13 @@ async def invocations(request: InvocationRequest, current_user: User = Depends(g # Session already has an assistant - verify it's the same one if existing_assistant_id != input_data.rag_assistant_id: logger.warning( - f"Attempted to change assistant from {existing_assistant_id} to {input_data.rag_assistant_id} in session {input_data.session_id}" + "Attempted to change assistant mid-session" ) raise HTTPException( status_code=400, detail="Cannot change assistants mid-session. Start a new session to use a different assistant." ) # Same assistant - allow it to continue - logger.info(f"Continuing with existing assistant {input_data.rag_assistant_id} in session {input_data.session_id}") + logger.info("Continuing with existing assistant in session") else: # No assistant attached - verify session has no messages (can only attach to new sessions) messages_response = await get_messages( @@ -358,7 +356,7 @@ async def invocations(request: InvocationRequest, current_user: User = Depends(g ) if messages_response.messages and len(messages_response.messages) > 0: logger.warning( - f"Attempted to attach assistant {input_data.rag_assistant_id} to session {input_data.session_id} with existing messages" + "Attempted to attach assistant to session with existing messages" ) raise HTTPException( status_code=400, detail="Assistants can only be attached to new sessions, start a new session to chat with this assistant" @@ -366,74 +364,74 @@ async def invocations(request: InvocationRequest, current_user: User = Depends(g except HTTPException: raise except Exception as e: - logger.error(f"Error checking session state: {e}", exc_info=True) + logger.error("Error checking session state", exc_info=True) # Continue anyway - better to allow than block on error else: - logger.info(f"🔍 Preview session - skipping session state validation") + logger.info("Preview session - skipping session state validation") # 2. Load assistant with access check - logger.info(f"🔍 DEBUG: Loading assistant {input_data.rag_assistant_id} with access check...") + logger.info("Loading assistant with access check...") assistant = await get_assistant_with_access_check(assistant_id=input_data.rag_assistant_id, user_id=user_id, user_email=current_user.email) if not assistant: - logger.warning(f"🔍 DEBUG: get_assistant_with_access_check returned None for {input_data.rag_assistant_id}") + logger.warning("get_assistant_with_access_check returned None") # Check if assistant exists at all to provide better error message from apis.shared.assistants.service import assistant_exists exists = await assistant_exists(input_data.rag_assistant_id) if not exists: - logger.warning(f"❌ Assistant {input_data.rag_assistant_id} does not exist (404)") + logger.warning("Assistant does not exist (404)") raise HTTPException(status_code=404, detail=f"Assistant not found: {input_data.rag_assistant_id}") else: - logger.warning(f"🔒 Access denied: user {user_id} ({current_user.email}) cannot access assistant {input_data.rag_assistant_id} (403)") + logger.warning("Access denied to assistant (403)") raise HTTPException(status_code=403, detail=f"Access denied: You do not have permission to access this assistant") # Log assistant details for debugging - logger.info(f"🔍 DEBUG: Assistant loaded successfully!") - logger.info(f"🔍 DEBUG: Assistant ID: {assistant.assistant_id}") - logger.info(f"🔍 DEBUG: Assistant Name: {assistant.name}") - logger.info(f"🔍 DEBUG: Assistant Owner ID: {assistant.owner_id}") - logger.info(f"🔍 DEBUG: Assistant Visibility: {assistant.visibility}") - logger.info(f"🔍 DEBUG: Assistant Instructions: {assistant.instructions[:200] if assistant.instructions else 'NONE'}...") - logger.info(f"🔍 DEBUG: Assistant Instructions Length: {len(assistant.instructions) if assistant.instructions else 0}") - logger.info(f"🔍 DEBUG: Assistant Vector Index ID: {assistant.vector_index_id}") + logger.info("Assistant loaded successfully!") + logger.info("Assistant details retrieved") + logger.info("Assistant name retrieved") + logger.info("Assistant owner retrieved") + logger.info("Assistant visibility retrieved") + logger.info("Assistant instructions retrieved") + logger.info("Assistant instructions length retrieved") + logger.info("Assistant vector index retrieved") # Mark as viewed if this is a shared assistant (not owned) if assistant.owner_id != user_id: await mark_share_as_interacted(assistant_id=input_data.rag_assistant_id, user_email=current_user.email) # 3. Search assistant knowledge base - logger.info(f"🔍 DEBUG: Starting knowledge base search for assistant {input_data.rag_assistant_id}...") + logger.info("Starting knowledge base search for assistant...") try: - logger.info(f"🔍 DEBUG: Searching knowledge base for assistant {input_data.rag_assistant_id} with query: {input_data.message[:100]}...") + logger.info("Searching knowledge base for assistant...") context_chunks = await search_assistant_knowledgebase_with_formatting( assistant_id=input_data.rag_assistant_id, query=input_data.message, top_k=5 ) - logger.info(f"🔍 DEBUG: Knowledge base search returned {len(context_chunks) if context_chunks else 0} chunks") + logger.info(f"Knowledge base search returned {len(context_chunks) if context_chunks else 0} chunks") if context_chunks: for i, chunk in enumerate(context_chunks): - logger.info(f"🔍 DEBUG: Chunk {i + 1}: {chunk.get('text', '')[:100]}...") - logger.info(f"🔍 DEBUG: Chunk {i + 1} metadata: {chunk.get('metadata', {})}") + logger.info(f"Chunk {i + 1} retrieved") + logger.info(f"Chunk {i + 1} metadata retrieved") # 4. Augment message with context if context_chunks: augmented_message = augment_prompt_with_context(user_message=input_data.message, context_chunks=context_chunks) logger.info( - f"✅ Augmented message with {len(context_chunks)} context chunks. Original length: {len(input_data.message)}, Augmented length: {len(augmented_message)}" + f"Augmented message with {len(context_chunks)} context chunks" ) - logger.info(f"🔍 DEBUG: Augmented message preview: {augmented_message[:500]}...") + logger.info("Augmented message preview available") else: - logger.info(f"⚠️ No context chunks found for assistant {input_data.rag_assistant_id} - using original message without augmentation") + logger.info("No context chunks found for assistant - using original message without augmentation") except Exception as e: - logger.error(f"❌ Error searching assistant knowledge base: {e}", exc_info=True) - logger.error(f"🔍 DEBUG: Exception type: {type(e).__name__}") + logger.error("Error searching assistant knowledge base", exc_info=True) + logger.error(f"Exception type: {type(e).__name__}") # Continue without RAG context rather than failing # 5. Append assistant's instructions to the base system prompt (don't replace) # For preview sessions, prefer the system_prompt from the request (live form edits) # over the saved assistant instructions, so users can test changes before saving. - logger.info(f"🔍 DEBUG: Checking assistant instructions... assistant.instructions is {'truthy' if assistant.instructions else 'falsy'}") + logger.info("Checking assistant instructions...") preview_instructions_override = input_data.system_prompt if is_preview_session(input_data.session_id) and input_data.system_prompt else None effective_instructions = preview_instructions_override or assistant.instructions @@ -449,23 +447,23 @@ async def invocations(request: InvocationRequest, current_user: User = Depends(g system_prompt = f"{base_prompt}\n\n## Assistant-Specific Instructions\n\n{effective_instructions}" if preview_instructions_override: logger.info( - f"✅ Using live preview instructions override (length: {len(effective_instructions)})" + "Using live preview instructions override" ) else: logger.info( - f"✅ Appended assistant instructions to base system prompt (base: {len(base_prompt)}, assistant: {len(effective_instructions)}, total: {len(system_prompt)})" + "Appended assistant instructions to base system prompt" ) - logger.info(f"🔍 DEBUG: Final system prompt preview (last 500 chars): ...{system_prompt[-500:]}") + logger.info("Final system prompt built") else: # No assistant instructions - use base prompt if no system_prompt provided - logger.warning(f"🔍 DEBUG: No instructions found on assistant {input_data.rag_assistant_id}!") + logger.warning("No instructions found on assistant!") if not system_prompt: from agents.main_agent.core.system_prompt_builder import SystemPromptBuilder base_prompt_builder = SystemPromptBuilder() system_prompt = base_prompt_builder.build(include_date=True) logger.info( - f"⚠️ Assistant {input_data.rag_assistant_id} has no instructions - using {'provided' if system_prompt else 'default'} system prompt" + "Assistant has no instructions - using fallback system prompt" ) # 6. Save assistant_id to session preferences (persist for future loads) @@ -477,7 +475,6 @@ async def invocations(request: InvocationRequest, current_user: User = Depends(g # Update existing metadata with assistant_id in preferences prefs_dict = existing_metadata.preferences.model_dump(by_alias=False) if existing_metadata.preferences else {} prefs_dict["assistant_id"] = input_data.rag_assistant_id - preferences = SessionPreferences(**prefs_dict) updated_metadata = existing_metadata.model_copy(update={"assistant_id": input_data.rag_assistant_id}) @@ -504,12 +501,12 @@ async def invocations(request: InvocationRequest, current_user: User = Depends(g ) await store_session_metadata(session_id=input_data.session_id, user_id=user_id, session_metadata=updated_metadata) - logger.info(f"💾 Saved assistant_id {input_data.rag_assistant_id} to session {input_data.session_id} preferences") + logger.info("Saved assistant_id to session preferences") except Exception as e: - logger.error(f"Failed to save assistant_id to session preferences: {e}", exc_info=True) + logger.error("Failed to save assistant_id to session preferences", exc_info=True) # Continue - not critical if metadata save fails else: - logger.info(f"🔍 Preview session - skipping assistant_id persistence") + logger.info("Preview session - skipping assistant_id persistence") try: # Resolve caching_enabled based on managed model configuration @@ -517,7 +514,7 @@ async def invocations(request: InvocationRequest, current_user: User = Depends(g caching_enabled = await _resolve_caching_enabled(model_id=input_data.model_id, explicit_caching_enabled=input_data.caching_enabled) if caching_enabled is False: - logger.info(f"Prompt caching disabled for model {input_data.model_id}") + logger.info("Prompt caching disabled for model") # Get agent instance with user-specific configuration # AgentCore Memory tracks preferences across sessions per user_id @@ -565,11 +562,23 @@ async def stream_with_quota_warning() -> AsyncGenerator[str, None]: # Then yield all agent stream events # Use augmented message if assistant RAG was applied # Use resolved files (from S3) merged with any direct file content + # + # Always store the original user message as displayText when the prompt + # will be modified before reaching the model. This happens when: + # 1. RAG augmentation prepends context chunks to the message + # 2. File attachments cause PromptBuilder to rewrite into ContentBlocks + # The original text becomes the single source of truth for UI display, + # while the full augmented prompt stays in AgentCore Memory for the LLM. + message_will_be_modified = ( + augmented_message != input_data.message # RAG augmentation + or bool(files_to_send) # File attachments + ) async for event in agent.stream_async( - augmented_message, # Use augmented message if assistant RAG was applied + augmented_message, session_id=input_data.session_id, files=files_to_send if files_to_send else None, - citations=citations_for_storage if citations_for_storage else None, # Pass citations for persistence + citations=citations_for_storage if citations_for_storage else None, + original_message=input_data.message if message_will_be_modified else None, ): yield event @@ -586,7 +595,7 @@ async def stream_with_quota_warning() -> AsyncGenerator[str, None]: raise except Exception as e: # Stream error as a conversational assistant message for better UX - logger.error(f"Error in invocations: {e}", exc_info=True) + logger.error("Error in invocations", exc_info=True) error_event = build_conversational_error_event(code=ErrorCode.AGENT_ERROR, error=e, session_id=input_data.session_id, recoverable=True) diff --git a/backend/src/apis/inference_api/chat/service.py b/backend/src/apis/inference_api/chat/service.py index d1646ef4..11006fd2 100644 --- a/backend/src/apis/inference_api/chat/service.py +++ b/backend/src/apis/inference_api/chat/service.py @@ -5,10 +5,8 @@ import logging import hashlib -import json import os from typing import Optional, List, Tuple -from functools import lru_cache from datetime import datetime, timezone import boto3 @@ -144,11 +142,11 @@ def get_agent( # Check cache if cache_key in _agent_cache: - logger.debug(f"✅ Agent cache hit for session {session_id}") + logger.debug("✅ Agent cache hit") return _agent_cache[cache_key] # Cache miss - create new agent - logger.debug(f"⚠️ Agent cache miss for session {session_id} - creating new instance") + logger.debug("⚠️ Agent cache miss - creating new instance") # Create agent with multi-provider support agent = MainAgent( @@ -172,7 +170,7 @@ def get_agent( logger.debug(f"🗑️ Evicted oldest agent from cache (size={_CACHE_MAX_SIZE})") _agent_cache[cache_key] = agent - logger.debug(f"💾 Cached agent for session {session_id} (cache size={len(_agent_cache)})") + logger.debug("💾 Cached agent") return agent diff --git a/backend/src/apis/inference_api/main.py b/backend/src/apis/inference_api/main.py index 65fe8b5c..0e3f5018 100644 --- a/backend/src/apis/inference_api/main.py +++ b/backend/src/apis/inference_api/main.py @@ -13,15 +13,18 @@ from dotenv import load_dotenv import os +import logging as _logging + # Load .env file from backend/src directory (parent of apis/) # override=True so .env values win over shell env vars during local development. # In production (containers), there is no .env file, so container-injected env vars are used directly. env_path = Path(__file__).parent.parent.parent / '.env' +_startup_logger = _logging.getLogger(__name__) if env_path.exists(): load_dotenv(dotenv_path=env_path, override=True) - print(f"Loaded environment variables from: {env_path}") + _startup_logger.info("Loaded environment variables from: %s", env_path) else: - print(f"Warning: .env file not found at {env_path}") + _startup_logger.warning(".env file not found at %s", env_path) from fastapi import FastAPI from fastapi.staticfiles import StaticFiles diff --git a/backend/src/apis/shared/assistants/__init__.py b/backend/src/apis/shared/assistants/__init__.py index 17f63c7b..bd5bff83 100644 --- a/backend/src/apis/shared/assistants/__init__.py +++ b/backend/src/apis/shared/assistants/__init__.py @@ -17,7 +17,6 @@ UpdateAssistantRequest, ) from .service import ( - archive_assistant, assistant_exists, check_share_access, create_assistant, @@ -51,7 +50,6 @@ "AssistantSharesResponse", "UpdateAssistantRequest", # Service functions - "archive_assistant", "assistant_exists", "check_share_access", "create_assistant", diff --git a/backend/src/apis/shared/assistants/models.py b/backend/src/apis/shared/assistants/models.py index ff78c14e..94af5d91 100644 --- a/backend/src/apis/shared/assistants/models.py +++ b/backend/src/apis/shared/assistants/models.py @@ -24,7 +24,7 @@ class Assistant(BaseModel): usage_count: int = Field(0, alias="usageCount", description="Number of times used") created_at: str = Field(..., alias="createdAt", description="ISO 8601 timestamp of creation") updated_at: str = Field(..., alias="updatedAt", description="ISO 8601 timestamp of last update") - status: Literal["DRAFT", "COMPLETE", "ARCHIVED"] = Field(..., description="Assistant lifecycle status") + status: Literal["DRAFT", "COMPLETE"] = Field(..., description="Assistant lifecycle status") image_url: Optional[str] = Field(None, alias="imageUrl", description="URL to assistant avatar/image") @@ -63,7 +63,7 @@ class UpdateAssistantRequest(BaseModel): tags: Optional[List[str]] = Field(None, description="Search keywords") starters: Optional[List[str]] = Field(None, description="Conversation starter prompts") emoji: Optional[str] = Field(None, description="Single emoji character for assistant avatar") - status: Optional[Literal["DRAFT", "COMPLETE", "ARCHIVED"]] = Field(None, description="Lifecycle status") + status: Optional[Literal["DRAFT", "COMPLETE"]] = Field(None, description="Lifecycle status") image_url: Optional[str] = Field(None, alias="imageUrl", description="URL to assistant avatar/image") @@ -85,7 +85,7 @@ class AssistantResponse(BaseModel): usage_count: int = Field(..., alias="usageCount", description="Usage count") created_at: str = Field(..., alias="createdAt", description="ISO 8601 creation timestamp") updated_at: str = Field(..., alias="updatedAt", description="ISO 8601 update timestamp") - status: Literal["DRAFT", "COMPLETE", "ARCHIVED"] = Field(..., description="Lifecycle status") + status: Literal["DRAFT", "COMPLETE"] = Field(..., description="Lifecycle status") image_url: Optional[str] = Field(None, alias="imageUrl", description="URL to assistant avatar/image") # Share metadata (only present for shared assistants) diff --git a/backend/src/apis/shared/assistants/rag_service.py b/backend/src/apis/shared/assistants/rag_service.py index a089ba8a..6c3d76e4 100644 --- a/backend/src/apis/shared/assistants/rag_service.py +++ b/backend/src/apis/shared/assistants/rag_service.py @@ -5,7 +5,10 @@ """ import logging -from typing import Any, Dict, List +import os +from typing import Any, Dict, List, Set + +import boto3 from apis.shared.embeddings.bedrock_embeddings import search_assistant_knowledgebase @@ -39,6 +42,9 @@ async def search_assistant_knowledgebase_with_formatting(assistant_id: str, quer logger.info(f"No vectors found for assistant {assistant_id} with query: {query[:50]}...") return [] + # Filter out chunks from documents that are not in "complete" status + vectors = _filter_vectors_by_document_status(vectors, assistant_id) + # Format results - return document_id for on-demand download URL generation formatted_results = [] for vector in vectors[:top_k]: @@ -62,6 +68,74 @@ async def search_assistant_knowledgebase_with_formatting(assistant_id: str, quer return [] +def _filter_vectors_by_document_status(vectors: List[Dict[str, Any]], assistant_id: str) -> List[Dict[str, Any]]: + """ + Filter vector results to only include chunks from documents with status='complete'. + + Extracts unique document_ids from vector metadata, looks up each document's status + in DynamoDB, and removes chunks from documents that are not 'complete' or don't exist. + + On any DynamoDB failure, falls back to returning unfiltered results (graceful degradation). + + Args: + vectors: List of vector results from S3 Vectors search + assistant_id: Assistant identifier for DynamoDB key construction + + Returns: + Filtered list of vectors from complete documents only + """ + # Extract unique document_ids from vector metadata + doc_ids: Set[str] = set() + for vector in vectors: + doc_id = vector.get("metadata", {}).get("document_id") + if doc_id: + doc_ids.add(doc_id) + + if not doc_ids: + return vectors + + # Look up document status in DynamoDB + valid_doc_ids: Set[str] = set() + try: + table_name = os.environ.get("DYNAMODB_ASSISTANTS_TABLE_NAME") + if table_name: + region = os.environ.get("AWS_REGION", "us-west-2") + dynamodb = boto3.resource("dynamodb", region_name=region) + table = dynamodb.Table(table_name) + for doc_id in doc_ids: + try: + response = table.get_item( + Key={"PK": f"AST#{assistant_id}", "SK": f"DOC#{doc_id}"} + ) + item = response.get("Item") + if item and item.get("status") == "complete": + valid_doc_ids.add(doc_id) + else: + logger.info( + f"Filtering out doc {doc_id}: " + f"status={item.get('status') if item else 'NOT_FOUND'}" + ) + except Exception as e: + logger.warning(f"Failed to look up document {doc_id}: {e}") + # Skip individual lookup failures + else: + # No table configured — fall back to unfiltered + logger.warning("DYNAMODB_ASSISTANTS_TABLE_NAME not configured, returning unfiltered results") + valid_doc_ids = doc_ids + except Exception as e: + logger.warning(f"DynamoDB lookup failed, returning unfiltered results: {e}") + valid_doc_ids = doc_ids # Graceful degradation + + # Filter vectors to only include chunks from valid documents + filtered = [v for v in vectors if v.get("metadata", {}).get("document_id") in valid_doc_ids] + if len(filtered) < len(vectors): + logger.info( + f"Document status filter: {len(vectors)} vectors → {len(filtered)} " + f"(removed {len(vectors) - len(filtered)} from non-complete docs)" + ) + return filtered + + def augment_prompt_with_context(user_message: str, context_chunks: List[Dict[str, Any]], max_context_length: int = 2000) -> str: """ Augment user message with retrieved context chunks diff --git a/backend/src/apis/shared/assistants/service.py b/backend/src/apis/shared/assistants/service.py index 5eb1c7f2..79a5b844 100644 --- a/backend/src/apis/shared/assistants/service.py +++ b/backend/src/apis/shared/assistants/service.py @@ -559,7 +559,6 @@ async def list_user_assistants( owner_id: str, limit: Optional[int] = None, next_token: Optional[str] = None, - include_archived: bool = False, include_drafts: bool = False, include_public: bool = False, ) -> Tuple[List[Assistant], Optional[str]]: @@ -570,7 +569,6 @@ async def list_user_assistants( owner_id: User identifier limit: Maximum number of assistants to return (optional) next_token: Pagination token for retrieving next page (optional) - include_archived: Whether to include archived assistants include_drafts: Whether to include draft assistants include_public: Deprecated/Ignored. Public assistants are no longer listed in a general index. @@ -590,7 +588,6 @@ async def list_user_assistants( table_name=assistants_table, limit=limit, next_token=next_token, - include_archived=include_archived, include_drafts=include_drafts, include_public=include_public, ) @@ -652,7 +649,6 @@ async def _list_user_assistants_cloud( table_name: str, limit: Optional[int] = None, next_token: Optional[str] = None, - include_archived: bool = False, include_drafts: bool = False, include_public: bool = False, ) -> Tuple[List[Assistant], Optional[str]]: @@ -664,7 +660,6 @@ async def _list_user_assistants_cloud( table_name: DynamoDB table name limit: Maximum number of assistants to return (optional) next_token: Pagination token (optional) - include_archived: Whether to include archived assistants include_drafts: Whether to include draft assistants include_public: Ignored. @@ -683,9 +678,6 @@ async def _list_user_assistants_cloud( filter_parts = [] expression_attribute_values = {} - if not include_archived: - filter_parts.append("#status <> :archived") - expression_attribute_values[":archived"] = "ARCHIVED" if not include_drafts: filter_parts.append("#status <> :draft") expression_attribute_values[":draft"] = "DRAFT" @@ -757,20 +749,6 @@ async def _list_user_assistants_cloud( return [], None -async def archive_assistant(assistant_id: str, owner_id: str) -> Optional[Assistant]: - """ - Archive an assistant (soft delete - sets status to ARCHIVED) - - Args: - assistant_id: Assistant identifier - owner_id: User identifier (for ownership verification) - - Returns: - Updated Assistant object with status=ARCHIVED, None if not found - """ - return await update_assistant(assistant_id=assistant_id, owner_id=owner_id, status="ARCHIVED") - - async def delete_assistant(assistant_id: str, owner_id: str) -> bool: """ Delete an assistant permanently (hard delete) diff --git a/backend/src/apis/shared/auth/rbac.py b/backend/src/apis/shared/auth/rbac.py index dbd92a93..bcdcba27 100644 --- a/backend/src/apis/shared/auth/rbac.py +++ b/backend/src/apis/shared/auth/rbac.py @@ -1,6 +1,6 @@ """Role-based access control utilities.""" -from typing import List, Callable +from typing import Callable from fastapi import Depends, HTTPException, status import logging diff --git a/backend/src/apis/shared/embeddings/bedrock_embeddings.py b/backend/src/apis/shared/embeddings/bedrock_embeddings.py index 1c381a8a..d9459270 100644 --- a/backend/src/apis/shared/embeddings/bedrock_embeddings.py +++ b/backend/src/apis/shared/embeddings/bedrock_embeddings.py @@ -163,7 +163,13 @@ async def delete_vectors_for_document(document_id: str) -> int: """ Delete all vectors for a specific document from the S3 vector store. - Vectors are stored with keys formatted as {document_id}#{chunk_index}. + Vectors are stored with keys formatted as {document_id}#{chunk_index} + where chunk_index is a sequential integer starting at 0. + + Uses a probe-and-delete strategy: generates candidate keys in batches, + checks which exist via GetVectors, then deletes them. Stops probing + when a batch returns no results. Falls back to a full list scan if + the probe finds nothing (handles unexpected key formats). Args: document_id: The document identifier @@ -175,10 +181,134 @@ async def delete_vectors_for_document(document_id: str) -> int: vector_bucket = _get_vector_store_bucket() vector_index = _get_vector_store_index() + existing_keys = [] + probe_batch_size = 500 + probe_offset = 0 + max_probe = 10000 # Safety limit + + # Strategy 1: Probe for keys using the known pattern {document_id}#{index} + while probe_offset < max_probe: + candidate_keys = [f"{document_id}#{i}" for i in range(probe_offset, probe_offset + probe_batch_size)] + try: + response = client.get_vectors( + vectorBucketName=vector_bucket, + indexName=vector_index, + keys=candidate_keys, + ) + found = [v["key"] for v in response.get("vectors", [])] + if not found: + # No more vectors in this range — stop probing + break + existing_keys.extend(found) + except Exception as e: + logger.warning(f"GetVectors probe failed at offset {probe_offset}: {e}") + break + + probe_offset += probe_batch_size + + # Strategy 2: Fallback to list scan if probe found nothing + if not existing_keys: + logger.info(f"Key probe found no vectors for {document_id}, falling back to list scan") + next_token = None + document_prefix = f"{document_id}#" + + while True: + list_params = { + "vectorBucketName": vector_bucket, + "indexName": vector_index, + "maxResults": 1000, + } + if next_token: + list_params["nextToken"] = next_token + + response = client.list_vectors(**list_params) + for vector in response.get("vectors", []): + if vector.get("key", "").startswith(document_prefix): + existing_keys.append(vector["key"]) + + next_token = response.get("nextToken") + if not next_token: + break + + # Delete all found keys in batches + if existing_keys: + delete_batch_size = 500 + deleted_count = 0 + + for i in range(0, len(existing_keys), delete_batch_size): + batch = existing_keys[i : i + delete_batch_size] + client.delete_vectors(vectorBucketName=vector_bucket, indexName=vector_index, keys=batch) + deleted_count += len(batch) + + logger.info(f"Deleted {deleted_count} vectors for document {document_id}") + return deleted_count + else: + logger.info(f"No vectors found for document {document_id}") + return 0 + + +async def delete_vectors_for_document_deterministic( + document_id: str, + chunk_count: int, +) -> int: + """ + Delete vectors using deterministic keys: {document_id}#{i} for i in range(chunk_count). + No probing, no list-scan. O(chunk_count) with a single batch delete call. + + Deletion of non-existent keys is a no-op in the S3 Vectors API. + + Args: + document_id: The document identifier + chunk_count: Number of chunks to delete + + Returns: + Number of keys sent for deletion (= chunk_count) + + Raises: + Exception: If S3 Vectors API call fails (caller handles retries) + """ + if chunk_count == 0: + return 0 + + client = boto3.client("s3vectors", region_name=AWS_REGION) + vector_bucket = _get_vector_store_bucket() + vector_index = _get_vector_store_index() + + keys = [f"{document_id}#{i}" for i in range(chunk_count)] + batch_size = 500 + + for i in range(0, len(keys), batch_size): + batch = keys[i : i + batch_size] + client.delete_vectors( + vectorBucketName=vector_bucket, + indexName=vector_index, + keys=batch, + ) + + logger.info(f"Deterministic delete: sent {chunk_count} keys for document {document_id}") + return chunk_count + + +async def delete_vectors_for_assistant(assistant_id: str) -> int: + """ + Delete ALL vectors belonging to an assistant from the S3 vector store. + + Used when deleting an entire assistant to prevent orphaned vectors. + Scans the index filtering by assistant_id metadata via list + client-side filter. + + Args: + assistant_id: The assistant identifier + + Returns: + Number of vectors deleted + """ + client = boto3.client("s3vectors", region_name=AWS_REGION) + vector_bucket = _get_vector_store_bucket() + vector_index = _get_vector_store_index() + keys_to_delete = [] next_token = None - # List all vectors with pagination, filtering for this document while True: list_params = { "vectorBucketName": vector_bucket, @@ -186,18 +316,14 @@ async def delete_vectors_for_document(document_id: str) -> int: "maxResults": 1000, "returnMetadata": True, } - if next_token: list_params["nextToken"] = next_token response = client.list_vectors(**list_params) - vectors = response.get("vectors", []) - - document_prefix = f"{document_id}#" - for vector in vectors: - vector_key = vector.get("key", "") - if vector_key.startswith(document_prefix): - keys_to_delete.append(vector_key) + for vector in response.get("vectors", []): + metadata = vector.get("metadata", {}) + if metadata.get("assistant_id") == assistant_id: + keys_to_delete.append(vector["key"]) next_token = response.get("nextToken") if not next_token: @@ -206,14 +332,13 @@ async def delete_vectors_for_document(document_id: str) -> int: if keys_to_delete: batch_size = 500 deleted_count = 0 - for i in range(0, len(keys_to_delete), batch_size): batch = keys_to_delete[i : i + batch_size] client.delete_vectors(vectorBucketName=vector_bucket, indexName=vector_index, keys=batch) deleted_count += len(batch) - logger.info(f"Deleted {deleted_count} vectors for document {document_id}") + logger.info(f"Deleted {deleted_count} vectors for assistant {assistant_id}") return deleted_count else: - logger.info(f"No vectors found for document {document_id}") + logger.info(f"No vectors found for assistant {assistant_id}") return 0 diff --git a/backend/src/apis/shared/files/file_resolver.py b/backend/src/apis/shared/files/file_resolver.py index 58ca6ba4..6ce4a7b5 100644 --- a/backend/src/apis/shared/files/file_resolver.py +++ b/backend/src/apis/shared/files/file_resolver.py @@ -10,7 +10,7 @@ import base64 import logging from dataclasses import dataclass -from typing import List, Optional, TYPE_CHECKING +from typing import List, Optional import boto3 from botocore.exceptions import ClientError diff --git a/backend/src/apis/shared/files/models.py b/backend/src/apis/shared/files/models.py index 8d9d4587..c3fba383 100644 --- a/backend/src/apis/shared/files/models.py +++ b/backend/src/apis/shared/files/models.py @@ -8,7 +8,6 @@ from datetime import datetime, timezone from enum import Enum from typing import List, Optional -import time from pydantic import BaseModel, Field, ConfigDict diff --git a/backend/src/apis/shared/oauth/provider_repository.py b/backend/src/apis/shared/oauth/provider_repository.py index 9f3e9c49..31df2825 100644 --- a/backend/src/apis/shared/oauth/provider_repository.py +++ b/backend/src/apis/shared/oauth/provider_repository.py @@ -4,7 +4,7 @@ import logging import os from datetime import datetime, timezone -from typing import Dict, List, Optional +from typing import List, Optional import boto3 from botocore.exceptions import ClientError diff --git a/backend/src/apis/shared/oauth/routes.py b/backend/src/apis/shared/oauth/routes.py index 1c4de22e..d901bd98 100644 --- a/backend/src/apis/shared/oauth/routes.py +++ b/backend/src/apis/shared/oauth/routes.py @@ -134,7 +134,7 @@ async def initiate_connection( - 403 if user not authorized for provider """ logger.info( - f"User {current_user.email} initiating OAuth connection to {provider_id}" + "User initiating OAuth connection" ) # Resolve user's application roles @@ -180,7 +180,7 @@ async def oauth_callback( # Handle error from provider if error: - logger.warning(f"OAuth callback error: {error} - {error_description}") + logger.warning("OAuth callback error") params = urlencode({"error": error, "error_description": error_description or ""}) return RedirectResponse( url=f"{frontend_url}{callback_path}?{params}", @@ -252,7 +252,7 @@ async def disconnect_provider( Raises: HTTPException: 404 if not connected to provider """ - logger.info(f"User {current_user.email} disconnecting from {provider_id}") + logger.info("User disconnecting from OAuth provider") disconnected = await oauth_service.disconnect( user_id=current_user.user_id, diff --git a/backend/src/apis/shared/oauth/service.py b/backend/src/apis/shared/oauth/service.py index 975396c2..78612c75 100644 --- a/backend/src/apis/shared/oauth/service.py +++ b/backend/src/apis/shared/oauth/service.py @@ -22,7 +22,6 @@ OAuthConnectionStatus, OAuthProvider, OAuthUserToken, - compute_scopes_hash, ) from .provider_repository import OAuthProviderRepository, get_provider_repository from .token_cache import TokenCache, get_token_cache diff --git a/backend/src/apis/shared/quota.py b/backend/src/apis/shared/quota.py index 1939b21e..fd2c594d 100644 --- a/backend/src/apis/shared/quota.py +++ b/backend/src/apis/shared/quota.py @@ -7,7 +7,6 @@ import logging import os from typing import Optional -from decimal import Decimal from pydantic import BaseModel, Field, ConfigDict from agents.main_agent.quota.repository import QuotaRepository diff --git a/backend/src/apis/shared/rbac/admin_service.py b/backend/src/apis/shared/rbac/admin_service.py index 0ca57656..6525e09b 100644 --- a/backend/src/apis/shared/rbac/admin_service.py +++ b/backend/src/apis/shared/rbac/admin_service.py @@ -2,7 +2,6 @@ import logging from typing import List, Optional, Set -from datetime import datetime from apis.shared.auth.models import User diff --git a/backend/src/apis/shared/rbac/models.py b/backend/src/apis/shared/rbac/models.py index d4298e2f..0678df48 100644 --- a/backend/src/apis/shared/rbac/models.py +++ b/backend/src/apis/shared/rbac/models.py @@ -2,7 +2,6 @@ from dataclasses import dataclass, field from typing import List, Optional -from datetime import datetime from pydantic import BaseModel, Field diff --git a/backend/src/apis/shared/rbac/repository.py b/backend/src/apis/shared/rbac/repository.py index e2ece4c3..0a00486e 100644 --- a/backend/src/apis/shared/rbac/repository.py +++ b/backend/src/apis/shared/rbac/repository.py @@ -8,7 +8,7 @@ import boto3 from botocore.exceptions import ClientError -from .models import AppRole, EffectivePermissions +from .models import AppRole logger = logging.getLogger(__name__) diff --git a/backend/src/apis/shared/sessions/metadata.py b/backend/src/apis/shared/sessions/metadata.py index 21e83a85..acdb7b83 100644 --- a/backend/src/apis/shared/sessions/metadata.py +++ b/backend/src/apis/shared/sessions/metadata.py @@ -89,6 +89,68 @@ async def store_message_metadata( +async def store_user_display_text( + session_id: str, + user_id: str, + message_id: int, + display_text: str, +) -> None: + """ + Store the original user message text for clean UI display. + + When the prompt sent to the model differs from what the user typed + (e.g. RAG augmentation, file attachment content blocks), this stores + the original so the frontend can show the clean version. The full + augmented prompt stays in AgentCore Memory for the LLM. + + Uses a D# (display) prefix SK pattern to separate from C# cost records. + + Args: + session_id: Session identifier + user_id: User identifier + message_id: 0-based message index (user message position) + display_text: Original user message before prompt modification + """ + sessions_metadata_table = os.environ.get('DYNAMODB_SESSIONS_METADATA_TABLE_NAME') + if not sessions_metadata_table: + raise RuntimeError("DYNAMODB_SESSIONS_METADATA_TABLE_NAME environment variable is required") + + # Skip preview sessions + if is_preview_session(session_id): + return + + try: + import boto3 + from datetime import datetime, timezone, timedelta + + dynamodb = boto3.resource('dynamodb') + table = dynamodb.Table(sessions_metadata_table) + + timestamp = datetime.now(timezone.utc).isoformat() + ttl = int((datetime.now(timezone.utc) + timedelta(days=365)).timestamp()) + + item = { + "PK": f"USER#{user_id}", + "SK": f"D#{session_id}#{message_id}", + "GSI_PK": f"SESSION#{session_id}", + "GSI_SK": f"D#{message_id}", + "sessionId": session_id, + "messageId": message_id, + "userId": user_id, + "displayText": display_text, + "timestamp": timestamp, + "ttl": ttl, + } + + table.put_item(Item=item) + logger.info(f"💾 Stored displayText for user message {message_id} in session {session_id}") + + except Exception as e: + # Non-critical: displayText is a UI enhancement, don't break the request + logger.error(f"Failed to store user displayText: {e}", exc_info=True) + + + async def _store_message_metadata_cloud( session_id: str, user_id: str, @@ -311,7 +373,7 @@ async def _update_cost_summary_async( date = now.strftime('%Y-%m-%d') # Use storage abstraction for the atomic update - from apis.app_api.storage.metadata_storage import get_metadata_storage + from apis.app_api.storage import get_metadata_storage storage = get_metadata_storage() await storage.update_user_cost_summary( @@ -722,11 +784,11 @@ async def get_all_message_metadata(session_id: str, user_id: str) -> Dict[str, A async def _get_all_message_metadata_cloud(session_id: str, user_id: str, table_name: str) -> Dict[str, Any]: """ - Retrieve all message metadata (cost records) for a session from DynamoDB + Retrieve all message metadata (cost records + display text) for a session from DynamoDB - Uses the SessionLookupIndex GSI to query cost records by session ID. - Cost records have SK pattern: C#{timestamp}#{uuid} - GSI pattern: GSI_PK=SESSION#{session_id}, GSI_SK=C#{timestamp} + Uses the SessionLookupIndex GSI to query records by session ID. + Cost records have SK pattern: C#{timestamp}#{uuid}, GSI_SK: C#{timestamp} + Display text records have SK pattern: D#{session_id}#{message_id}, GSI_SK: D#{message_id} Args: session_id: Session identifier @@ -745,18 +807,21 @@ async def _get_all_message_metadata_cloud(session_id: str, user_id: str, table_n logger.info(f"🔍 Querying cost records via GSI for session {session_id}") - # Query cost records for this session using GSI - # GSI_PK: SESSION#{session_id} - # GSI_SK: begins_with C# for cost records - response = table.query( + # Query cost records (C#) and display text records (D#) in parallel + cost_response = table.query( IndexName='SessionLookupIndex', KeyConditionExpression=Key('GSI_PK').eq(f'SESSION#{session_id}') & Key('GSI_SK').begins_with('C#') ) + display_response = table.query( + IndexName='SessionLookupIndex', + KeyConditionExpression=Key('GSI_PK').eq(f'SESSION#{session_id}') & Key('GSI_SK').begins_with('D#') + ) - items = response.get("Items", []) + items = cost_response.get("Items", []) + display_items = display_response.get("Items", []) metadata_index = {} - logger.info(f"📦 DynamoDB returned {len(items)} cost record items") + logger.info(f"📦 DynamoDB returned {len(items)} cost record items, {len(display_items)} display text items") for item in items: # Verify user ownership @@ -781,6 +846,22 @@ async def _get_all_message_metadata_cloud(session_id: str, user_id: str, table_n metadata_index[message_id] = item_float logger.info(f"📂 Retrieved {len(metadata_index)} cost records from DynamoDB") + + # Merge displayText from D# records into metadata index + for item in display_items: + if item.get('userId') != user_id: + continue + item_float = _convert_decimal_to_float(item) + message_id_raw = item_float.get("messageId") + message_id = str(int(message_id_raw)) if isinstance(message_id_raw, (int, float)) else str(message_id_raw) + display_text = item_float.get("displayText") + if display_text: + if message_id in metadata_index: + metadata_index[message_id]["displayText"] = display_text + else: + metadata_index[message_id] = {"displayText": display_text} + logger.debug(f"🔗 Merged displayText for user message {message_id}") + logger.info(f"📋 Metadata keys: {sorted(metadata_index.keys())}") return metadata_index diff --git a/backend/src/apis/shared/sessions/models.py b/backend/src/apis/shared/sessions/models.py index fb4fd684..d92dc9e7 100644 --- a/backend/src/apis/shared/sessions/models.py +++ b/backend/src/apis/shared/sessions/models.py @@ -262,6 +262,7 @@ class MessageMetadata(BaseModel): attribution: Optional[Attribution] = Field(None, description="Attribution for cost tracking and billing") cost: Optional[float] = Field(None, description="Total cost in USD for this message (computed from token usage and pricing)") citations: Optional[List[Dict[str, str]]] = Field(None, description="RAG citations for this message (stored as dicts for flexible JSON storage)") + display_text: Optional[str] = Field(None, alias="displayText", description="Original user message text before RAG augmentation (for clean UI display)") # Note: Feedback will be added in future implementation # feedback: Optional[Feedback] = None diff --git a/backend/tests/agents/main_agent/session/test_session_factory.py b/backend/tests/agents/main_agent/session/test_session_factory.py index f3ca994c..c5aec739 100644 --- a/backend/tests/agents/main_agent/session/test_session_factory.py +++ b/backend/tests/agents/main_agent/session/test_session_factory.py @@ -187,10 +187,9 @@ class TestRetrievalThresholdEnvVars: @patch("agents.main_agent.session.session_factory._discover_strategy_ids") @patch("agents.main_agent.session.session_factory.AgentCoreMemoryConfig") @patch("agents.main_agent.session.session_factory.RetrievalConfig") - @patch("agents.main_agent.session.session_factory.AgentCoreMemorySessionManager") @patch("agents.main_agent.session.turn_based_session_manager.TurnBasedSessionManager", create=True) def test_uses_default_thresholds( - self, mock_tbsm, mock_session_mgr, mock_retrieval, mock_mem_config, mock_discover, monkeypatch + self, mock_tbsm, mock_retrieval, mock_mem_config, mock_discover, monkeypatch ): """Default relevance_score=0.7 and top_k=10 when env vars not set.""" from agents.main_agent.session.session_factory import SessionFactory @@ -198,7 +197,7 @@ def test_uses_default_thresholds( monkeypatch.delenv("AGENTCORE_MEMORY_RELEVANCE_SCORE", raising=False) monkeypatch.delenv("AGENTCORE_MEMORY_TOP_K", raising=False) mock_discover.return_value = ("semantic-1", "pref-1", "sum-1") - mock_session_mgr.return_value = MagicMock() + mock_tbsm.return_value = MagicMock() SessionFactory._create_cloud_session_manager( memory_id="mem-1", session_id="s-1", user_id="u-1", @@ -213,10 +212,9 @@ def test_uses_default_thresholds( @patch("agents.main_agent.session.session_factory._discover_strategy_ids") @patch("agents.main_agent.session.session_factory.AgentCoreMemoryConfig") @patch("agents.main_agent.session.session_factory.RetrievalConfig") - @patch("agents.main_agent.session.session_factory.AgentCoreMemorySessionManager") @patch("agents.main_agent.session.turn_based_session_manager.TurnBasedSessionManager", create=True) def test_reads_custom_thresholds_from_env( - self, mock_tbsm, mock_session_mgr, mock_retrieval, mock_mem_config, mock_discover, monkeypatch + self, mock_tbsm, mock_retrieval, mock_mem_config, mock_discover, monkeypatch ): """Custom env vars are passed to RetrievalConfig.""" from agents.main_agent.session.session_factory import SessionFactory @@ -224,7 +222,7 @@ def test_reads_custom_thresholds_from_env( monkeypatch.setenv("AGENTCORE_MEMORY_RELEVANCE_SCORE", "0.85") monkeypatch.setenv("AGENTCORE_MEMORY_TOP_K", "20") mock_discover.return_value = ("semantic-1", "pref-1", "sum-1") - mock_session_mgr.return_value = MagicMock() + mock_tbsm.return_value = MagicMock() SessionFactory._create_cloud_session_manager( memory_id="mem-1", session_id="s-1", user_id="u-1", diff --git a/backend/tests/property/test_pbt_cleanup_service.py b/backend/tests/property/test_pbt_cleanup_service.py new file mode 100644 index 00000000..cca1f099 --- /dev/null +++ b/backend/tests/property/test_pbt_cleanup_service.py @@ -0,0 +1,384 @@ +""" +Property-based tests for cleanup service. + +Feature: reliable-document-deletion +""" + +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest +from hypothesis import given, settings, strategies as st + + +# --------------------------------------------------------------------------- +# Shared Hypothesis strategies +# --------------------------------------------------------------------------- + +st_document_id = st.text( + alphabet="abcdefghijklmnopqrstuvwxyz0123456789", + min_size=3, + max_size=12, +).map(lambda s: f"DOC-{s}") + +st_assistant_id = st.text( + alphabet="abcdefghijklmnopqrstuvwxyz0123456789-", + min_size=3, + max_size=20, +).map(lambda s: f"AST-{s}") + +st_s3_key = st.text( + alphabet="abcdefghijklmnopqrstuvwxyz0123456789/-_.", + min_size=5, + max_size=80, +) + +st_chunk_count = st.one_of(st.none(), st.integers(min_value=0, max_value=100)) + +st_max_retries = st.integers(min_value=1, max_value=5) + +st_base_delay = st.floats(min_value=0.01, max_value=1.0) + + +# --------------------------------------------------------------------------- +# Property 4: Cleanup retry bounded by max_retries +# --------------------------------------------------------------------------- + + +@given( + document_id=st_document_id, + assistant_id=st_assistant_id, + s3_key=st_s3_key, + chunk_count=st_chunk_count, + max_retries=st_max_retries, + base_delay=st_base_delay, +) +@settings(max_examples=100, deadline=None) +@pytest.mark.asyncio +async def test_cleanup_retry_bounded_by_max_retries( + document_id, assistant_id, s3_key, chunk_count, max_retries, base_delay +): + """ + **Validates: Requirements 4.1, 4.2** + + For any max_retries >= 1 and base_delay > 0, verify at most max_retries + attempts are made for each phase (vector deletion and S3 deletion), with + delay following base_delay * 2^attempt + jitter. + """ + vector_call_count = 0 + s3_call_count = 0 + + async def failing_deterministic(*args, **kwargs): + nonlocal vector_call_count + vector_call_count += 1 + raise Exception("vector deletion failed") + + async def failing_fallback(*args, **kwargs): + nonlocal vector_call_count + vector_call_count += 1 + raise Exception("vector deletion fallback failed") + + sleep_delays = [] + + async def mock_sleep(delay): + sleep_delays.append(delay) + + mock_s3_client = MagicMock() + + def failing_s3_delete(**kwargs): + nonlocal s3_call_count + s3_call_count += 1 + raise Exception("s3 deletion failed") + + mock_s3_client.delete_object = MagicMock(side_effect=failing_s3_delete) + + mock_hard_delete = AsyncMock() + + with ( + patch.dict("os.environ", {"S3_ASSISTANTS_DOCUMENTS_BUCKET_NAME": "test-bucket"}), + patch( + "apis.shared.embeddings.bedrock_embeddings.delete_vectors_for_document_deterministic", + side_effect=failing_deterministic, + ), + patch( + "apis.shared.embeddings.bedrock_embeddings.delete_vectors_for_document", + side_effect=failing_fallback, + ), + patch("boto3.client", return_value=mock_s3_client), + patch( + "apis.app_api.documents.services.cleanup_service.asyncio.sleep", + side_effect=mock_sleep, + ), + patch( + "apis.app_api.documents.services.document_service.hard_delete_document", + mock_hard_delete, + ), + ): + from apis.app_api.documents.services.cleanup_service import ( + cleanup_document_resources, + ) + + result = await cleanup_document_resources( + document_id=document_id, + assistant_id=assistant_id, + s3_key=s3_key, + chunk_count=chunk_count, + max_retries=max_retries, + base_delay=base_delay, + ) + + # Both phases fail, so result must be False + assert result is False + + # Vector deletion: exactly max_retries attempts + assert vector_call_count == max_retries, ( + f"Expected {max_retries} vector deletion attempts, got {vector_call_count}" + ) + + # S3 deletion: exactly max_retries attempts + assert s3_call_count == max_retries, ( + f"Expected {max_retries} S3 deletion attempts, got {s3_call_count}" + ) + + # Each phase sleeps (max_retries - 1) times (no sleep after last attempt) + expected_sleep_count = (max_retries - 1) * 2 + assert len(sleep_delays) == expected_sleep_count, ( + f"Expected {expected_sleep_count} sleep calls, got {len(sleep_delays)}" + ) + + # Verify each delay follows base_delay * 2^attempt + jitter (jitter in [0, 0.1]) + for phase in range(2): + for attempt in range(max_retries - 1): + idx = phase * (max_retries - 1) + attempt + delay = sleep_delays[idx] + min_expected = base_delay * (2 ** attempt) + max_expected = base_delay * (2 ** attempt) + 0.1 + assert min_expected <= delay <= max_expected, ( + f"Phase {phase}, attempt {attempt}: delay {delay} not in " + f"[{min_expected}, {max_expected}]" + ) + + # hard_delete should NOT have been called since cleanup failed + mock_hard_delete.assert_not_called() + + +# --------------------------------------------------------------------------- +# Property 5: Failed cleanup preserves DynamoDB record +# --------------------------------------------------------------------------- + + +@given( + document_id=st_document_id, + assistant_id=st_assistant_id, + s3_key=st_s3_key, + chunk_count=st_chunk_count, + max_retries=st_max_retries, + base_delay=st_base_delay, +) +@settings(max_examples=100, deadline=None) +@pytest.mark.asyncio +async def test_failed_cleanup_preserves_dynamodb_record( + document_id, assistant_id, s3_key, chunk_count, max_retries, base_delay +): + """ + **Validates: Requirement 4.4** + + Verify that when cleanup fails after all retries, hard_delete_document is + NOT called and the record remains with status="deleting" and valid TTL. + cleanup_document_resources returns False. + """ + async def failing_deterministic(*args, **kwargs): + raise Exception("vector deletion failed") + + async def failing_fallback(*args, **kwargs): + raise Exception("vector deletion fallback failed") + + mock_s3_client = MagicMock() + mock_s3_client.delete_object = MagicMock(side_effect=Exception("s3 deletion failed")) + + mock_hard_delete = AsyncMock() + + with ( + patch.dict("os.environ", {"S3_ASSISTANTS_DOCUMENTS_BUCKET_NAME": "test-bucket"}), + patch( + "apis.shared.embeddings.bedrock_embeddings.delete_vectors_for_document_deterministic", + side_effect=failing_deterministic, + ), + patch( + "apis.shared.embeddings.bedrock_embeddings.delete_vectors_for_document", + side_effect=failing_fallback, + ), + patch("boto3.client", return_value=mock_s3_client), + patch( + "apis.app_api.documents.services.cleanup_service.asyncio.sleep", + new_callable=AsyncMock, + ), + patch( + "apis.app_api.documents.services.document_service.hard_delete_document", + mock_hard_delete, + ), + ): + from apis.app_api.documents.services.cleanup_service import ( + cleanup_document_resources, + ) + + result = await cleanup_document_resources( + document_id=document_id, + assistant_id=assistant_id, + s3_key=s3_key, + chunk_count=chunk_count, + max_retries=max_retries, + base_delay=base_delay, + ) + + # Cleanup failed — result must be False + assert result is False, ( + "cleanup_document_resources should return False when both phases fail" + ) + + # hard_delete_document must NOT have been called + mock_hard_delete.assert_not_called() + + +# --------------------------------------------------------------------------- +# Property 6: Successful cleanup triggers hard-delete +# --------------------------------------------------------------------------- + + +@given( + document_id=st_document_id, + assistant_id=st_assistant_id, + s3_key=st_s3_key, + chunk_count=st_chunk_count, + max_retries=st_max_retries, + base_delay=st_base_delay, +) +@settings(max_examples=100, deadline=None) +@pytest.mark.asyncio +async def test_successful_cleanup_triggers_hard_delete( + document_id, assistant_id, s3_key, chunk_count, max_retries, base_delay +): + """ + **Validates: Requirements 4.3, 9.1** + + Verify that when both vector and S3 deletion succeed, + hard_delete_document IS called with correct assistant_id and document_id, + and cleanup_document_resources returns True. + """ + async def succeeding_deterministic(*args, **kwargs): + pass + + async def succeeding_fallback(*args, **kwargs): + pass + + mock_s3_client = MagicMock() + mock_s3_client.delete_object = MagicMock(return_value={}) + + mock_hard_delete = AsyncMock() + + with ( + patch.dict("os.environ", {"S3_ASSISTANTS_DOCUMENTS_BUCKET_NAME": "test-bucket"}), + patch( + "apis.shared.embeddings.bedrock_embeddings.delete_vectors_for_document_deterministic", + side_effect=succeeding_deterministic, + ), + patch( + "apis.shared.embeddings.bedrock_embeddings.delete_vectors_for_document", + side_effect=succeeding_fallback, + ), + patch("boto3.client", return_value=mock_s3_client), + patch( + "apis.app_api.documents.services.document_service.hard_delete_document", + mock_hard_delete, + ), + ): + from apis.app_api.documents.services.cleanup_service import ( + cleanup_document_resources, + ) + + result = await cleanup_document_resources( + document_id=document_id, + assistant_id=assistant_id, + s3_key=s3_key, + chunk_count=chunk_count, + max_retries=max_retries, + base_delay=base_delay, + ) + + # Cleanup succeeded — result must be True + assert result is True, ( + "cleanup_document_resources should return True when both phases succeed" + ) + + # hard_delete_document must have been called exactly once + mock_hard_delete.assert_called_once() + + # Verify it was called with the correct arguments + call_args = mock_hard_delete.call_args + assert call_args[0] == (assistant_id, document_id) or ( + call_args[1].get("assistant_id") == assistant_id + and call_args[1].get("document_id") == document_id + ), ( + f"hard_delete_document called with wrong args: {call_args}" + ) + + +# --------------------------------------------------------------------------- +# Property 9: Bulk cleanup counts are consistent +# --------------------------------------------------------------------------- + + +@given( + success_flags=st.lists(st.booleans(), min_size=0, max_size=15), +) +@settings(max_examples=100, deadline=None) +@pytest.mark.asyncio +async def test_bulk_cleanup_counts_are_consistent(success_flags): + """ + **Validates: Requirements 8.3** + + For any set of documents, verify that + success_count + failure_count == len(documents). + """ + # Build mock documents from the generated flags + documents = [] + for i, _ in enumerate(success_flags): + doc = MagicMock() + doc.document_id = f"DOC-{i}" + doc.s3_key = f"assistants/AST-test/{i}/file.pdf" + doc.chunk_count = i + documents.append(doc) + + # Create a side_effect iterator that returns True/False per document + cleanup_results = list(success_flags) + + async def mock_cleanup(document_id, assistant_id, s3_key, chunk_count, max_retries=3): + idx = int(document_id.split("-")[1]) + return cleanup_results[idx] + + with patch( + "apis.app_api.documents.services.cleanup_service.cleanup_document_resources", + side_effect=mock_cleanup, + ): + from apis.app_api.documents.services.cleanup_service import ( + cleanup_assistant_documents, + ) + + success_count, failure_count = await cleanup_assistant_documents( + assistant_id="AST-test", + documents=documents, + max_retries=3, + ) + + assert success_count + failure_count == len(documents), ( + f"success_count ({success_count}) + failure_count ({failure_count}) " + f"!= len(documents) ({len(documents)})" + ) + + expected_successes = sum(1 for f in success_flags if f is True) + expected_failures = len(success_flags) - expected_successes + assert success_count == expected_successes, ( + f"Expected {expected_successes} successes, got {success_count}" + ) + assert failure_count == expected_failures, ( + f"Expected {expected_failures} failures, got {failure_count}" + ) diff --git a/backend/tests/property/test_pbt_document_deletion.py b/backend/tests/property/test_pbt_document_deletion.py new file mode 100644 index 00000000..def05985 --- /dev/null +++ b/backend/tests/property/test_pbt_document_deletion.py @@ -0,0 +1,447 @@ +""" +Property-based tests for reliable document deletion. + +Feature: reliable-document-deletion +""" + +import time +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest +from hypothesis import given, settings, strategies as st + +from apis.app_api.documents.models import Document + + +# --------------------------------------------------------------------------- +# Shared Hypothesis strategies +# --------------------------------------------------------------------------- + +# Valid pre-delete statuses (any status a document can be in before soft-delete) +st_pre_delete_status = st.sampled_from( + ["uploading", "chunking", "embedding", "complete", "failed"] +) + +# TTL days: positive integers, capped at a reasonable max +st_ttl_days = st.integers(min_value=1, max_value=365) + +# Simple ID strategies +st_assistant_id = st.text( + alphabet="abcdefghijklmnopqrstuvwxyz0123456789-", + min_size=3, + max_size=20, +).map(lambda s: f"AST-{s}") + +st_document_id = st.text( + alphabet="abcdefghijklmnopqrstuvwxyz0123456789", + min_size=3, + max_size=12, +).map(lambda s: f"DOC-{s}") + +st_owner_id = st.uuids().map(str) + +st_s3_key = st.text( + alphabet="abcdefghijklmnopqrstuvwxyz0123456789/-_.", + min_size=5, + max_size=80, +) + +st_chunk_count = st.one_of(st.none(), st.integers(min_value=0, max_value=500)) + + +# --------------------------------------------------------------------------- +# Property 1: Soft-delete postconditions +# --------------------------------------------------------------------------- + + +@given( + status=st_pre_delete_status, + ttl_days=st_ttl_days, + assistant_id=st_assistant_id, + document_id=st_document_id, + owner_id=st_owner_id, + s3_key=st_s3_key, + chunk_count=st_chunk_count, +) +@settings(max_examples=100) +@pytest.mark.asyncio +async def test_soft_delete_postconditions( + status, ttl_days, assistant_id, document_id, owner_id, s3_key, chunk_count +): + """ + **Validates: Requirements 1.1, 1.2, 1.3, 1.4** + + For any valid document status and ttl_days > 0, verify the returned document + has status="deleting", TTL = now_epoch + ttl_days * 86400, updated updatedAt, + and preserved chunk_count / s3_key. + """ + # Capture time just before the call for TTL tolerance check + time_before = int(time.time()) + + # Build the DynamoDB ALL_NEW response that update_item would return. + # The function sets status, updatedAt, and ttl; everything else is preserved. + def fake_update_item(**kwargs): + expr_values = kwargs["ExpressionAttributeValues"] + return { + "Attributes": { + "PK": f"AST#{assistant_id}", + "SK": f"DOC#{document_id}", + "documentId": document_id, + "assistantId": assistant_id, + "filename": "test.pdf", + "contentType": "application/pdf", + "sizeBytes": 1024, + "s3Key": s3_key, + "status": expr_values[":deleting"], + "chunkCount": chunk_count, + "createdAt": "2024-01-01T00:00:00Z", + "updatedAt": expr_values[":now"], + "ttl": expr_values[":ttl_value"], + } + } + + # Mock boto3.resource inside the function's scope + mock_table = MagicMock() + mock_table.update_item = MagicMock(side_effect=fake_update_item) + + mock_dynamodb = MagicMock() + mock_dynamodb.Table.return_value = mock_table + + mock_boto3 = MagicMock() + mock_boto3.resource.return_value = mock_dynamodb + + with ( + patch( + "apis.shared.assistants.service.get_assistant", + new_callable=AsyncMock, + return_value=MagicMock(), # ownership check passes + ), + patch.dict( + "os.environ", + {"DYNAMODB_ASSISTANTS_TABLE_NAME": "test-table"}, + ), + patch( + "boto3.resource", + return_value=mock_dynamodb, + ), + ): + from apis.app_api.documents.services.document_service import ( + soft_delete_document, + ) + + result = await soft_delete_document( + assistant_id=assistant_id, + document_id=document_id, + owner_id=owner_id, + ttl_days=ttl_days, + ) + + time_after = int(time.time()) + + # Postcondition checks + assert result is not None, "soft_delete_document should return a Document" + assert isinstance(result, Document) + + # 1.1: status is "deleting" + assert result.status == "deleting" + + # 1.2: TTL = now_epoch + ttl_days * 86400 (within tolerance of call duration) + expected_ttl_min = time_before + ttl_days * 86400 + expected_ttl_max = time_after + ttl_days * 86400 + assert expected_ttl_min <= result.ttl <= expected_ttl_max, ( + f"TTL {result.ttl} not in expected range [{expected_ttl_min}, {expected_ttl_max}]" + ) + + # 1.3: updatedAt is refreshed (non-empty ISO timestamp) + assert result.updated_at is not None + assert len(result.updated_at) > 0 + assert result.updated_at != "2024-01-01T00:00:00Z", ( + "updatedAt should be refreshed, not the original value" + ) + + # 1.4: chunk_count and s3_key are preserved + assert result.s3_key == s3_key + assert result.chunk_count == chunk_count + + +# --------------------------------------------------------------------------- +# Property 2: Idempotent soft-delete +# --------------------------------------------------------------------------- + + +@given( + ttl_days=st_ttl_days, + assistant_id=st_assistant_id, + document_id=st_document_id, + owner_id=st_owner_id, + s3_key=st_s3_key, + chunk_count=st_chunk_count, +) +@settings(max_examples=100) +@pytest.mark.asyncio +async def test_idempotent_soft_delete( + ttl_days, assistant_id, document_id, owner_id, s3_key, chunk_count +): + """ + **Validates: Requirement 1.6** + + For any document already in "deleting" status, calling soft_delete_document + again shall succeed without error and the document shall remain in "deleting" + status. + """ + import time as _time + + # Build the DynamoDB ALL_NEW response simulating a document already in + # "deleting" status. The update_item call should still succeed because + # the ConditionExpression only checks attribute_exists(PK). + def fake_update_item(**kwargs): + expr_values = kwargs["ExpressionAttributeValues"] + return { + "Attributes": { + "PK": f"AST#{assistant_id}", + "SK": f"DOC#{document_id}", + "documentId": document_id, + "assistantId": assistant_id, + "filename": "already-deleting.pdf", + "contentType": "application/pdf", + "sizeBytes": 2048, + "s3Key": s3_key, + "status": expr_values[":deleting"], # stays "deleting" + "chunkCount": chunk_count, + "createdAt": "2024-01-01T00:00:00Z", + "updatedAt": expr_values[":now"], + "ttl": expr_values[":ttl_value"], + } + } + + mock_table = MagicMock() + mock_table.update_item = MagicMock(side_effect=fake_update_item) + + mock_dynamodb = MagicMock() + mock_dynamodb.Table.return_value = mock_table + + with ( + patch( + "apis.shared.assistants.service.get_assistant", + new_callable=AsyncMock, + return_value=MagicMock(), # ownership check passes + ), + patch.dict( + "os.environ", + {"DYNAMODB_ASSISTANTS_TABLE_NAME": "test-table"}, + ), + patch( + "boto3.resource", + return_value=mock_dynamodb, + ), + ): + from apis.app_api.documents.services.document_service import ( + soft_delete_document, + ) + + result = await soft_delete_document( + assistant_id=assistant_id, + document_id=document_id, + owner_id=owner_id, + ttl_days=ttl_days, + ) + + # The call must succeed (not None) — idempotent re-delete + assert result is not None, ( + "soft_delete_document on an already-deleting document should succeed" + ) + assert isinstance(result, Document) + + # Status must still be "deleting" + assert result.status == "deleting", ( + f"Expected status='deleting', got '{result.status}'" + ) + + +# --------------------------------------------------------------------------- +# Property 8: Bulk soft-delete covers all documents +# --------------------------------------------------------------------------- + + +@given( + assistant_id=st_assistant_id, + document_ids=st.lists(st_document_id, min_size=0, max_size=20), + ttl_days=st_ttl_days, +) +@settings(max_examples=100) +@pytest.mark.asyncio +async def test_bulk_soft_delete_covers_all_documents( + assistant_id, document_ids, ttl_days +): + """ + **Validates: Requirements 8.1** + + For any list of N document IDs, batch_soft_delete_documents marks all N + as "deleting" with TTL. Verify the returned count equals len(document_ids) + and update_item was called exactly N times (once per document). + """ + mock_table = MagicMock() + # All update_item calls succeed (no ConditionalCheckFailedException) + mock_table.update_item = MagicMock(return_value={}) + + mock_dynamodb = MagicMock() + mock_dynamodb.Table.return_value = mock_table + + with ( + patch.dict( + "os.environ", + {"DYNAMODB_ASSISTANTS_TABLE_NAME": "test-table"}, + ), + patch( + "boto3.resource", + return_value=mock_dynamodb, + ), + ): + from apis.app_api.documents.services.document_service import ( + batch_soft_delete_documents, + ) + + result = await batch_soft_delete_documents( + assistant_id=assistant_id, + document_ids=document_ids, + ttl_days=ttl_days, + ) + + n = len(document_ids) + + # Returned count equals the number of document IDs provided + assert result == n, ( + f"Expected {n} documents marked, got {result}" + ) + + # update_item was called exactly N times (once per document) + assert mock_table.update_item.call_count == n, ( + f"Expected {n} update_item calls, got {mock_table.update_item.call_count}" + ) + + # Each call targeted the correct assistant and document + for i, doc_id in enumerate(document_ids): + call_kwargs = mock_table.update_item.call_args_list[i][1] + expected_key = { + "PK": f"AST#{assistant_id}", + "SK": f"DOC#{doc_id}", + } + assert call_kwargs["Key"] == expected_key, ( + f"Call {i}: expected key {expected_key}, got {call_kwargs['Key']}" + ) + + # Verify the update sets status to "deleting" and includes a TTL + expr_values = call_kwargs["ExpressionAttributeValues"] + assert expr_values[":deleting"] == "deleting", ( + f"Call {i}: expected status 'deleting', got {expr_values[':deleting']}" + ) + assert isinstance(expr_values[":ttl_value"], int), ( + f"Call {i}: TTL should be an integer epoch, got {type(expr_values[':ttl_value'])}" + ) + assert expr_values[":ttl_value"] > 0, ( + f"Call {i}: TTL should be positive, got {expr_values[':ttl_value']}" + ) + + +# --------------------------------------------------------------------------- +# Shared strategy for all document statuses (including "deleting") +# --------------------------------------------------------------------------- + +st_all_statuses = st.sampled_from( + ["uploading", "chunking", "embedding", "complete", "failed", "deleting"] +) + + +# --------------------------------------------------------------------------- +# Property 10: List documents excludes deleting status +# --------------------------------------------------------------------------- + + +def _make_dynamo_item(assistant_id: str, doc_id: str, status: str) -> dict: + """Build a DynamoDB item dict matching the Document model shape.""" + return { + "PK": f"AST#{assistant_id}", + "SK": f"DOC#{doc_id}", + "documentId": doc_id, + "assistantId": assistant_id, + "filename": f"{doc_id}.pdf", + "contentType": "application/pdf", + "sizeBytes": 1024, + "s3Key": f"assistants/{assistant_id}/{doc_id}/file.pdf", + "status": status, + "createdAt": "2024-01-01T00:00:00Z", + "updatedAt": "2024-06-01T00:00:00Z", + } + + +@given( + assistant_id=st_assistant_id, + owner_id=st_owner_id, + statuses=st.lists(st_all_statuses, min_size=0, max_size=15), +) +@settings(max_examples=100) +@pytest.mark.asyncio +async def test_list_documents_excludes_deleting_status( + assistant_id, owner_id, statuses +): + """ + **Validates: Requirements 11.1** + + For any set of documents with mixed statuses, listing documents SHALL + never return documents with status="deleting". All non-deleting documents + SHALL be returned. + """ + # Build document IDs and DynamoDB Items for each generated status + doc_ids = [f"DOC-{i:04d}" for i in range(len(statuses))] + items = [ + _make_dynamo_item(assistant_id, doc_id, status) + for doc_id, status in zip(doc_ids, statuses) + ] + + # Mock DynamoDB table.query to return the generated items + mock_table = MagicMock() + mock_table.query = MagicMock(return_value={"Items": items}) + + mock_dynamodb = MagicMock() + mock_dynamodb.Table.return_value = mock_table + + with ( + patch( + "apis.shared.assistants.service.get_assistant", + new_callable=AsyncMock, + return_value=MagicMock(), # ownership check passes + ), + patch.dict( + "os.environ", + {"DYNAMODB_ASSISTANTS_TABLE_NAME": "test-table"}, + ), + patch( + "boto3.resource", + return_value=mock_dynamodb, + ), + ): + from apis.app_api.documents.services.document_service import ( + list_assistant_documents, + ) + + documents, _ = await list_assistant_documents( + assistant_id=assistant_id, + owner_id=owner_id, + ) + + # Property: no returned document has status="deleting" + for doc in documents: + assert doc.status != "deleting", ( + f"Document {doc.document_id} has status='deleting' but should be excluded" + ) + + # Property: all non-deleting documents ARE returned + expected_non_deleting = { + doc_id + for doc_id, status in zip(doc_ids, statuses) + if status != "deleting" + } + returned_ids = {doc.document_id for doc in documents} + assert returned_ids == expected_non_deleting, ( + f"Expected non-deleting docs {expected_non_deleting}, got {returned_ids}" + ) diff --git a/backend/tests/property/test_pbt_search_filtering.py b/backend/tests/property/test_pbt_search_filtering.py new file mode 100644 index 00000000..06a5640b --- /dev/null +++ b/backend/tests/property/test_pbt_search_filtering.py @@ -0,0 +1,130 @@ +""" +Property-based tests for search path document status filtering. + +Feature: reliable-document-deletion +""" + +from unittest.mock import MagicMock, patch + +from hypothesis import given, settings, strategies as st + +# --------------------------------------------------------------------------- +# Shared Hypothesis strategies +# --------------------------------------------------------------------------- + +# Document statuses that can exist in DynamoDB (or None for missing record) +st_document_status = st.sampled_from( + ["complete", "deleting", "failed", "uploading", "chunking", "embedding", None] +) + +st_assistant_id = st.text( + alphabet="abcdefghijklmnopqrstuvwxyz0123456789-", + min_size=3, + max_size=20, +).map(lambda s: f"AST-{s}") + +st_document_id = st.text( + alphabet="abcdefghijklmnopqrstuvwxyz0123456789", + min_size=3, + max_size=12, +).map(lambda s: f"DOC-{s}") + +# Strategy for a list of (document_id, status_or_none) pairs with unique doc IDs +st_doc_status_pairs = st.lists( + st.tuples(st_document_id, st_document_status), + min_size=1, + max_size=15, + unique_by=lambda pair: pair[0], +) + + +# --------------------------------------------------------------------------- +# Property 3: Search results only contain complete documents +# --------------------------------------------------------------------------- + + +@given( + assistant_id=st_assistant_id, + doc_status_pairs=st_doc_status_pairs, +) +@settings(max_examples=100, deadline=None) +def test_search_results_only_contain_complete_documents( + assistant_id, doc_status_pairs +): + """ + **Validates: Requirements 3.1, 3.2, 3.3** + + For any mix of document statuses (complete, deleting, failed, uploading, + chunking, embedding, or None for missing), verify that + _filter_vectors_by_document_status returns only chunks from documents + with status="complete". + """ + # Build vector results referencing these documents + vectors = [] + for doc_id, _ in doc_status_pairs: + vectors.append({ + "key": f"{doc_id}#0", + "distance": 0.5, + "metadata": { + "document_id": doc_id, + "text": f"chunk from {doc_id}", + }, + }) + + # Build a mock DynamoDB table that returns the appropriate status per doc + status_map = {doc_id: status for doc_id, status in doc_status_pairs} + + def mock_get_item(**kwargs): + key = kwargs["Key"] + doc_id = key["SK"].replace("DOC#", "") + status = status_map.get(doc_id) + if status is None: + # Missing document — no Item in response + return {} + return {"Item": {"status": status}} + + mock_table = MagicMock() + mock_table.get_item = MagicMock(side_effect=mock_get_item) + + mock_dynamodb = MagicMock() + mock_dynamodb.Table.return_value = mock_table + + with ( + patch.dict("os.environ", {"DYNAMODB_ASSISTANTS_TABLE_NAME": "test-table"}), + patch("boto3.resource", return_value=mock_dynamodb), + ): + from apis.shared.assistants.rag_service import ( + _filter_vectors_by_document_status, + ) + + filtered = _filter_vectors_by_document_status(vectors, assistant_id) + + # Determine which doc IDs should be in the results + expected_doc_ids = { + doc_id for doc_id, status in doc_status_pairs if status == "complete" + } + excluded_doc_ids = { + doc_id for doc_id, status in doc_status_pairs if status != "complete" + } + + # All returned chunks must be from "complete" documents + for v in filtered: + returned_doc_id = v["metadata"]["document_id"] + assert returned_doc_id in expected_doc_ids, ( + f"Chunk from doc '{returned_doc_id}' should not be in results " + f"(status={status_map.get(returned_doc_id)})" + ) + + # All "complete" document chunks must be present + returned_doc_ids = {v["metadata"]["document_id"] for v in filtered} + for doc_id in expected_doc_ids: + assert doc_id in returned_doc_ids, ( + f"Chunk from complete doc '{doc_id}' is missing from results" + ) + + # No excluded document chunks should be present + for doc_id in excluded_doc_ids: + assert doc_id not in returned_doc_ids, ( + f"Chunk from non-complete doc '{doc_id}' should be excluded " + f"(status={status_map.get(doc_id)})" + ) diff --git a/backend/tests/property/test_pbt_vector_deletion.py b/backend/tests/property/test_pbt_vector_deletion.py new file mode 100644 index 00000000..83dea28f --- /dev/null +++ b/backend/tests/property/test_pbt_vector_deletion.py @@ -0,0 +1,95 @@ +""" +Property-based tests for deterministic vector deletion. + +Feature: reliable-document-deletion +""" + +from unittest.mock import MagicMock, patch + +import pytest +from hypothesis import given, settings, strategies as st + + +# --------------------------------------------------------------------------- +# Strategies +# --------------------------------------------------------------------------- + +st_document_id = st.text( + alphabet="abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789", + min_size=1, + max_size=30, +) + +st_chunk_count = st.integers(min_value=0, max_value=2000) + + +# --------------------------------------------------------------------------- +# Property 7: Deterministic vector key generation +# --------------------------------------------------------------------------- + + +@given(document_id=st_document_id, chunk_count=st_chunk_count) +@settings(max_examples=100) +@pytest.mark.asyncio +async def test_deterministic_vector_key_generation(document_id, chunk_count): + """ + **Validates: Requirements 5.1** + + For any document_id (text) and chunk_count >= 0, verify: + - Exactly chunk_count keys are generated matching {document_id}#{i} + - Keys are batched into groups of at most 500 + - For chunk_count=0, no API calls are made + - The return value equals chunk_count + """ + mock_client = MagicMock() + mock_client.delete_vectors = MagicMock() + + with ( + patch.dict( + "os.environ", + { + "S3_ASSISTANTS_VECTOR_STORE_BUCKET_NAME": "test-bucket", + "S3_ASSISTANTS_VECTOR_STORE_INDEX_NAME": "test-index", + }, + ), + patch("boto3.client", return_value=mock_client), + ): + # Re-import to pick up patched env vars + import importlib + import apis.shared.embeddings.bedrock_embeddings as mod + + importlib.reload(mod) + + result = await mod.delete_vectors_for_document_deterministic( + document_id, chunk_count + ) + + # Return value equals chunk_count + assert result == chunk_count + + if chunk_count == 0: + # No API calls for zero chunks + mock_client.delete_vectors.assert_not_called() + return + + # Collect all keys across all delete_vectors calls + all_keys = [] + for call in mock_client.delete_vectors.call_args_list: + batch_keys = call[1]["keys"] + # No batch exceeds 500 keys + assert len(batch_keys) <= 500, ( + f"Batch size {len(batch_keys)} exceeds limit of 500" + ) + all_keys.extend(batch_keys) + + # Total keys equals chunk_count + assert len(all_keys) == chunk_count, ( + f"Expected {chunk_count} keys, got {len(all_keys)}" + ) + + # Each key matches {document_id}#{i} for sequential i + for i in range(chunk_count): + expected_key = f"{document_id}#{i}" + assert all_keys[i] == expected_key, ( + f"Key at index {i}: expected '{expected_key}', got '{all_keys[i]}'" + ) diff --git a/backend/tests/routes/test_cleanup_service.py b/backend/tests/routes/test_cleanup_service.py new file mode 100644 index 00000000..b4c78fa8 --- /dev/null +++ b/backend/tests/routes/test_cleanup_service.py @@ -0,0 +1,403 @@ +"""Unit tests for cleanup_service. + +Tests cover: +- cleanup_document_resources: retry logic, backoff, success/failure paths, independent phases +- cleanup_assistant_documents: concurrent processing, count consistency + +Requirements: 4.1, 4.2, 4.3, 4.4, 4.5, 8.2, 8.3, 8.4 +""" + +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest + +ASSISTANT_ID = "ast-001" +DOCUMENT_ID = "DOC-abc123" +S3_KEY = "assistants/ast-001/documents/DOC-abc123/report.pdf" +CHUNK_COUNT = 5 + +ENV_PATCH = {"S3_ASSISTANTS_DOCUMENTS_BUCKET_NAME": "test-bucket"} + + +# ========================================================================= +# TestCleanupDocumentResources +# ========================================================================= + + +class TestCleanupDocumentResources: + """Unit tests for cleanup_document_resources.""" + + @pytest.mark.asyncio + @patch.dict("os.environ", ENV_PATCH) + async def test_cleanup_returns_true_when_both_succeed(self): + """Req 4.3: Returns True when both vector and S3 deletion succeed.""" + mock_s3_client = MagicMock() + mock_s3_client.delete_object = MagicMock(return_value={}) + mock_hard_delete = AsyncMock() + + with ( + patch( + "apis.shared.embeddings.bedrock_embeddings.delete_vectors_for_document_deterministic", + new_callable=AsyncMock, + ), + patch( + "apis.shared.embeddings.bedrock_embeddings.delete_vectors_for_document", + new_callable=AsyncMock, + ), + patch("boto3.client", return_value=mock_s3_client), + patch( + "apis.app_api.documents.services.document_service.hard_delete_document", + mock_hard_delete, + ), + ): + from apis.app_api.documents.services.cleanup_service import ( + cleanup_document_resources, + ) + + result = await cleanup_document_resources( + document_id=DOCUMENT_ID, + assistant_id=ASSISTANT_ID, + s3_key=S3_KEY, + chunk_count=CHUNK_COUNT, + ) + + assert result is True + + @pytest.mark.asyncio + @patch.dict("os.environ", ENV_PATCH) + async def test_cleanup_returns_false_when_vectors_fail(self): + """Req 4.4: Returns False when vector deletion always fails.""" + mock_s3_client = MagicMock() + mock_s3_client.delete_object = MagicMock(return_value={}) + mock_hard_delete = AsyncMock() + + with ( + patch( + "apis.shared.embeddings.bedrock_embeddings.delete_vectors_for_document_deterministic", + new_callable=AsyncMock, + side_effect=Exception("vector error"), + ), + patch( + "apis.shared.embeddings.bedrock_embeddings.delete_vectors_for_document", + new_callable=AsyncMock, + side_effect=Exception("vector fallback error"), + ), + patch("boto3.client", return_value=mock_s3_client), + patch( + "apis.app_api.documents.services.cleanup_service.asyncio.sleep", + new_callable=AsyncMock, + ), + patch( + "apis.app_api.documents.services.document_service.hard_delete_document", + mock_hard_delete, + ), + ): + from apis.app_api.documents.services.cleanup_service import ( + cleanup_document_resources, + ) + + result = await cleanup_document_resources( + document_id=DOCUMENT_ID, + assistant_id=ASSISTANT_ID, + s3_key=S3_KEY, + chunk_count=CHUNK_COUNT, + ) + + assert result is False + + @pytest.mark.asyncio + @patch.dict("os.environ", ENV_PATCH) + async def test_cleanup_returns_false_when_s3_fails(self): + """Req 4.4: Returns False when S3 deletion always fails.""" + mock_s3_client = MagicMock() + mock_s3_client.delete_object = MagicMock(side_effect=Exception("s3 error")) + mock_hard_delete = AsyncMock() + + with ( + patch( + "apis.shared.embeddings.bedrock_embeddings.delete_vectors_for_document_deterministic", + new_callable=AsyncMock, + ), + patch( + "apis.shared.embeddings.bedrock_embeddings.delete_vectors_for_document", + new_callable=AsyncMock, + ), + patch("boto3.client", return_value=mock_s3_client), + patch( + "apis.app_api.documents.services.cleanup_service.asyncio.sleep", + new_callable=AsyncMock, + ), + patch( + "apis.app_api.documents.services.document_service.hard_delete_document", + mock_hard_delete, + ), + ): + from apis.app_api.documents.services.cleanup_service import ( + cleanup_document_resources, + ) + + result = await cleanup_document_resources( + document_id=DOCUMENT_ID, + assistant_id=ASSISTANT_ID, + s3_key=S3_KEY, + chunk_count=CHUNK_COUNT, + ) + + assert result is False + + @pytest.mark.asyncio + @patch.dict("os.environ", ENV_PATCH) + async def test_cleanup_independent_phases(self): + """Req 4.5: S3 deletion is still attempted even when vector deletion fails.""" + mock_s3_client = MagicMock() + mock_s3_client.delete_object = MagicMock(return_value={}) + mock_hard_delete = AsyncMock() + + with ( + patch( + "apis.shared.embeddings.bedrock_embeddings.delete_vectors_for_document_deterministic", + new_callable=AsyncMock, + side_effect=Exception("vector error"), + ), + patch( + "apis.shared.embeddings.bedrock_embeddings.delete_vectors_for_document", + new_callable=AsyncMock, + side_effect=Exception("vector fallback error"), + ), + patch("boto3.client", return_value=mock_s3_client), + patch( + "apis.app_api.documents.services.cleanup_service.asyncio.sleep", + new_callable=AsyncMock, + ), + patch( + "apis.app_api.documents.services.document_service.hard_delete_document", + mock_hard_delete, + ), + ): + from apis.app_api.documents.services.cleanup_service import ( + cleanup_document_resources, + ) + + await cleanup_document_resources( + document_id=DOCUMENT_ID, + assistant_id=ASSISTANT_ID, + s3_key=S3_KEY, + chunk_count=CHUNK_COUNT, + ) + + # S3 delete_object was still called despite vector failure + mock_s3_client.delete_object.assert_called() + + @pytest.mark.asyncio + @patch.dict("os.environ", ENV_PATCH) + async def test_cleanup_retries_on_failure(self): + """Req 4.1: Vector deletion retries up to max_retries (3 calls total).""" + call_count = 0 + + async def fail_twice_then_succeed(*args, **kwargs): + nonlocal call_count + call_count += 1 + if call_count <= 2: + raise Exception("transient error") + + mock_s3_client = MagicMock() + mock_s3_client.delete_object = MagicMock(return_value={}) + mock_hard_delete = AsyncMock() + + with ( + patch( + "apis.shared.embeddings.bedrock_embeddings.delete_vectors_for_document_deterministic", + side_effect=fail_twice_then_succeed, + ), + patch( + "apis.shared.embeddings.bedrock_embeddings.delete_vectors_for_document", + new_callable=AsyncMock, + ), + patch("boto3.client", return_value=mock_s3_client), + patch( + "apis.app_api.documents.services.cleanup_service.asyncio.sleep", + new_callable=AsyncMock, + ), + patch( + "apis.app_api.documents.services.document_service.hard_delete_document", + mock_hard_delete, + ), + ): + from apis.app_api.documents.services.cleanup_service import ( + cleanup_document_resources, + ) + + result = await cleanup_document_resources( + document_id=DOCUMENT_ID, + assistant_id=ASSISTANT_ID, + s3_key=S3_KEY, + chunk_count=CHUNK_COUNT, + max_retries=3, + ) + + assert result is True + assert call_count == 3 + + @pytest.mark.asyncio + @patch.dict("os.environ", ENV_PATCH) + async def test_cleanup_calls_hard_delete_on_success(self): + """Req 4.3, 9.1: hard_delete_document is called when both phases succeed.""" + mock_s3_client = MagicMock() + mock_s3_client.delete_object = MagicMock(return_value={}) + mock_hard_delete = AsyncMock() + + with ( + patch( + "apis.shared.embeddings.bedrock_embeddings.delete_vectors_for_document_deterministic", + new_callable=AsyncMock, + ), + patch( + "apis.shared.embeddings.bedrock_embeddings.delete_vectors_for_document", + new_callable=AsyncMock, + ), + patch("boto3.client", return_value=mock_s3_client), + patch( + "apis.app_api.documents.services.document_service.hard_delete_document", + mock_hard_delete, + ), + ): + from apis.app_api.documents.services.cleanup_service import ( + cleanup_document_resources, + ) + + await cleanup_document_resources( + document_id=DOCUMENT_ID, + assistant_id=ASSISTANT_ID, + s3_key=S3_KEY, + chunk_count=CHUNK_COUNT, + ) + + mock_hard_delete.assert_called_once_with(ASSISTANT_ID, DOCUMENT_ID) + + @pytest.mark.asyncio + @patch.dict("os.environ", ENV_PATCH) + async def test_cleanup_does_not_call_hard_delete_on_failure(self): + """Req 4.4: hard_delete_document is NOT called when cleanup fails.""" + mock_s3_client = MagicMock() + mock_s3_client.delete_object = MagicMock(side_effect=Exception("s3 error")) + mock_hard_delete = AsyncMock() + + with ( + patch( + "apis.shared.embeddings.bedrock_embeddings.delete_vectors_for_document_deterministic", + new_callable=AsyncMock, + side_effect=Exception("vector error"), + ), + patch( + "apis.shared.embeddings.bedrock_embeddings.delete_vectors_for_document", + new_callable=AsyncMock, + side_effect=Exception("vector fallback error"), + ), + patch("boto3.client", return_value=mock_s3_client), + patch( + "apis.app_api.documents.services.cleanup_service.asyncio.sleep", + new_callable=AsyncMock, + ), + patch( + "apis.app_api.documents.services.document_service.hard_delete_document", + mock_hard_delete, + ), + ): + from apis.app_api.documents.services.cleanup_service import ( + cleanup_document_resources, + ) + + await cleanup_document_resources( + document_id=DOCUMENT_ID, + assistant_id=ASSISTANT_ID, + s3_key=S3_KEY, + chunk_count=CHUNK_COUNT, + ) + + mock_hard_delete.assert_not_called() + + +# ========================================================================= +# TestCleanupAssistantDocuments +# ========================================================================= + + +class TestCleanupAssistantDocuments: + """Unit tests for cleanup_assistant_documents.""" + + @pytest.mark.asyncio + async def test_bulk_cleanup_all_succeed(self): + """Req 8.2, 8.3, 8.4: All 3 documents succeed → (3, 0).""" + documents = [] + for i in range(3): + doc = MagicMock() + doc.document_id = f"DOC-{i}" + doc.s3_key = f"assistants/{ASSISTANT_ID}/{i}/file.pdf" + doc.chunk_count = 5 + documents.append(doc) + + async def mock_cleanup(**kwargs): + return True + + with patch( + "apis.app_api.documents.services.cleanup_service.cleanup_document_resources", + side_effect=mock_cleanup, + ): + from apis.app_api.documents.services.cleanup_service import ( + cleanup_assistant_documents, + ) + + success, failure = await cleanup_assistant_documents( + assistant_id=ASSISTANT_ID, + documents=documents, + ) + + assert (success, failure) == (3, 0) + + @pytest.mark.asyncio + async def test_bulk_cleanup_mixed_results(self): + """Req 8.3: 2 succeed, 1 fails → (2, 1).""" + documents = [] + for i in range(3): + doc = MagicMock() + doc.document_id = f"DOC-{i}" + doc.s3_key = f"assistants/{ASSISTANT_ID}/{i}/file.pdf" + doc.chunk_count = 5 + documents.append(doc) + + call_idx = 0 + + async def mock_cleanup(**kwargs): + nonlocal call_idx + call_idx += 1 + # Third document fails + return call_idx != 3 + + with patch( + "apis.app_api.documents.services.cleanup_service.cleanup_document_resources", + side_effect=mock_cleanup, + ): + from apis.app_api.documents.services.cleanup_service import ( + cleanup_assistant_documents, + ) + + success, failure = await cleanup_assistant_documents( + assistant_id=ASSISTANT_ID, + documents=documents, + ) + + assert (success, failure) == (2, 1) + + @pytest.mark.asyncio + async def test_bulk_cleanup_empty_list(self): + """Req 8.3: Empty list → (0, 0).""" + from apis.app_api.documents.services.cleanup_service import ( + cleanup_assistant_documents, + ) + + success, failure = await cleanup_assistant_documents( + assistant_id=ASSISTANT_ID, + documents=[], + ) + + assert (success, failure) == (0, 0) diff --git a/backend/tests/routes/test_delete_endpoints.py b/backend/tests/routes/test_delete_endpoints.py new file mode 100644 index 00000000..4fa30be0 --- /dev/null +++ b/backend/tests/routes/test_delete_endpoints.py @@ -0,0 +1,241 @@ +"""Tests for refactored delete endpoints (soft-delete + background cleanup). + +Endpoints under test: +- DELETE /assistants/{assistant_id}/documents/{document_id} → 204 after soft-delete +- DELETE /assistants/{assistant_id} → 204 after soft-deleting docs + hard-deleting assistant + +Requirements: 2.1, 2.2, 8.1, 8.2 +""" + +from unittest.mock import AsyncMock, patch + +import pytest +from fastapi import FastAPI +from fastapi.testclient import TestClient + +from apis.app_api.documents.routes import router as documents_router +from apis.app_api.assistants.routes import router as assistants_router +from apis.app_api.documents.models import Document +from apis.shared.auth.dependencies import get_current_user_id, get_current_user +from apis.shared.auth.models import User + + +# --------------------------------------------------------------------------- +# Constants +# --------------------------------------------------------------------------- + +ASSISTANT_ID = "ast-001" +USER_ID = "user-001" +DOC_SERVICE = "apis.app_api.documents.services.document_service" +CLEANUP_SERVICE = "apis.app_api.documents.services.cleanup_service" +ASSISTANT_SERVICE = "apis.shared.assistants.service" + + +# --------------------------------------------------------------------------- +# Helpers +# --------------------------------------------------------------------------- + + +def _make_document(**overrides) -> Document: + defaults = dict( + documentId="doc-001", + assistantId=ASSISTANT_ID, + filename="report.pdf", + contentType="application/pdf", + sizeBytes=1024, + s3Key=f"assistants/{ASSISTANT_ID}/documents/doc-001/report.pdf", + status="deleting", + chunkCount=5, + createdAt="2024-01-01T00:00:00Z", + updatedAt="2024-01-01T00:00:00Z", + ttl=1737504600, + ) + defaults.update(overrides) + return Document.model_validate(defaults) + + +def _make_user() -> User: + return User( + email="test@example.com", + user_id=USER_ID, + name="Test User", + roles=["User"], + ) + + +# --------------------------------------------------------------------------- +# TestDocumentDeleteEndpoint +# --------------------------------------------------------------------------- + + +class TestDocumentDeleteEndpoint: + """DELETE /assistants/{id}/documents/{doc_id} — soft-delete + background cleanup.""" + + @pytest.fixture + def app(self): + _app = FastAPI() + _app.include_router(documents_router) + _app.dependency_overrides[get_current_user_id] = lambda: USER_ID + return _app + + def test_delete_returns_204_after_soft_delete(self, app): + """Req 2.1: Endpoint returns 204 after successful soft-delete.""" + doc = _make_document() + + with patch( + f"{DOC_SERVICE}.soft_delete_document", + new_callable=AsyncMock, + return_value=doc, + ), patch( + f"{CLEANUP_SERVICE}.cleanup_document_resources", + new_callable=AsyncMock, + ), patch( + "asyncio.ensure_future", + ): + client = TestClient(app) + resp = client.delete(f"/assistants/{ASSISTANT_ID}/documents/doc-001") + + assert resp.status_code == 204 + + def test_delete_returns_404_when_not_found(self, app): + """Req 1.5: Returns 404 when soft_delete_document returns None.""" + with patch( + f"{DOC_SERVICE}.soft_delete_document", + new_callable=AsyncMock, + return_value=None, + ): + client = TestClient(app) + resp = client.delete(f"/assistants/{ASSISTANT_ID}/documents/doc-001") + + assert resp.status_code == 404 + + def test_delete_fires_cleanup_in_background(self, app): + """Req 2.2: Cleanup is scheduled as a background task via asyncio.ensure_future.""" + doc = _make_document() + + with patch( + f"{DOC_SERVICE}.soft_delete_document", + new_callable=AsyncMock, + return_value=doc, + ), patch( + f"{CLEANUP_SERVICE}.cleanup_document_resources", + new_callable=AsyncMock, + ) as mock_cleanup, patch( + "asyncio.ensure_future", + ) as mock_ensure: + client = TestClient(app) + resp = client.delete(f"/assistants/{ASSISTANT_ID}/documents/doc-001") + + assert resp.status_code == 204 + mock_ensure.assert_called_once() + + +# --------------------------------------------------------------------------- +# TestAssistantDeleteEndpoint +# --------------------------------------------------------------------------- + + +class TestAssistantDeleteEndpoint: + """DELETE /assistants/{id} — soft-delete docs, hard-delete assistant, background cleanup.""" + + ROUTES_MODULE = "apis.app_api.assistants.routes" + + @pytest.fixture + def app(self): + _app = FastAPI() + _app.include_router(assistants_router) + _app.dependency_overrides[get_current_user] = _make_user + return _app + + def test_delete_soft_deletes_all_docs(self, app): + """Req 8.1: All documents are batch soft-deleted before assistant is removed.""" + docs = [ + _make_document(documentId="doc-001"), + _make_document(documentId="doc-002"), + ] + + with patch( + f"{self.ROUTES_MODULE}.list_assistant_documents", + new_callable=AsyncMock, + return_value=(docs, None), + ), patch( + f"{DOC_SERVICE}.batch_soft_delete_documents", + new_callable=AsyncMock, + return_value=2, + ) as mock_batch, patch( + f"{self.ROUTES_MODULE}.delete_assistant", + new_callable=AsyncMock, + return_value=True, + ), patch( + f"{CLEANUP_SERVICE}.cleanup_assistant_documents", + new_callable=AsyncMock, + ), patch( + "asyncio.ensure_future", + ): + client = TestClient(app) + resp = client.delete(f"/assistants/{ASSISTANT_ID}") + + assert resp.status_code == 204 + mock_batch.assert_called_once_with( + assistant_id=ASSISTANT_ID, + document_ids=["doc-001", "doc-002"], + ) + + def test_delete_hard_deletes_assistant(self, app): + """Req 8.1: Assistant record is hard-deleted after soft-deleting docs.""" + docs = [_make_document(documentId="doc-001")] + + with patch( + f"{self.ROUTES_MODULE}.list_assistant_documents", + new_callable=AsyncMock, + return_value=(docs, None), + ), patch( + f"{DOC_SERVICE}.batch_soft_delete_documents", + new_callable=AsyncMock, + return_value=1, + ), patch( + f"{self.ROUTES_MODULE}.delete_assistant", + new_callable=AsyncMock, + return_value=True, + ) as mock_delete_ast, patch( + f"{CLEANUP_SERVICE}.cleanup_assistant_documents", + new_callable=AsyncMock, + ), patch( + "asyncio.ensure_future", + ): + client = TestClient(app) + resp = client.delete(f"/assistants/{ASSISTANT_ID}") + + assert resp.status_code == 204 + mock_delete_ast.assert_called_once_with( + assistant_id=ASSISTANT_ID, + owner_id=USER_ID, + ) + + def test_delete_fires_cleanup_in_background(self, app): + """Req 8.2: Background cleanup is scheduled via asyncio.ensure_future.""" + docs = [_make_document(documentId="doc-001")] + + with patch( + f"{self.ROUTES_MODULE}.list_assistant_documents", + new_callable=AsyncMock, + return_value=(docs, None), + ), patch( + f"{DOC_SERVICE}.batch_soft_delete_documents", + new_callable=AsyncMock, + return_value=1, + ), patch( + f"{self.ROUTES_MODULE}.delete_assistant", + new_callable=AsyncMock, + return_value=True, + ), patch( + f"{CLEANUP_SERVICE}.cleanup_assistant_documents", + new_callable=AsyncMock, + ), patch( + "asyncio.ensure_future", + ) as mock_ensure: + client = TestClient(app) + resp = client.delete(f"/assistants/{ASSISTANT_ID}") + + assert resp.status_code == 204 + mock_ensure.assert_called_once() diff --git a/backend/tests/routes/test_document_deletion.py b/backend/tests/routes/test_document_deletion.py new file mode 100644 index 00000000..4bd29568 --- /dev/null +++ b/backend/tests/routes/test_document_deletion.py @@ -0,0 +1,362 @@ +"""Tests for document deletion service functions. + +Tests cover: +- soft_delete_document: status transition, TTL, ownership, not-found, idempotency +- hard_delete_document: unconditional delete, no ownership check, error handling +- batch_soft_delete_documents: full batch, partial failures, empty list + +Requirements: 1.1, 1.2, 1.3, 1.4, 1.5, 1.6, 8.1, 9.1, 9.2 +""" + +import time +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest +from botocore.exceptions import ClientError + +ASSISTANT_ID = "ast-001" +DOCUMENT_ID = "DOC-abc123" +OWNER_ID = "user-001" + +ENV_PATCH = {"DYNAMODB_ASSISTANTS_TABLE_NAME": "test-table"} + + +def _dynamo_doc_attrs( + *, + status="deleting", + ttl_value=None, + chunk_count=5, + s3_key="assistants/ast-001/documents/DOC-abc123/report.pdf", +): + """Build a DynamoDB Attributes dict that Document.model_validate can parse.""" + attrs = { + "PK": f"AST#{ASSISTANT_ID}", + "SK": f"DOC#{DOCUMENT_ID}", + "documentId": DOCUMENT_ID, + "assistantId": ASSISTANT_ID, + "filename": "report.pdf", + "contentType": "application/pdf", + "sizeBytes": 1024, + "s3Key": s3_key, + "status": status, + "chunkCount": chunk_count, + "createdAt": "2024-01-01T00:00:00Z", + "updatedAt": "2024-06-01T12:00:00Z", + } + if ttl_value is not None: + attrs["ttl"] = ttl_value + return attrs + + +def _mock_table(): + """Return a MagicMock that behaves like a DynamoDB Table resource.""" + table = MagicMock() + table.update_item = MagicMock() + table.delete_item = MagicMock() + return table + + +def _mock_dynamodb_resource(table): + """Return a MagicMock that behaves like boto3.resource('dynamodb').""" + resource = MagicMock() + resource.Table.return_value = table + return resource + + +# ========================================================================= +# TestSoftDeleteDocument +# ========================================================================= + + +class TestSoftDeleteDocument: + """Unit tests for soft_delete_document.""" + + @pytest.mark.asyncio + @patch.dict("os.environ", ENV_PATCH) + async def test_soft_delete_returns_document_with_deleting_status(self): + """Req 1.1: Soft-delete sets status to 'deleting' and returns the document.""" + ttl_value = int(time.time()) + 7 * 86400 + table = _mock_table() + table.update_item.return_value = { + "Attributes": _dynamo_doc_attrs(status="deleting", ttl_value=ttl_value) + } + + mock_boto3 = MagicMock() + mock_boto3.resource.return_value = _mock_dynamodb_resource(table) + + with patch("boto3.resource", mock_boto3.resource), \ + patch( + "apis.shared.assistants.service.get_assistant", + new_callable=AsyncMock, + return_value={"assistantId": ASSISTANT_ID}, + ): + from apis.app_api.documents.services.document_service import ( + soft_delete_document, + ) + + result = await soft_delete_document(ASSISTANT_ID, DOCUMENT_ID, OWNER_ID) + + assert result is not None + assert result.status == "deleting" + assert result.document_id == DOCUMENT_ID + + @pytest.mark.asyncio + @patch.dict("os.environ", ENV_PATCH) + async def test_soft_delete_sets_ttl(self): + """Req 1.2: TTL is approximately now + 7*86400.""" + before = int(time.time()) + ttl_value = before + 7 * 86400 + table = _mock_table() + table.update_item.return_value = { + "Attributes": _dynamo_doc_attrs(status="deleting", ttl_value=ttl_value) + } + + mock_boto3 = MagicMock() + mock_boto3.resource.return_value = _mock_dynamodb_resource(table) + + with patch("boto3.resource", mock_boto3.resource), \ + patch( + "apis.shared.assistants.service.get_assistant", + new_callable=AsyncMock, + return_value={"assistantId": ASSISTANT_ID}, + ): + from apis.app_api.documents.services.document_service import ( + soft_delete_document, + ) + + result = await soft_delete_document(ASSISTANT_ID, DOCUMENT_ID, OWNER_ID) + + assert result is not None + assert result.ttl is not None + expected_ttl = int(time.time()) + 7 * 86400 + assert abs(result.ttl - expected_ttl) < 5 + + # Verify update_item was called with TTL in expression values + call_kwargs = table.update_item.call_args + expr_values = call_kwargs.kwargs.get("ExpressionAttributeValues", {}) + assert ":ttl_value" in expr_values + + @pytest.mark.asyncio + @patch.dict("os.environ", ENV_PATCH) + async def test_soft_delete_returns_none_when_not_found(self): + """Req 1.5: Returns None when document doesn't exist (ConditionalCheckFailedException).""" + table = _mock_table() + table.update_item.side_effect = ClientError( + {"Error": {"Code": "ConditionalCheckFailedException", "Message": ""}}, + "UpdateItem", + ) + + mock_boto3 = MagicMock() + mock_boto3.resource.return_value = _mock_dynamodb_resource(table) + + with patch("boto3.resource", mock_boto3.resource), \ + patch( + "apis.shared.assistants.service.get_assistant", + new_callable=AsyncMock, + return_value={"assistantId": ASSISTANT_ID}, + ): + from apis.app_api.documents.services.document_service import ( + soft_delete_document, + ) + + result = await soft_delete_document(ASSISTANT_ID, DOCUMENT_ID, OWNER_ID) + + assert result is None + + @pytest.mark.asyncio + @patch.dict("os.environ", ENV_PATCH) + async def test_soft_delete_returns_none_when_not_owned(self): + """Req 1.5: Returns None when assistant is not owned by user.""" + with patch( + "apis.shared.assistants.service.get_assistant", + new_callable=AsyncMock, + return_value=None, + ): + from apis.app_api.documents.services.document_service import ( + soft_delete_document, + ) + + result = await soft_delete_document(ASSISTANT_ID, DOCUMENT_ID, OWNER_ID) + + assert result is None + + @pytest.mark.asyncio + @patch.dict("os.environ", ENV_PATCH) + async def test_soft_delete_idempotent_on_deleting_status(self): + """Req 1.6: Re-deleting a document already in 'deleting' status succeeds.""" + ttl_value = int(time.time()) + 7 * 86400 + table = _mock_table() + table.update_item.return_value = { + "Attributes": _dynamo_doc_attrs(status="deleting", ttl_value=ttl_value) + } + + mock_boto3 = MagicMock() + mock_boto3.resource.return_value = _mock_dynamodb_resource(table) + + with patch("boto3.resource", mock_boto3.resource), \ + patch( + "apis.shared.assistants.service.get_assistant", + new_callable=AsyncMock, + return_value={"assistantId": ASSISTANT_ID}, + ): + from apis.app_api.documents.services.document_service import ( + soft_delete_document, + ) + + result = await soft_delete_document(ASSISTANT_ID, DOCUMENT_ID, OWNER_ID) + + assert result is not None + assert result.status == "deleting" + + +# ========================================================================= +# TestHardDeleteDocument +# ========================================================================= + + +class TestHardDeleteDocument: + """Unit tests for hard_delete_document.""" + + @pytest.mark.asyncio + @patch.dict("os.environ", ENV_PATCH) + async def test_hard_delete_returns_true_on_success(self): + """Req 9.1: Hard-delete succeeds and returns True.""" + table = _mock_table() + table.delete_item.return_value = {} + + mock_boto3 = MagicMock() + mock_boto3.resource.return_value = _mock_dynamodb_resource(table) + + with patch("boto3.resource", mock_boto3.resource): + from apis.app_api.documents.services.document_service import ( + hard_delete_document, + ) + + result = await hard_delete_document(ASSISTANT_ID, DOCUMENT_ID) + + assert result is True + table.delete_item.assert_called_once() + call_kwargs = table.delete_item.call_args + key = call_kwargs.kwargs.get("Key") or call_kwargs[1].get("Key") + assert key == {"PK": f"AST#{ASSISTANT_ID}", "SK": f"DOC#{DOCUMENT_ID}"} + + @pytest.mark.asyncio + @patch.dict("os.environ", ENV_PATCH) + async def test_hard_delete_no_ownership_check(self): + """Req 9.2: Hard-delete does NOT call get_assistant (no ownership check).""" + table = _mock_table() + table.delete_item.return_value = {} + + mock_boto3 = MagicMock() + mock_boto3.resource.return_value = _mock_dynamodb_resource(table) + + with patch("boto3.resource", mock_boto3.resource), \ + patch( + "apis.shared.assistants.service.get_assistant", + new_callable=AsyncMock, + ) as mock_get_assistant: + from apis.app_api.documents.services.document_service import ( + hard_delete_document, + ) + + await hard_delete_document(ASSISTANT_ID, DOCUMENT_ID) + mock_get_assistant.assert_not_called() + + @pytest.mark.asyncio + @patch.dict("os.environ", ENV_PATCH) + async def test_hard_delete_returns_false_on_error(self): + """Req 9.1: Hard-delete returns False on ClientError.""" + table = _mock_table() + table.delete_item.side_effect = ClientError( + {"Error": {"Code": "InternalServerError", "Message": "boom"}}, + "DeleteItem", + ) + + mock_boto3 = MagicMock() + mock_boto3.resource.return_value = _mock_dynamodb_resource(table) + + with patch("boto3.resource", mock_boto3.resource): + from apis.app_api.documents.services.document_service import ( + hard_delete_document, + ) + + result = await hard_delete_document(ASSISTANT_ID, DOCUMENT_ID) + + assert result is False + + +# ========================================================================= +# TestBatchSoftDeleteDocuments +# ========================================================================= + + +class TestBatchSoftDeleteDocuments: + """Unit tests for batch_soft_delete_documents.""" + + @pytest.mark.asyncio + @patch.dict("os.environ", ENV_PATCH) + async def test_batch_marks_all_documents(self): + """Req 8.1: All documents in the batch are marked as deleting.""" + table = _mock_table() + table.update_item.return_value = {} + + mock_boto3 = MagicMock() + mock_boto3.resource.return_value = _mock_dynamodb_resource(table) + + with patch("boto3.resource", mock_boto3.resource): + from apis.app_api.documents.services.document_service import ( + batch_soft_delete_documents, + ) + + doc_ids = ["DOC-001", "DOC-002", "DOC-003"] + count = await batch_soft_delete_documents(ASSISTANT_ID, doc_ids) + + assert count == 3 + assert table.update_item.call_count == 3 + + @pytest.mark.asyncio + @patch.dict("os.environ", ENV_PATCH) + async def test_batch_skips_missing_documents(self): + """Req 8.1: Missing documents are skipped, partial count returned.""" + table = _mock_table() + table.update_item.side_effect = [ + {}, + ClientError( + {"Error": {"Code": "ConditionalCheckFailedException", "Message": ""}}, + "UpdateItem", + ), + {}, + ] + + mock_boto3 = MagicMock() + mock_boto3.resource.return_value = _mock_dynamodb_resource(table) + + with patch("boto3.resource", mock_boto3.resource): + from apis.app_api.documents.services.document_service import ( + batch_soft_delete_documents, + ) + + doc_ids = ["DOC-001", "DOC-missing", "DOC-003"] + count = await batch_soft_delete_documents(ASSISTANT_ID, doc_ids) + + assert count == 2 + assert table.update_item.call_count == 3 + + @pytest.mark.asyncio + @patch.dict("os.environ", ENV_PATCH) + async def test_batch_empty_list_returns_zero(self): + """Req 8.1: Empty document list returns 0.""" + table = _mock_table() + + mock_boto3 = MagicMock() + mock_boto3.resource.return_value = _mock_dynamodb_resource(table) + + with patch("boto3.resource", mock_boto3.resource): + from apis.app_api.documents.services.document_service import ( + batch_soft_delete_documents, + ) + + count = await batch_soft_delete_documents(ASSISTANT_ID, []) + + assert count == 0 + table.update_item.assert_not_called() diff --git a/backend/tests/routes/test_sessions.py b/backend/tests/routes/test_sessions.py index 3ea40d72..4c988f15 100644 --- a/backend/tests/routes/test_sessions.py +++ b/backend/tests/routes/test_sessions.py @@ -262,9 +262,15 @@ def test_returns_204_on_success(self, app, make_user, authenticated_client): mock_service.delete_agentcore_memory = AsyncMock() mock_service.delete_session_files = AsyncMock() + mock_share_service = AsyncMock() + mock_share_service.delete_shares_for_session = AsyncMock(return_value=0) + with patch( "apis.app_api.sessions.routes.SessionService", return_value=mock_service, + ), patch( + "apis.app_api.sessions.routes.get_share_service", + return_value=mock_share_service, ): resp = client.delete("/sessions/sess-001") @@ -286,6 +292,32 @@ def test_returns_404_when_not_found(self, app, make_user, authenticated_client): assert resp.status_code == 404 + def test_queues_share_cleanup_on_delete(self, app, make_user, authenticated_client): + """Deleting a session should queue share snapshot cleanup as a background task.""" + user = make_user() + client = authenticated_client(app, user) + + mock_service = AsyncMock() + mock_service.delete_session = AsyncMock(return_value=True) + mock_service.delete_agentcore_memory = AsyncMock() + mock_service.delete_session_files = AsyncMock() + + mock_share_service = AsyncMock() + mock_share_service.delete_shares_for_session = AsyncMock(return_value=2) + + with patch( + "apis.app_api.sessions.routes.SessionService", + return_value=mock_service, + ), patch( + "apis.app_api.sessions.routes.get_share_service", + return_value=mock_share_service, + ): + resp = client.delete("/sessions/sess-001") + + assert resp.status_code == 204 + # Background task should have been called with the session id + mock_share_service.delete_shares_for_session.assert_called_once_with("sess-001") + # --------------------------------------------------------------------------- # Requirement 3.7: POST /sessions/bulk-delete returns 200 @@ -304,9 +336,15 @@ def test_returns_200_with_results(self, app, make_user, authenticated_client): mock_service.delete_agentcore_memory = AsyncMock() mock_service.delete_session_files = AsyncMock() + mock_share_service = AsyncMock() + mock_share_service.delete_shares_for_session = AsyncMock(return_value=0) + with patch( "apis.app_api.sessions.routes.SessionService", return_value=mock_service, + ), patch( + "apis.app_api.sessions.routes.get_share_service", + return_value=mock_share_service, ): resp = client.post( "/sessions/bulk-delete", @@ -331,9 +369,15 @@ def test_partial_failure(self, app, make_user, authenticated_client): mock_service.delete_agentcore_memory = AsyncMock() mock_service.delete_session_files = AsyncMock() + mock_share_service = AsyncMock() + mock_share_service.delete_shares_for_session = AsyncMock(return_value=0) + with patch( "apis.app_api.sessions.routes.SessionService", return_value=mock_service, + ), patch( + "apis.app_api.sessions.routes.get_share_service", + return_value=mock_share_service, ): resp = client.post( "/sessions/bulk-delete", @@ -345,6 +389,34 @@ def test_partial_failure(self, app, make_user, authenticated_client): assert body["deletedCount"] == 1 assert body["failedCount"] == 1 + def test_bulk_delete_queues_share_cleanup(self, app, make_user, authenticated_client): + """Bulk delete should queue share cleanup for each successfully deleted session.""" + user = make_user() + client = authenticated_client(app, user) + + mock_service = AsyncMock() + mock_service.delete_session = AsyncMock(side_effect=[True, True]) + mock_service.delete_agentcore_memory = AsyncMock() + mock_service.delete_session_files = AsyncMock() + + mock_share_service = AsyncMock() + mock_share_service.delete_shares_for_session = AsyncMock(return_value=1) + + with patch( + "apis.app_api.sessions.routes.SessionService", + return_value=mock_service, + ), patch( + "apis.app_api.sessions.routes.get_share_service", + return_value=mock_share_service, + ): + resp = client.post( + "/sessions/bulk-delete", + json={"sessionIds": ["sess-001", "sess-002"]}, + ) + + assert resp.status_code == 200 + assert mock_share_service.delete_shares_for_session.call_count == 2 + def test_rejects_empty_list(self, app, make_user, authenticated_client): """Req 3.7: Should return 422 for empty session_ids list.""" user = make_user() diff --git a/backend/tests/routes/test_share_export.py b/backend/tests/routes/test_share_export.py index 3073c5c9..6a091cde 100644 --- a/backend/tests/routes/test_share_export.py +++ b/backend/tests/routes/test_share_export.py @@ -174,23 +174,29 @@ async def test_copies_valid_messages(self, service): ] mock_mgr = MagicMock() - mock_mgr.append_message = MagicMock() + mock_mgr.create_message = MagicMock() + + # Mock SessionMessage.from_message to return a simple object + mock_session_msg = MagicMock() with patch.dict(os.environ, {"AGENTCORE_MEMORY_ID": "mem-123", "AWS_REGION": "us-east-1"}), \ patch("bedrock_agentcore.memory.integrations.strands.session_manager.AgentCoreMemorySessionManager", return_value=mock_mgr), \ - patch("bedrock_agentcore.memory.integrations.strands.config.AgentCoreMemoryConfig"): + patch("bedrock_agentcore.memory.integrations.strands.config.AgentCoreMemoryConfig"), \ + patch("strands.types.session.SessionMessage") as mock_session_msg_class: + mock_session_msg_class.from_message.return_value = mock_session_msg count = await service._copy_messages_to_memory("sess-new", "user-1", snapshot) assert count == 2 - assert mock_mgr.append_message.call_count == 2 + assert mock_mgr.create_message.call_count == 2 - # Verify first call was the user message in Converse format - first_call_msg = mock_mgr.append_message.call_args_list[0][0][0] - assert first_call_msg == {"role": "user", "content": [{"text": "Hello"}]} + # Verify calls used session_id and "default" namespace + first_call = mock_mgr.create_message.call_args_list[0] + assert first_call[0][0] == "sess-new" # session_id + assert first_call[0][1] == "default" # namespace - # Verify second call was the assistant message - second_call_msg = mock_mgr.append_message.call_args_list[1][0][0] - assert second_call_msg == {"role": "assistant", "content": [{"text": "Hi there"}]} + second_call = mock_mgr.create_message.call_args_list[1] + assert second_call[0][0] == "sess-new" + assert second_call[0][1] == "default" @pytest.mark.asyncio async def test_skips_unconvertible_messages(self, service): @@ -200,15 +206,18 @@ async def test_skips_unconvertible_messages(self, service): ] mock_mgr = MagicMock() - mock_mgr.append_message = MagicMock() + mock_mgr.create_message = MagicMock() + mock_session_msg = MagicMock() with patch.dict(os.environ, {"AGENTCORE_MEMORY_ID": "mem-123", "AWS_REGION": "us-east-1"}), \ patch("bedrock_agentcore.memory.integrations.strands.session_manager.AgentCoreMemorySessionManager", return_value=mock_mgr), \ - patch("bedrock_agentcore.memory.integrations.strands.config.AgentCoreMemoryConfig"): + patch("bedrock_agentcore.memory.integrations.strands.config.AgentCoreMemoryConfig"), \ + patch("strands.types.session.SessionMessage") as mock_session_msg_class: + mock_session_msg_class.from_message.return_value = mock_session_msg count = await service._copy_messages_to_memory("sess-new", "user-1", snapshot) assert count == 1 - assert mock_mgr.append_message.call_count == 1 + assert mock_mgr.create_message.call_count == 1 @pytest.mark.asyncio async def test_continues_on_individual_message_failure(self, service): @@ -220,15 +229,18 @@ async def test_continues_on_individual_message_failure(self, service): mock_mgr = MagicMock() # Second call raises, first and third succeed - mock_mgr.append_message = MagicMock(side_effect=[None, RuntimeError("boom"), None]) + mock_mgr.create_message = MagicMock(side_effect=[None, RuntimeError("boom"), None]) + mock_session_msg = MagicMock() with patch.dict(os.environ, {"AGENTCORE_MEMORY_ID": "mem-123", "AWS_REGION": "us-east-1"}), \ patch("bedrock_agentcore.memory.integrations.strands.session_manager.AgentCoreMemorySessionManager", return_value=mock_mgr), \ - patch("bedrock_agentcore.memory.integrations.strands.config.AgentCoreMemoryConfig"): + patch("bedrock_agentcore.memory.integrations.strands.config.AgentCoreMemoryConfig"), \ + patch("strands.types.session.SessionMessage") as mock_session_msg_class: + mock_session_msg_class.from_message.return_value = mock_session_msg count = await service._copy_messages_to_memory("sess-new", "user-1", snapshot) assert count == 2 # 1st and 3rd succeeded - assert mock_mgr.append_message.call_count == 3 + assert mock_mgr.create_message.call_count == 3 @pytest.mark.asyncio async def test_returns_zero_when_memory_id_missing(self, service): diff --git a/backend/tests/shared/test_assistants.py b/backend/tests/shared/test_assistants.py index f655b95e..201e663c 100644 --- a/backend/tests/shared/test_assistants.py +++ b/backend/tests/shared/test_assistants.py @@ -75,17 +75,6 @@ async def test_delete_assistant(self, assistants_table): ) assert await delete_assistant(created.assistant_id, "u1") is True - @pytest.mark.asyncio - async def test_archive_assistant(self, assistants_table): - from apis.shared.assistants.service import create_assistant, archive_assistant - created = await create_assistant( - owner_id="u1", owner_name="Alice", name="Bot", - description="d", instructions="hi", - ) - archived = await archive_assistant(created.assistant_id, "u1") - assert archived is not None - assert archived.status == "ARCHIVED" - @pytest.mark.asyncio async def test_share_and_check_access(self, assistants_table): from apis.shared.assistants.service import create_assistant, share_assistant, check_share_access diff --git a/backend/tests/shared/test_list_documents_filtering.py b/backend/tests/shared/test_list_documents_filtering.py new file mode 100644 index 00000000..c16bcd3b --- /dev/null +++ b/backend/tests/shared/test_list_documents_filtering.py @@ -0,0 +1,146 @@ +""" +Unit tests for list documents filtering — verifying that documents +with status="deleting" are excluded from list_assistant_documents results. + +Feature: reliable-document-deletion +Requirements: 11.1 +""" + +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest + + +ASSISTANT_ID = "AST-test-001" +OWNER_ID = "user-abc-123" +TABLE_NAME = "test-table" +ENV_PATCH = {"DYNAMODB_ASSISTANTS_TABLE_NAME": TABLE_NAME} + + +def _make_item(doc_id: str, status: str) -> dict: + """Build a DynamoDB item dict matching the Document model shape.""" + return { + "PK": f"AST#{ASSISTANT_ID}", + "SK": f"DOC#{doc_id}", + "documentId": doc_id, + "assistantId": ASSISTANT_ID, + "filename": f"{doc_id}.pdf", + "contentType": "application/pdf", + "sizeBytes": 1024, + "s3Key": f"assistants/{ASSISTANT_ID}/{doc_id}/file.pdf", + "status": status, + "createdAt": "2024-01-01T00:00:00Z", + "updatedAt": "2024-06-01T00:00:00Z", + } + + +def _setup_mocks(mock_boto3_resource, items): + """Wire mock boto3.resource to return a table whose query returns *items*.""" + mock_table = MagicMock() + mock_table.query = MagicMock(return_value={"Items": items}) + mock_dynamo = MagicMock() + mock_dynamo.Table.return_value = mock_table + mock_boto3_resource.return_value = mock_dynamo + return mock_table + + +# ----------------------------------------------------------------------- +# Requirement 11.1: Mix of statuses — "deleting" excluded +# ----------------------------------------------------------------------- + + +@patch("boto3.resource") +@patch( + "apis.shared.assistants.service.get_assistant", + new_callable=AsyncMock, + return_value=MagicMock(), +) +@patch.dict("os.environ", ENV_PATCH) +@pytest.mark.asyncio +async def test_list_excludes_deleting_documents(mock_get_assistant, mock_boto3_resource): + """Documents with status='deleting' must not appear in the returned list.""" + items = [ + _make_item("DOC-001", "complete"), + _make_item("DOC-002", "deleting"), + _make_item("DOC-003", "uploading"), + _make_item("DOC-004", "deleting"), + _make_item("DOC-005", "failed"), + ] + _setup_mocks(mock_boto3_resource, items) + + from apis.app_api.documents.services.document_service import ( + list_assistant_documents, + ) + + documents, _ = await list_assistant_documents(ASSISTANT_ID, OWNER_ID) + + returned_ids = {doc.document_id for doc in documents} + assert returned_ids == {"DOC-001", "DOC-003", "DOC-005"} + for doc in documents: + assert doc.status != "deleting" + + +# ----------------------------------------------------------------------- +# Requirement 11.1: All non-deleting documents are returned +# ----------------------------------------------------------------------- + + +@patch("boto3.resource") +@patch( + "apis.shared.assistants.service.get_assistant", + new_callable=AsyncMock, + return_value=MagicMock(), +) +@patch.dict("os.environ", ENV_PATCH) +@pytest.mark.asyncio +async def test_list_returns_all_non_deleting(mock_get_assistant, mock_boto3_resource): + """Every document NOT in 'deleting' status must be present in the result.""" + items = [ + _make_item("DOC-a", "uploading"), + _make_item("DOC-b", "chunking"), + _make_item("DOC-c", "embedding"), + _make_item("DOC-d", "complete"), + _make_item("DOC-e", "failed"), + ] + _setup_mocks(mock_boto3_resource, items) + + from apis.app_api.documents.services.document_service import ( + list_assistant_documents, + ) + + documents, _ = await list_assistant_documents(ASSISTANT_ID, OWNER_ID) + + returned_ids = {doc.document_id for doc in documents} + assert returned_ids == {"DOC-a", "DOC-b", "DOC-c", "DOC-d", "DOC-e"} + assert len(documents) == 5 + + +# ----------------------------------------------------------------------- +# Requirement 11.1: All deleting → empty list +# ----------------------------------------------------------------------- + + +@patch("boto3.resource") +@patch( + "apis.shared.assistants.service.get_assistant", + new_callable=AsyncMock, + return_value=MagicMock(), +) +@patch.dict("os.environ", ENV_PATCH) +@pytest.mark.asyncio +async def test_list_empty_when_all_deleting(mock_get_assistant, mock_boto3_resource): + """When every document is in 'deleting' status, the result must be empty.""" + items = [ + _make_item("DOC-x", "deleting"), + _make_item("DOC-y", "deleting"), + _make_item("DOC-z", "deleting"), + ] + _setup_mocks(mock_boto3_resource, items) + + from apis.app_api.documents.services.document_service import ( + list_assistant_documents, + ) + + documents, _ = await list_assistant_documents(ASSISTANT_ID, OWNER_ID) + + assert documents == [] diff --git a/backend/tests/shared/test_search_filtering.py b/backend/tests/shared/test_search_filtering.py new file mode 100644 index 00000000..daa3a295 --- /dev/null +++ b/backend/tests/shared/test_search_filtering.py @@ -0,0 +1,209 @@ +""" +Unit tests for search path document status filtering. + +Tests the `_filter_vectors_by_document_status` helper in rag_service.py +which filters vector search results to only include chunks from documents +with status='complete' in DynamoDB. + +Feature: reliable-document-deletion +Requirements: 3.1, 3.2, 3.3, 3.4 +""" + +from unittest.mock import MagicMock, patch + + + +def _make_vector(doc_id, chunk_idx=0): + """Build a vector result dict matching the shape returned by S3 Vectors.""" + return { + "key": f"{doc_id}#{chunk_idx}", + "distance": 0.5, + "metadata": {"document_id": doc_id, "text": f"chunk from {doc_id}"}, + } + + +ASSISTANT_ID = "ast-test-001" +TABLE_NAME = "test-table" +ENV_PATCH = {"DYNAMODB_ASSISTANTS_TABLE_NAME": TABLE_NAME} + + +def _build_mock_table(status_map): + """Return a mock DynamoDB table whose get_item returns statuses from *status_map*. + + Keys present in *status_map* return {"Item": {"status": value}}. + Keys absent simulate a missing record (no "Item" key). + """ + mock_table = MagicMock() + + def _get_item(**kwargs): + doc_id = kwargs["Key"]["SK"].replace("DOC#", "") + if doc_id in status_map: + return {"Item": {"status": status_map[doc_id]}} + return {} # no Item → record not found + + mock_table.get_item = MagicMock(side_effect=_get_item) + return mock_table + + +def _setup_dynamo_mock(mock_boto3_resource, status_map): + """Wire a mock boto3.resource('dynamodb') to return a table with *status_map*.""" + mock_table = _build_mock_table(status_map) + mock_dynamo = MagicMock() + mock_dynamo.Table.return_value = mock_table + mock_boto3_resource.return_value = mock_dynamo + return mock_table + + +# ----------------------------------------------------------------------- +# Requirement 3.1, 3.2: Only chunks from complete documents are returned +# ----------------------------------------------------------------------- + + +@patch("boto3.resource") +@patch.dict("os.environ", ENV_PATCH) +def test_filter_keeps_only_complete_documents(mock_boto3_resource): + """Mix of complete and deleting docs — only complete chunks returned.""" + _setup_dynamo_mock(mock_boto3_resource, { + "doc-a": "complete", + "doc-b": "deleting", + "doc-c": "complete", + }) + + from apis.shared.assistants.rag_service import _filter_vectors_by_document_status + + vectors = [ + _make_vector("doc-a", 0), + _make_vector("doc-b", 0), + _make_vector("doc-b", 1), + _make_vector("doc-c", 0), + ] + + result = _filter_vectors_by_document_status(vectors, ASSISTANT_ID) + + doc_ids = [v["metadata"]["document_id"] for v in result] + assert doc_ids == ["doc-a", "doc-c"] + + +# ----------------------------------------------------------------------- +# Requirement 3.2: All deleting → empty result +# ----------------------------------------------------------------------- + + +@patch("boto3.resource") +@patch.dict("os.environ", ENV_PATCH) +def test_filter_excludes_all_deleting(mock_boto3_resource): + """All documents in 'deleting' status — result should be empty.""" + _setup_dynamo_mock(mock_boto3_resource, { + "doc-x": "deleting", + "doc-y": "deleting", + }) + + from apis.shared.assistants.rag_service import _filter_vectors_by_document_status + + vectors = [_make_vector("doc-x"), _make_vector("doc-y")] + + result = _filter_vectors_by_document_status(vectors, ASSISTANT_ID) + + assert result == [] + + +# ----------------------------------------------------------------------- +# Requirement 3.2: All complete → all chunks returned +# ----------------------------------------------------------------------- + + +@patch("boto3.resource") +@patch.dict("os.environ", ENV_PATCH) +def test_filter_keeps_all_complete(mock_boto3_resource): + """All documents in 'complete' status — all chunks returned.""" + _setup_dynamo_mock(mock_boto3_resource, { + "doc-1": "complete", + "doc-2": "complete", + }) + + from apis.shared.assistants.rag_service import _filter_vectors_by_document_status + + vectors = [ + _make_vector("doc-1", 0), + _make_vector("doc-1", 1), + _make_vector("doc-2", 0), + ] + + result = _filter_vectors_by_document_status(vectors, ASSISTANT_ID) + + assert len(result) == 3 + doc_ids = [v["metadata"]["document_id"] for v in result] + assert doc_ids == ["doc-1", "doc-1", "doc-2"] + + +# ----------------------------------------------------------------------- +# Requirement 3.3: Missing DynamoDB record → excluded +# ----------------------------------------------------------------------- + + +@patch("boto3.resource") +@patch.dict("os.environ", ENV_PATCH) +def test_filter_excludes_missing_records(mock_boto3_resource): + """Document not found in DynamoDB (no Item) — chunks excluded.""" + _setup_dynamo_mock(mock_boto3_resource, { + "doc-exists": "complete", + # "doc-missing" intentionally absent + }) + + from apis.shared.assistants.rag_service import _filter_vectors_by_document_status + + vectors = [ + _make_vector("doc-exists", 0), + _make_vector("doc-missing", 0), + ] + + result = _filter_vectors_by_document_status(vectors, ASSISTANT_ID) + + doc_ids = [v["metadata"]["document_id"] for v in result] + assert doc_ids == ["doc-exists"] + + +# ----------------------------------------------------------------------- +# Requirement 3.4: DynamoDB error → graceful degradation (unfiltered) +# ----------------------------------------------------------------------- + + +@patch("boto3.resource") +@patch.dict("os.environ", ENV_PATCH) +def test_filter_graceful_degradation_on_dynamo_error(mock_boto3_resource): + """DynamoDB raises exception — return unfiltered results.""" + mock_dynamo = MagicMock() + mock_dynamo.Table.side_effect = Exception("DynamoDB unavailable") + mock_boto3_resource.return_value = mock_dynamo + + from apis.shared.assistants.rag_service import _filter_vectors_by_document_status + + vectors = [ + _make_vector("doc-a", 0), + _make_vector("doc-b", 0), + ] + + result = _filter_vectors_by_document_status(vectors, ASSISTANT_ID) + + # Graceful degradation: all vectors returned unfiltered + assert len(result) == 2 + doc_ids = [v["metadata"]["document_id"] for v in result] + assert doc_ids == ["doc-a", "doc-b"] + + +# ----------------------------------------------------------------------- +# Edge case: Empty vector list +# ----------------------------------------------------------------------- + + +@patch("boto3.resource") +@patch.dict("os.environ", ENV_PATCH) +def test_filter_empty_vectors(mock_boto3_resource): + """Empty vector list — returns empty without calling DynamoDB.""" + from apis.shared.assistants.rag_service import _filter_vectors_by_document_status + + result = _filter_vectors_by_document_status([], ASSISTANT_ID) + + assert result == [] + # boto3.resource should not be called for empty input + mock_boto3_resource.assert_not_called() diff --git a/backend/tests/shared/test_sessions_metadata.py b/backend/tests/shared/test_sessions_metadata.py index 1732d387..e1d64edb 100644 --- a/backend/tests/shared/test_sessions_metadata.py +++ b/backend/tests/shared/test_sessions_metadata.py @@ -121,3 +121,112 @@ async def test_missing_env_raises(self, sessions_metadata_table, monkeypatch): from apis.shared.sessions.metadata import list_user_sessions with pytest.raises(RuntimeError): await list_user_sessions("u1") + + +class TestStoreUserDisplayText: + """Tests for the displayText feature (D# records).""" + + @pytest.mark.asyncio + async def test_store_and_retrieve_display_text(self, sessions_metadata_table): + """displayText stored via D# record is merged into get_all_message_metadata.""" + from apis.shared.sessions.metadata import store_user_display_text, get_all_message_metadata + + await store_user_display_text( + session_id="s1", user_id="u1", message_id=0, display_text="Hello world", + ) + result = await get_all_message_metadata("s1", "u1") + assert "0" in result + assert result["0"]["displayText"] == "Hello world" + + @pytest.mark.asyncio + async def test_display_text_merged_with_cost_record(self, sessions_metadata_table): + """When both a cost record and displayText exist for the same message, they merge.""" + from apis.shared.sessions.metadata import ( + store_message_metadata, store_user_display_text, get_all_message_metadata, + ) + + await store_message_metadata( + session_id="s1", user_id="u1", message_id=0, message_metadata=_make_message_metadata(), + ) + await store_user_display_text( + session_id="s1", user_id="u1", message_id=0, display_text="What is AWS?", + ) + result = await get_all_message_metadata("s1", "u1") + assert "0" in result + # Should have both cost data and displayText + assert result["0"]["displayText"] == "What is AWS?" + assert "cost" in result["0"] + + @pytest.mark.asyncio + async def test_display_text_without_cost_record(self, sessions_metadata_table): + """displayText record alone creates an entry even without a matching cost record.""" + from apis.shared.sessions.metadata import store_user_display_text, get_all_message_metadata + + await store_user_display_text( + session_id="s1", user_id="u1", message_id=2, display_text="standalone text", + ) + result = await get_all_message_metadata("s1", "u1") + assert "2" in result + assert result["2"] == {"displayText": "standalone text"} + + @pytest.mark.asyncio + async def test_display_text_sk_pattern(self, sessions_metadata_table): + """D# records use the correct SK and GSI_SK patterns.""" + from apis.shared.sessions.metadata import store_user_display_text + + await store_user_display_text( + session_id="s1", user_id="u1", message_id=4, display_text="test", + ) + items = sessions_metadata_table.scan()["Items"] + d_items = [i for i in items if i["SK"].startswith("D#")] + assert len(d_items) == 1 + assert d_items[0]["SK"] == "D#s1#4" + assert d_items[0]["GSI_PK"] == "SESSION#s1" + assert d_items[0]["GSI_SK"] == "D#4" + + @pytest.mark.asyncio + async def test_display_text_skips_preview_session(self, sessions_metadata_table): + """Preview sessions should not persist displayText records.""" + from apis.shared.sessions.metadata import store_user_display_text + + await store_user_display_text( + session_id="preview-abc123", user_id="u1", message_id=0, display_text="ignored", + ) + items = sessions_metadata_table.scan()["Items"] + d_items = [i for i in items if i["SK"].startswith("D#")] + assert len(d_items) == 0 + + @pytest.mark.asyncio + async def test_display_text_multiple_messages(self, sessions_metadata_table): + """Multiple displayText records in the same session are all retrievable.""" + from apis.shared.sessions.metadata import store_user_display_text, get_all_message_metadata + + await store_user_display_text(session_id="s1", user_id="u1", message_id=0, display_text="first") + await store_user_display_text(session_id="s1", user_id="u1", message_id=2, display_text="second") + await store_user_display_text(session_id="s1", user_id="u1", message_id=4, display_text="third") + + result = await get_all_message_metadata("s1", "u1") + assert result["0"]["displayText"] == "first" + assert result["2"]["displayText"] == "second" + assert result["4"]["displayText"] == "third" + + @pytest.mark.asyncio + async def test_display_text_user_isolation(self, sessions_metadata_table): + """displayText from a different user should not leak into another user's query.""" + from apis.shared.sessions.metadata import store_user_display_text, get_all_message_metadata + + await store_user_display_text(session_id="s1", user_id="u1", message_id=0, display_text="user1 msg") + await store_user_display_text(session_id="s1", user_id="u2", message_id=0, display_text="user2 msg") + + result_u1 = await get_all_message_metadata("s1", "u1") + assert result_u1.get("0", {}).get("displayText") == "user1 msg" + + @pytest.mark.asyncio + async def test_missing_env_raises(self, sessions_metadata_table, monkeypatch): + """store_user_display_text raises RuntimeError when env var is missing.""" + monkeypatch.delenv("DYNAMODB_SESSIONS_METADATA_TABLE_NAME", raising=False) + from apis.shared.sessions.metadata import store_user_display_text + with pytest.raises(RuntimeError): + await store_user_display_text( + session_id="s1", user_id="u1", message_id=0, display_text="boom", + ) diff --git a/backend/tests/shared/test_vector_deletion.py b/backend/tests/shared/test_vector_deletion.py new file mode 100644 index 00000000..082c2d33 --- /dev/null +++ b/backend/tests/shared/test_vector_deletion.py @@ -0,0 +1,114 @@ +""" +Unit tests for deterministic vector deletion. + +Feature: reliable-document-deletion +Requirements: 5.1, 5.2, 5.3 +""" + +import importlib +from unittest.mock import MagicMock, patch + +import pytest + + +@pytest.fixture() +def mock_s3vectors(): + """Provide a mock s3vectors client and reload the module with patched env vars.""" + mock_client = MagicMock() + mock_client.delete_vectors = MagicMock() + + with ( + patch.dict( + "os.environ", + { + "S3_ASSISTANTS_VECTOR_STORE_BUCKET_NAME": "test-bucket", + "S3_ASSISTANTS_VECTOR_STORE_INDEX_NAME": "test-index", + }, + ), + patch("boto3.client", return_value=mock_client), + ): + import apis.shared.embeddings.bedrock_embeddings as mod + + importlib.reload(mod) + yield mod, mock_client + + +@pytest.mark.asyncio +async def test_deterministic_delete_generates_correct_keys(mock_s3vectors): + """For chunk_count=5, keys should be DOC-123#0 through DOC-123#4.""" + mod, mock_client = mock_s3vectors + + result = await mod.delete_vectors_for_document_deterministic("DOC-123", 5) + + assert result == 5 + mock_client.delete_vectors.assert_called_once() + call_kwargs = mock_client.delete_vectors.call_args[1] + assert call_kwargs["vectorBucketName"] == "test-bucket" + assert call_kwargs["indexName"] == "test-index" + assert call_kwargs["keys"] == [ + "DOC-123#0", + "DOC-123#1", + "DOC-123#2", + "DOC-123#3", + "DOC-123#4", + ] + + +@pytest.mark.asyncio +async def test_deterministic_delete_batches_at_500(mock_s3vectors): + """For chunk_count=1200, expect 3 batches: 500, 500, 200.""" + mod, mock_client = mock_s3vectors + + result = await mod.delete_vectors_for_document_deterministic("DOC-456", 1200) + + assert result == 1200 + assert mock_client.delete_vectors.call_count == 3 + + batches = [call[1]["keys"] for call in mock_client.delete_vectors.call_args_list] + assert len(batches[0]) == 500 + assert len(batches[1]) == 500 + assert len(batches[2]) == 200 + + # Verify key continuity across batches + assert batches[0][0] == "DOC-456#0" + assert batches[0][-1] == "DOC-456#499" + assert batches[1][0] == "DOC-456#500" + assert batches[1][-1] == "DOC-456#999" + assert batches[2][0] == "DOC-456#1000" + assert batches[2][-1] == "DOC-456#1199" + + +@pytest.mark.asyncio +async def test_deterministic_delete_zero_chunks(mock_s3vectors): + """For chunk_count=0, return 0 and make no API calls.""" + mod, mock_client = mock_s3vectors + + result = await mod.delete_vectors_for_document_deterministic("DOC-789", 0) + + assert result == 0 + mock_client.delete_vectors.assert_not_called() + + +@pytest.mark.asyncio +async def test_deterministic_delete_returns_chunk_count(mock_s3vectors): + """Return value should always equal chunk_count.""" + mod, _ = mock_s3vectors + + for count in [1, 10, 499, 500, 501, 1000]: + result = await mod.delete_vectors_for_document_deterministic("DOC-X", count) + assert result == count + + +@pytest.mark.asyncio +async def test_deterministic_delete_single_batch(mock_s3vectors): + """For chunk_count=500, expect exactly 1 batch of 500 keys.""" + mod, mock_client = mock_s3vectors + + result = await mod.delete_vectors_for_document_deterministic("DOC-EXACT", 500) + + assert result == 500 + assert mock_client.delete_vectors.call_count == 1 + keys = mock_client.delete_vectors.call_args[1]["keys"] + assert len(keys) == 500 + assert keys[0] == "DOC-EXACT#0" + assert keys[-1] == "DOC-EXACT#499" diff --git a/backend/uv.lock b/backend/uv.lock index b9d9566f..7d696dd5 100644 --- a/backend/uv.lock +++ b/backend/uv.lock @@ -1,15 +1,16 @@ version = 1 -revision = 3 +revision = 2 requires-python = ">=3.10" resolution-markers = [ - "python_full_version >= '3.13'", + "python_full_version >= '3.14'", + "python_full_version == '3.13.*'", "python_full_version >= '3.11' and python_full_version < '3.13'", "python_full_version < '3.11'", ] [[package]] name = "agentcore-stack" -version = "1.0.0b19" +version = "1.0.0b20" source = { editable = "." } dependencies = [ { name = "aiofiles" }, @@ -71,31 +72,31 @@ requires-dist = [ { name = "agentcore-stack", extras = ["agentcore", "dev"], marker = "extra == 'all'" }, { name = "aiofiles", specifier = "==25.1.0" }, { name = "authlib", specifier = "==1.6.9" }, - { name = "aws-opentelemetry-distro", marker = "extra == 'agentcore'", specifier = "==0.14.2" }, - { name = "bedrock-agentcore", marker = "extra == 'agentcore'", specifier = "==1.4.7" }, + { name = "aws-opentelemetry-distro", marker = "extra == 'agentcore'", specifier = "==0.16.0" }, + { name = "bedrock-agentcore", marker = "extra == 'agentcore'", specifier = "==1.4.8" }, { name = "black", marker = "extra == 'dev'", specifier = "==26.3.1" }, - { name = "boto3", specifier = "==1.42.73" }, - { name = "cachetools", specifier = "==7.0.5" }, + { name = "boto3", specifier = "==1.42.78" }, + { name = "cachetools", specifier = "==6.2.4" }, { name = "fastapi", specifier = "==0.135.2" }, - { name = "google-genai", marker = "extra == 'agentcore'", specifier = "==1.68.0" }, + { name = "google-genai", marker = "extra == 'agentcore'", specifier = "==1.69.0" }, { name = "httpx", specifier = "==0.28.1" }, - { name = "hypothesis", marker = "extra == 'dev'", specifier = "==6.151.9" }, + { name = "hypothesis", marker = "extra == 'dev'", specifier = "==6.151.10" }, { name = "moto", extras = ["dynamodb"], marker = "extra == 'dev'", specifier = "==5.1.22" }, { name = "mypy", marker = "extra == 'dev'", specifier = "==1.19.1" }, { name = "numpy", marker = "extra == 'dev'", specifier = "==2.2.6" }, - { name = "openai", marker = "extra == 'agentcore'", specifier = "==2.29.0" }, + { name = "openai", marker = "extra == 'agentcore'", specifier = "==2.30.0" }, { name = "pyjwt", extras = ["crypto"], specifier = "==2.12.1" }, { name = "pytest", marker = "extra == 'dev'", specifier = "==9.0.2" }, { name = "pytest-asyncio", marker = "extra == 'dev'", specifier = "==1.3.0" }, { name = "pytest-cov", marker = "extra == 'dev'", specifier = "==7.1.0" }, { name = "python-dotenv", specifier = "==1.2.2" }, - { name = "ruff", marker = "extra == 'dev'", specifier = "==0.15.7" }, + { name = "ruff", marker = "extra == 'dev'", specifier = "==0.15.8" }, { name = "starlette", specifier = "==1.0.0" }, - { name = "strands-agents", marker = "extra == 'agentcore'", specifier = "==1.32.0" }, - { name = "strands-agents-tools", marker = "extra == 'agentcore'", specifier = "==0.2.23" }, + { name = "strands-agents", marker = "extra == 'agentcore'", specifier = "==1.33.0" }, + { name = "strands-agents-tools", marker = "extra == 'agentcore'", specifier = "==0.3.0" }, { name = "tiktoken", marker = "extra == 'dev'", specifier = "==0.12.0" }, { name = "types-aiofiles", marker = "extra == 'dev'", specifier = "==25.1.0.20251011" }, - { name = "uvicorn", extras = ["standard"], specifier = "==0.35.0" }, + { name = "uvicorn", extras = ["standard"], specifier = "==0.42.0" }, ] provides-extras = ["agentcore", "dev", "all"] @@ -326,9 +327,10 @@ wheels = [ [[package]] name = "aws-opentelemetry-distro" -version = "0.14.2" +version = "0.16.0" source = { registry = "https://pypi.org/simple" } dependencies = [ + { name = "cachetools" }, { name = "opentelemetry-api" }, { name = "opentelemetry-distro" }, { name = "opentelemetry-exporter-otlp-proto-common" }, @@ -386,10 +388,12 @@ dependencies = [ { name = "opentelemetry-propagator-ot-trace" }, { name = "opentelemetry-sdk" }, { name = "opentelemetry-sdk-extension-aws" }, + { name = "protobuf" }, + { name = "pyyaml" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/bc/54/1f9757e1d1e4bfeaa3b89ef02f7c51fe67c4fef30730ee29322b0d5eb2b1/aws_opentelemetry_distro-0.14.2.tar.gz", hash = "sha256:a7deb86b0294ae41e3c12912f31ef780d7cd40cf1f71f3a088c630f754605572", size = 253317, upload-time = "2026-01-20T17:11:51.314Z" } +sdist = { url = "https://files.pythonhosted.org/packages/97/e4/4bce0ddae504b8054f16796d1387038875af44aea69edfd5f32afe52e2ab/aws_opentelemetry_distro-0.16.0.tar.gz", hash = "sha256:7fa0eaac7f8303ac0506bf68931b2de4f6173b41a28b837216e4949afafe2d8d", size = 281980, upload-time = "2026-03-13T20:29:29.82Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b1/6c/7e6b34db19023a70a28713c74bbeb9e182e9fc216e1845825b73e8fb36ec/aws_opentelemetry_distro-0.14.2-py3-none-any.whl", hash = "sha256:23c091e933d62fe9c238e08518897f3b759fa6f076d1a6899f4848026b853f71", size = 169058, upload-time = "2026-01-20T17:11:49.672Z" }, + { url = "https://files.pythonhosted.org/packages/9d/9c/736768e8ca5236accda9fd5ede42862a4a4278a7e7aba8c957de983890e5/aws_opentelemetry_distro-0.16.0-py3-none-any.whl", hash = "sha256:a5a651e312ec845bbe185de42cb6cd8ff283060766a6b5eaf2b7426dd43abab1", size = 188918, upload-time = "2026-03-13T20:29:27.988Z" }, ] [[package]] @@ -428,7 +432,7 @@ wheels = [ [[package]] name = "bedrock-agentcore" -version = "1.4.7" +version = "1.4.8" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "boto3" }, @@ -440,9 +444,9 @@ dependencies = [ { name = "uvicorn" }, { name = "websockets" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d3/5c/2ad1747ff2bc4b6bb8828fdc8e769f6c34daa0c4ca4d853cff603ea04aeb/bedrock_agentcore-1.4.7.tar.gz", hash = "sha256:422805482e47593010128a86495dff644507624b00c6e09950613c7241ae5375", size = 483923, upload-time = "2026-03-18T22:46:37.944Z" } +sdist = { url = "https://files.pythonhosted.org/packages/56/99/b08e9e6b849599316100b898f5ba27057f5141aaea36bcaf0ed3f695fdd5/bedrock_agentcore-1.4.8.tar.gz", hash = "sha256:ebabf85307b3590ef58c5ea25234cc3824560b5a7579f725bb00a8df7df6c4d4", size = 487391, upload-time = "2026-03-26T22:33:31.873Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c5/e7/df6cbe814292353460c0988504eee4e90cc08dde03bf5e1da85176b5f0b4/bedrock_agentcore-1.4.7-py3-none-any.whl", hash = "sha256:7515ddf779a4f32fd4a5c8dcf29c9399babe0ea14ea9004d2c69bcad40754622", size = 148250, upload-time = "2026-03-18T22:46:36.662Z" }, + { url = "https://files.pythonhosted.org/packages/ec/6d/677b74eaa3c1f6601ff28b4c652c4c2cf313b7fb84cf36949d8ca0869029/bedrock_agentcore-1.4.8-py3-none-any.whl", hash = "sha256:9f0fb653d0f3cadca082132b49e73852483527824d93728eb4cefc0ce60545b4", size = 149662, upload-time = "2026-03-26T22:33:30.016Z" }, ] [[package]] @@ -491,39 +495,39 @@ wheels = [ [[package]] name = "boto3" -version = "1.42.73" +version = "1.42.78" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "botocore" }, { name = "jmespath" }, { name = "s3transfer" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/e4/8b/d00575be514744ca4839e7d85bf4a8a3c7b6b4574433291e58d14c68ae09/boto3-1.42.73.tar.gz", hash = "sha256:d37b58d6cd452ca808dd6823ae19ca65b6244096c5125ef9052988b337298bae", size = 112775, upload-time = "2026-03-20T19:39:52.814Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a8/2b/ebdad075934cf6bb78bf81fe31d83339bcd804ad6c856f7341376cbc88b6/boto3-1.42.78.tar.gz", hash = "sha256:cef2ebdb9be5c0e96822f8d3941ac4b816c90a5737a7ffb901d664c808964b63", size = 112789, upload-time = "2026-03-27T19:28:07.58Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/aa/05/1fcf03d90abaa3d0b42a6bfd10231dd709493ecbacf794aa2eea5eae6841/boto3-1.42.73-py3-none-any.whl", hash = "sha256:1f81b79b873f130eeab14bb556417a7c66d38f3396b7f2fe3b958b3f9094f455", size = 140556, upload-time = "2026-03-20T19:39:50.298Z" }, + { url = "https://files.pythonhosted.org/packages/57/bb/1f6dade1f1e86858bef7bd332bc8106c445f2dbabec7b32ab5d7d118c9b6/boto3-1.42.78-py3-none-any.whl", hash = "sha256:480a34a077484a5ca60124dfd150ba3ea6517fc89963a679e45b30c6db614d26", size = 140556, upload-time = "2026-03-27T19:28:06.125Z" }, ] [[package]] name = "botocore" -version = "1.42.73" +version = "1.42.80" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "jmespath" }, { name = "python-dateutil" }, { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/28/23/0c88ca116ef63b1ae77c901cd5d2095d22a8dbde9e80df74545db4a061b4/botocore-1.42.73.tar.gz", hash = "sha256:575858641e4949aaf2af1ced145b8524529edf006d075877af6b82ff96ad854c", size = 15008008, upload-time = "2026-03-20T19:39:40.082Z" } +sdist = { url = "https://files.pythonhosted.org/packages/2e/42/d0ce09fe5b494e2a9de513206dec90fbe72bcb101457a60f526a6b1c300b/botocore-1.42.80.tar.gz", hash = "sha256:fe32af53dc87f5f4d61879bc231e2ca2cc0719b19b8f6d268e82a34f713a8a09", size = 15110373, upload-time = "2026-03-31T19:33:33.82Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/8e/65/971f3d55015f4d133a6ff3ad74cd39f4b8dd8f53f7775a3c2ad378ea5145/botocore-1.42.73-py3-none-any.whl", hash = "sha256:7b62e2a12f7a1b08eb7360eecd23bb16fe3b7ab7f5617cf91b25476c6f86a0fe", size = 14681861, upload-time = "2026-03-20T19:39:35.341Z" }, + { url = "https://files.pythonhosted.org/packages/17/b0/c03f2ed8e7817db1c22d70720636a1b22a2a4d3aa3c09da0257072b30bc5/botocore-1.42.80-py3-none-any.whl", hash = "sha256:7291632b2ede71b7c69e6e366480bb6e2a5d2fae8f7d2d2eb49215e32b7c7a12", size = 14787168, upload-time = "2026-03-31T19:33:29.396Z" }, ] [[package]] name = "cachetools" -version = "7.0.5" +version = "6.2.4" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/af/dd/57fe3fdb6e65b25a5987fd2cdc7e22db0aef508b91634d2e57d22928d41b/cachetools-7.0.5.tar.gz", hash = "sha256:0cd042c24377200c1dcd225f8b7b12b0ca53cc2c961b43757e774ebe190fd990", size = 37367, upload-time = "2026-03-09T20:51:29.451Z" } +sdist = { url = "https://files.pythonhosted.org/packages/bc/1d/ede8680603f6016887c062a2cf4fc8fdba905866a3ab8831aa8aa651320c/cachetools-6.2.4.tar.gz", hash = "sha256:82c5c05585e70b6ba2d3ae09ea60b79548872185d2f24ae1f2709d37299fd607", size = 31731, upload-time = "2025-12-15T18:24:53.744Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/06/f3/39cf3367b8107baa44f861dc802cbf16263c945b62d8265d36034fc07bea/cachetools-7.0.5-py3-none-any.whl", hash = "sha256:46bc8ebefbe485407621d0a4264b23c080cedd913921bad7ac3ed2f26c183114", size = 13918, upload-time = "2026-03-09T20:51:27.33Z" }, + { url = "https://files.pythonhosted.org/packages/2c/fc/1d7b80d0eb7b714984ce40efc78859c022cd930e402f599d8ca9e39c78a4/cachetools-6.2.4-py3-none-any.whl", hash = "sha256:69a7a52634fed8b8bf6e24a050fb60bff1c9bd8f6d24572b99c32d4e71e62a51", size = 11551, upload-time = "2025-12-15T18:24:52.332Z" }, ] [[package]] @@ -921,18 +925,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/bc/58/6b3d24e6b9bc474a2dcdee65dfd1f008867015408a271562e4b690561a4d/cryptography-46.0.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:8456928655f856c6e1533ff59d5be76578a7157224dbd9ce6872f25055ab9ab7", size = 3407605, upload-time = "2026-02-10T19:18:29.233Z" }, ] -[[package]] -name = "deprecated" -version = "1.3.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "wrapt" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/49/85/12f0a49a7c4ffb70572b6c2ef13c90c88fd190debda93b23f026b25f9634/deprecated-1.3.1.tar.gz", hash = "sha256:b1b50e0ff0c1fddaa5708a2c6b0a6588bb09b892825ab2b214ac9ea9d92a5223", size = 2932523, upload-time = "2025-10-30T08:19:02.757Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/84/d0/205d54408c08b13550c733c4b85429e7ead111c7f0014309637425520a9a/deprecated-1.3.1-py2.py3-none-any.whl", hash = "sha256:597bfef186b6f60181535a29fbe44865ce137a5079f295b479886c82729d5f3f", size = 11298, upload-time = "2025-10-30T08:19:00.758Z" }, -] - [[package]] name = "dill" version = "0.4.1" @@ -1143,7 +1135,7 @@ requests = [ [[package]] name = "google-genai" -version = "1.68.0" +version = "1.69.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, @@ -1157,9 +1149,9 @@ dependencies = [ { name = "typing-extensions" }, { name = "websockets" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/9c/2c/f059982dbcb658cc535c81bbcbe7e2c040d675f4b563b03cdb01018a4bc3/google_genai-1.68.0.tar.gz", hash = "sha256:ac30c0b8bc630f9372993a97e4a11dae0e36f2e10d7c55eacdca95a9fa14ca96", size = 511285, upload-time = "2026-03-18T01:03:18.243Z" } +sdist = { url = "https://files.pythonhosted.org/packages/00/5e/c0a5e6ff60d18d3f19819a9b1fbd6a1ef2162d025696d8660550739168dc/google_genai-1.69.0.tar.gz", hash = "sha256:5f1a6a478e0c5851506a3d337534bab27b3c33120e27bf9174507ea79dfb8673", size = 519538, upload-time = "2026-03-28T15:33:27.308Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/84/de/7d3ee9c94b74c3578ea4f88d45e8de9405902f857932334d81e89bce3dfa/google_genai-1.68.0-py3-none-any.whl", hash = "sha256:a1bc9919c0e2ea2907d1e319b65471d3d6d58c54822039a249fe1323e4178d15", size = 750912, upload-time = "2026-03-18T01:03:15.983Z" }, + { url = "https://files.pythonhosted.org/packages/42/58/ef0586019f54b2ebb36deed7608ccb5efe1377564d2aaea6b1e295d1fadc/google_genai-1.69.0-py3-none-any.whl", hash = "sha256:252e714d724aba74949647b9de511a6a6f7804b3b317ab39ddee9cc2f001cacc", size = 760551, upload-time = "2026-03-28T15:33:24.957Z" }, ] [[package]] @@ -1326,15 +1318,15 @@ wheels = [ [[package]] name = "hypothesis" -version = "6.151.9" +version = "6.151.10" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, { name = "sortedcontainers" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/19/e1/ef365ff480903b929d28e057f57b76cae51a30375943e33374ec9a165d9c/hypothesis-6.151.9.tar.gz", hash = "sha256:2f284428dda6c3c48c580de0e18470ff9c7f5ef628a647ee8002f38c3f9097ca", size = 463534, upload-time = "2026-02-16T22:59:23.09Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f5/dd/633e2cd62377333b7681628aee2ec1d88166f5bdf916b08c98b1e8288ad3/hypothesis-6.151.10.tar.gz", hash = "sha256:6c9565af8b4aa3a080b508f66ce9c2a77dd613c7e9073e27fc7e4ef9f45f8a27", size = 463762, upload-time = "2026-03-29T01:06:22.19Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c4/f7/5cc291d701094754a1d327b44d80a44971e13962881d9a400235726171da/hypothesis-6.151.9-py3-none-any.whl", hash = "sha256:7b7220585c67759b1b1ef839b1e6e9e3d82ed468cfc1ece43c67184848d7edd9", size = 529307, upload-time = "2026-02-16T22:59:20.443Z" }, + { url = "https://files.pythonhosted.org/packages/40/da/439bb2e451979f5e88c13bbebc3e9e17754429cfb528c93677b2bd81783b/hypothesis-6.151.10-py3-none-any.whl", hash = "sha256:b0d7728f0c8c2be009f89fcdd6066f70c5439aa0f94adbb06e98261d05f49b05", size = 529493, upload-time = "2026-03-29T01:06:19.161Z" }, ] [[package]] @@ -2033,7 +2025,7 @@ wheels = [ [[package]] name = "openai" -version = "2.29.0" +version = "2.30.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, @@ -2045,89 +2037,89 @@ dependencies = [ { name = "tqdm" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/b4/15/203d537e58986b5673e7f232453a2a2f110f22757b15921cbdeea392e520/openai-2.29.0.tar.gz", hash = "sha256:32d09eb2f661b38d3edd7d7e1a2943d1633f572596febe64c0cd370c86d52bec", size = 671128, upload-time = "2026-03-17T17:53:49.599Z" } +sdist = { url = "https://files.pythonhosted.org/packages/88/15/52580c8fbc16d0675d516e8749806eda679b16de1e4434ea06fb6feaa610/openai-2.30.0.tar.gz", hash = "sha256:92f7661c990bda4b22a941806c83eabe4896c3094465030dd882a71abe80c885", size = 676084, upload-time = "2026-03-25T22:08:59.96Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d0/b1/35b6f9c8cf9318e3dbb7146cc82dab4cf61182a8d5406fc9b50864362895/openai-2.29.0-py3-none-any.whl", hash = "sha256:b7c5de513c3286d17c5e29b92c4c98ceaf0d775244ac8159aeb1bddf840eb42a", size = 1141533, upload-time = "2026-03-17T17:53:47.348Z" }, + { url = "https://files.pythonhosted.org/packages/2a/9e/5bfa2270f902d5b92ab7d41ce0475b8630572e71e349b2a4996d14bdda93/openai-2.30.0-py3-none-any.whl", hash = "sha256:9a5ae616888eb2748ec5e0c5b955a51592e0b201a11f4262db920f2a78c5231d", size = 1146656, upload-time = "2026-03-25T22:08:58.2Z" }, ] [[package]] name = "opentelemetry-api" -version = "1.33.1" +version = "1.40.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "deprecated" }, { name = "importlib-metadata" }, + { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/9a/8d/1f5a45fbcb9a7d87809d460f09dc3399e3fbd31d7f3e14888345e9d29951/opentelemetry_api-1.33.1.tar.gz", hash = "sha256:1c6055fc0a2d3f23a50c7e17e16ef75ad489345fd3df1f8b8af7c0bbf8a109e8", size = 65002, upload-time = "2025-05-16T18:52:41.146Z" } +sdist = { url = "https://files.pythonhosted.org/packages/2c/1d/4049a9e8698361cc1a1aa03a6c59e4fa4c71e0c0f94a30f988a6876a2ae6/opentelemetry_api-1.40.0.tar.gz", hash = "sha256:159be641c0b04d11e9ecd576906462773eb97ae1b657730f0ecf64d32071569f", size = 70851, upload-time = "2026-03-04T14:17:21.555Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/05/44/4c45a34def3506122ae61ad684139f0bbc4e00c39555d4f7e20e0e001c8a/opentelemetry_api-1.33.1-py3-none-any.whl", hash = "sha256:4db83ebcf7ea93e64637ec6ee6fabee45c5cbe4abd9cf3da95c43828ddb50b83", size = 65771, upload-time = "2025-05-16T18:52:17.419Z" }, + { url = "https://files.pythonhosted.org/packages/5f/bf/93795954016c522008da367da292adceed71cca6ee1717e1d64c83089099/opentelemetry_api-1.40.0-py3-none-any.whl", hash = "sha256:82dd69331ae74b06f6a874704be0cfaa49a1650e1537d4a813b86ecef7d0ecf9", size = 68676, upload-time = "2026-03-04T14:17:01.24Z" }, ] [[package]] name = "opentelemetry-distro" -version = "0.54b1" +version = "0.61b0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, { name = "opentelemetry-instrumentation" }, { name = "opentelemetry-sdk" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/dd/0b/0012cb5947c255d6755cb91e3b9fd9bb1876b7e14d5ab67131c030fd90b2/opentelemetry_distro-0.54b1.tar.gz", hash = "sha256:61d6b97bb7a245fddbb829345bb4ad18be39eb52f770fab89a127107fca3149f", size = 2593, upload-time = "2025-05-16T19:03:19.71Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f5/00/1f8acc51326956a596fefaf67751380001af36029132a7a07d4debce3c06/opentelemetry_distro-0.61b0.tar.gz", hash = "sha256:975b845f50181ad53753becf4fd4b123b54fa04df5a9d78812264436d6518981", size = 2590, upload-time = "2026-03-04T14:20:12.453Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/12/b1/5f008a2909d59c02c7b88aa595502d438ca21c15e88edd7620c697a56ce8/opentelemetry_distro-0.54b1-py3-none-any.whl", hash = "sha256:009486513b32b703e275bb2f9ccaf5791676bbf5e2dcfdd90201ddc8f56f122b", size = 3348, upload-time = "2025-05-16T19:02:11.624Z" }, + { url = "https://files.pythonhosted.org/packages/56/2c/efcc995cd7484e6e55b1d26bd7fa6c55ca96bd415ff94310b52c19f330b0/opentelemetry_distro-0.61b0-py3-none-any.whl", hash = "sha256:f21d1ac0627549795d75e332006dd068877f00e461b1b2e8fe4568d6eb7b9590", size = 3349, upload-time = "2026-03-04T14:18:57.788Z" }, ] [[package]] name = "opentelemetry-exporter-otlp-proto-common" -version = "1.33.1" +version = "1.40.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-proto" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/7a/18/a1ec9dcb6713a48b4bdd10f1c1e4d5d2489d3912b80d2bcc059a9a842836/opentelemetry_exporter_otlp_proto_common-1.33.1.tar.gz", hash = "sha256:c57b3fa2d0595a21c4ed586f74f948d259d9949b58258f11edb398f246bec131", size = 20828, upload-time = "2025-05-16T18:52:43.795Z" } +sdist = { url = "https://files.pythonhosted.org/packages/51/bc/1559d46557fe6eca0b46c88d4c2676285f1f3be2e8d06bb5d15fbffc814a/opentelemetry_exporter_otlp_proto_common-1.40.0.tar.gz", hash = "sha256:1cbee86a4064790b362a86601ee7934f368b81cd4cc2f2e163902a6e7818a0fa", size = 20416, upload-time = "2026-03-04T14:17:23.801Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/09/52/9bcb17e2c29c1194a28e521b9d3f2ced09028934c3c52a8205884c94b2df/opentelemetry_exporter_otlp_proto_common-1.33.1-py3-none-any.whl", hash = "sha256:b81c1de1ad349785e601d02715b2d29d6818aed2c809c20219f3d1f20b038c36", size = 18839, upload-time = "2025-05-16T18:52:22.447Z" }, + { url = "https://files.pythonhosted.org/packages/8b/ca/8f122055c97a932311a3f640273f084e738008933503d0c2563cd5d591fc/opentelemetry_exporter_otlp_proto_common-1.40.0-py3-none-any.whl", hash = "sha256:7081ff453835a82417bf38dccf122c827c3cbc94f2079b03bba02a3165f25149", size = 18369, upload-time = "2026-03-04T14:17:04.796Z" }, ] [[package]] name = "opentelemetry-exporter-otlp-proto-grpc" -version = "1.33.1" +version = "1.40.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "deprecated" }, { name = "googleapis-common-protos" }, { name = "grpcio" }, { name = "opentelemetry-api" }, { name = "opentelemetry-exporter-otlp-proto-common" }, { name = "opentelemetry-proto" }, { name = "opentelemetry-sdk" }, + { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d8/5f/75ef5a2a917bd0e6e7b83d3fb04c99236ee958f6352ba3019ea9109ae1a6/opentelemetry_exporter_otlp_proto_grpc-1.33.1.tar.gz", hash = "sha256:345696af8dc19785fac268c8063f3dc3d5e274c774b308c634f39d9c21955728", size = 22556, upload-time = "2025-05-16T18:52:44.76Z" } +sdist = { url = "https://files.pythonhosted.org/packages/8f/7f/b9e60435cfcc7590fa87436edad6822240dddbc184643a2a005301cc31f4/opentelemetry_exporter_otlp_proto_grpc-1.40.0.tar.gz", hash = "sha256:bd4015183e40b635b3dab8da528b27161ba83bf4ef545776b196f0fb4ec47740", size = 25759, upload-time = "2026-03-04T14:17:24.4Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ba/ec/6047e230bb6d092c304511315b13893b1c9d9260044dd1228c9d48b6ae0e/opentelemetry_exporter_otlp_proto_grpc-1.33.1-py3-none-any.whl", hash = "sha256:7e8da32c7552b756e75b4f9e9c768a61eb47dee60b6550b37af541858d669ce1", size = 18591, upload-time = "2025-05-16T18:52:23.772Z" }, + { url = "https://files.pythonhosted.org/packages/96/6f/7ee0980afcbdcd2d40362da16f7f9796bd083bf7f0b8e038abfbc0300f5d/opentelemetry_exporter_otlp_proto_grpc-1.40.0-py3-none-any.whl", hash = "sha256:2aa0ca53483fe0cf6405087a7491472b70335bc5c7944378a0a8e72e86995c52", size = 20304, upload-time = "2026-03-04T14:17:05.942Z" }, ] [[package]] name = "opentelemetry-exporter-otlp-proto-http" -version = "1.33.1" +version = "1.40.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "deprecated" }, { name = "googleapis-common-protos" }, { name = "opentelemetry-api" }, { name = "opentelemetry-exporter-otlp-proto-common" }, { name = "opentelemetry-proto" }, { name = "opentelemetry-sdk" }, { name = "requests" }, + { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/60/48/e4314ac0ed2ad043c07693d08c9c4bf5633857f5b72f2fefc64fd2b114f6/opentelemetry_exporter_otlp_proto_http-1.33.1.tar.gz", hash = "sha256:46622d964a441acb46f463ebdc26929d9dec9efb2e54ef06acdc7305e8593c38", size = 15353, upload-time = "2025-05-16T18:52:45.522Z" } +sdist = { url = "https://files.pythonhosted.org/packages/2e/fa/73d50e2c15c56be4d000c98e24221d494674b0cc95524e2a8cb3856d95a4/opentelemetry_exporter_otlp_proto_http-1.40.0.tar.gz", hash = "sha256:db48f5e0f33217588bbc00274a31517ba830da576e59503507c839b38fa0869c", size = 17772, upload-time = "2026-03-04T14:17:25.324Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/63/ba/5a4ad007588016fe37f8d36bf08f325fe684494cc1e88ca8fa064a4c8f57/opentelemetry_exporter_otlp_proto_http-1.33.1-py3-none-any.whl", hash = "sha256:ebd6c523b89a2ecba0549adb92537cc2bf647b4ee61afbbd5a4c6535aa3da7cf", size = 17733, upload-time = "2025-05-16T18:52:25.137Z" }, + { url = "https://files.pythonhosted.org/packages/a0/3a/8865d6754e61c9fb170cdd530a124a53769ee5f740236064816eb0ca7301/opentelemetry_exporter_otlp_proto_http-1.40.0-py3-none-any.whl", hash = "sha256:a8d1dab28f504c5d96577d6509f80a8150e44e8f45f82cdbe0e34c99ab040069", size = 19960, upload-time = "2026-03-04T14:17:07.153Z" }, ] [[package]] name = "opentelemetry-instrumentation" -version = "0.54b1" +version = "0.61b0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, @@ -2135,28 +2127,28 @@ dependencies = [ { name = "packaging" }, { name = "wrapt" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c3/fd/5756aea3fdc5651b572d8aef7d94d22a0a36e49c8b12fcb78cb905ba8896/opentelemetry_instrumentation-0.54b1.tar.gz", hash = "sha256:7658bf2ff914b02f246ec14779b66671508125c0e4227361e56b5ebf6cef0aec", size = 28436, upload-time = "2025-05-16T19:03:22.223Z" } +sdist = { url = "https://files.pythonhosted.org/packages/da/37/6bf8e66bfcee5d3c6515b79cb2ee9ad05fe573c20f7ceb288d0e7eeec28c/opentelemetry_instrumentation-0.61b0.tar.gz", hash = "sha256:cb21b48db738c9de196eba6b805b4ff9de3b7f187e4bbf9a466fa170514f1fc7", size = 32606, upload-time = "2026-03-04T14:20:16.825Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f4/89/0790abc5d9c4fc74bd3e03cb87afe2c820b1d1a112a723c1163ef32453ee/opentelemetry_instrumentation-0.54b1-py3-none-any.whl", hash = "sha256:a4ae45f4a90c78d7006c51524f57cd5aa1231aef031eae905ee34d5423f5b198", size = 31019, upload-time = "2025-05-16T19:02:15.611Z" }, + { url = "https://files.pythonhosted.org/packages/d8/3e/f6f10f178b6316de67f0dfdbbb699a24fbe8917cf1743c1595fb9dcdd461/opentelemetry_instrumentation-0.61b0-py3-none-any.whl", hash = "sha256:92a93a280e69788e8f88391247cc530fd81f16f2b011979d4d6398f805cfbc63", size = 33448, upload-time = "2026-03-04T14:19:02.447Z" }, ] [[package]] name = "opentelemetry-instrumentation-aio-pika" -version = "0.54b1" +version = "0.61b0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, { name = "opentelemetry-instrumentation" }, { name = "wrapt" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/79/e7/b92741e7dc1c38d512fcd0c3d6b3270cbbe3f3965f4280810c3f48688b1f/opentelemetry_instrumentation_aio_pika-0.54b1.tar.gz", hash = "sha256:a1b9f2d2735f1e9808bac263776f445c446c19580c3a24d0ecc02e289b55b21d", size = 10092, upload-time = "2025-05-16T19:03:25.329Z" } +sdist = { url = "https://files.pythonhosted.org/packages/8b/24/d8a58e517d398fea40f09b009d9a1a04f90c4cee83ff550d12c01471e538/opentelemetry_instrumentation_aio_pika-0.61b0.tar.gz", hash = "sha256:ca55f1a1211fcea0433972e1366dc2d6feea9633434deed375e0ae0aaa9a52f6", size = 10284, upload-time = "2026-03-04T14:20:17.767Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/4b/46/b77e99e0e3a4f473e8a38e46d12269a5ef28ed0f7d52306a06c6b82f2aff/opentelemetry_instrumentation_aio_pika-0.54b1-py3-none-any.whl", hash = "sha256:c1d1a52296937e54a8c69878434c86bdc038d53c1eba6f133c0e63f479484990", size = 13462, upload-time = "2025-05-16T19:02:16.816Z" }, + { url = "https://files.pythonhosted.org/packages/71/7e/bfe7cd89d9111651d633159c7126d2a062019b62050fe46f7b420abff263/opentelemetry_instrumentation_aio_pika-0.61b0-py3-none-any.whl", hash = "sha256:bcd1a63045c981df6f755a87809f7b4033f61f6f258d3c10246d826a1d9f5e9e", size = 13624, upload-time = "2026-03-04T14:19:03.811Z" }, ] [[package]] name = "opentelemetry-instrumentation-aiohttp-client" -version = "0.54b1" +version = "0.61b0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, @@ -2165,28 +2157,29 @@ dependencies = [ { name = "opentelemetry-util-http" }, { name = "wrapt" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/04/fe/535efdb090543cb8e23149271c3ef27e37d3862865c52e2b2b58f7b5cb8d/opentelemetry_instrumentation_aiohttp_client-0.54b1.tar.gz", hash = "sha256:c51c643a5587b9efce6c4cae0f5e2202a25fac69caa89643465f57d5d8ba3789", size = 13643, upload-time = "2025-05-16T19:03:27.156Z" } +sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/24fed4de661de107f2426b28bbd87b51eaab28a2339b62f269a36ae24505/opentelemetry_instrumentation_aiohttp_client-0.61b0.tar.gz", hash = "sha256:c53ab3b88efcb7ce98c1129cc0389f0a1f214eb3675269b6c157770adcf47877", size = 19292, upload-time = "2026-03-04T14:20:18.408Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e5/de/07f25301d57bb83f29ee1eb5503871bddc132d4362ff9897c605e8c54c04/opentelemetry_instrumentation_aiohttp_client-0.54b1-py3-none-any.whl", hash = "sha256:d9b53c04865e8a4c984c1330e4f1d5570bc28543833a4718cbe4265091ee0e71", size = 11661, upload-time = "2025-05-16T19:02:17.827Z" }, + { url = "https://files.pythonhosted.org/packages/df/f3/1edc42716521a3f754ac32ffb908f102e0f131f8e43fcd9ab29cab286723/opentelemetry_instrumentation_aiohttp_client-0.61b0-py3-none-any.whl", hash = "sha256:09bc47514c162507b357366ce15578743fd6305078cf7d872db1c99c13fa6972", size = 14534, upload-time = "2026-03-04T14:19:05.165Z" }, ] [[package]] name = "opentelemetry-instrumentation-aiokafka" -version = "0.54b1" +version = "0.61b0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, { name = "opentelemetry-instrumentation" }, { name = "opentelemetry-semantic-conventions" }, + { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d8/64/e37ecc02d01f5db44750d9460e3e244d5d8a4866e355df9fb85a5a0ae519/opentelemetry_instrumentation_aiokafka-0.54b1.tar.gz", hash = "sha256:977d733bf21f5891f2a0830d02a996d8cf111f00b03a76d419803f8d208b48a4", size = 12521, upload-time = "2025-05-16T19:03:28.466Z" } +sdist = { url = "https://files.pythonhosted.org/packages/08/33/e05648ab6e8d3e1e19366c99dc33c16ec272c431f89def590922c6eda0cb/opentelemetry_instrumentation_aiokafka-0.61b0.tar.gz", hash = "sha256:1c2fabbe0c16e2f9d6bf6c34cb182af6974868e96d8dbb00f45c27f5ac48ee6a", size = 14431, upload-time = "2026-03-04T14:20:19.736Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b3/45/f0d22618c546b4fe9050a05981bf60dd2a7170c0f4a6bc921341571e3e9d/opentelemetry_instrumentation_aiokafka-0.54b1-py3-none-any.whl", hash = "sha256:af1f69b5c3399b25ac574b2dceebb9a0c6fdf18f3d61850ccc4c69e8e81f8ee1", size = 12128, upload-time = "2025-05-16T19:02:20.731Z" }, + { url = "https://files.pythonhosted.org/packages/6f/e3/a440f692a96af4f7b270d88cf8a3d76fbe132c6268111c4965bb8c01f66d/opentelemetry_instrumentation_aiokafka-0.61b0-py3-none-any.whl", hash = "sha256:2bb6c9e492a5c1961f48a1e8d2982f3adeddc18f3bb089ac32ace2291e9d0d2e", size = 13510, upload-time = "2026-03-04T14:19:07.779Z" }, ] [[package]] name = "opentelemetry-instrumentation-aiopg" -version = "0.54b1" +version = "0.61b0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, @@ -2194,14 +2187,14 @@ dependencies = [ { name = "opentelemetry-instrumentation-dbapi" }, { name = "wrapt" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/48/4b/ef14e66e9b7b8bf859844c08d78bbb921c7ec41e2008bd657942a15a5797/opentelemetry_instrumentation_aiopg-0.54b1.tar.gz", hash = "sha256:d00a6845bb8f8d45e81d42bc8ba38df88bb7efdc2cd0e572968dc5359f5b8355", size = 11808, upload-time = "2025-05-16T19:03:29.548Z" } +sdist = { url = "https://files.pythonhosted.org/packages/df/8e/f5d9297ca91cfff5415352e3ca61620d3905a3f92e891a9443938c03e540/opentelemetry_instrumentation_aiopg-0.61b0.tar.gz", hash = "sha256:9522951243ca93d990f97ef35745ff313cfa2e645ff888fd72d3fad511527803", size = 11837, upload-time = "2026-03-04T14:20:20.413Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f2/eb/1b7d0ff786ec1734766b082ebceea729c33b5f7d986816411fb8feb74373/opentelemetry_instrumentation_aiopg-0.54b1-py3-none-any.whl", hash = "sha256:1d162793c4dee9db469d89c962f161801027abc55002eeb23c076ab5f1f334d4", size = 12455, upload-time = "2025-05-16T19:02:21.718Z" }, + { url = "https://files.pythonhosted.org/packages/e5/7e/3f1704949895e21367b37bd85261e3fa21b37eda16429852427fd8ca1e81/opentelemetry_instrumentation_aiopg-0.61b0-py3-none-any.whl", hash = "sha256:e5a5381f661fa5f93176e79aef762842b77edd95c1a618e4641a9abfef9a504e", size = 12453, upload-time = "2026-03-04T14:19:09.714Z" }, ] [[package]] name = "opentelemetry-instrumentation-asgi" -version = "0.54b1" +version = "0.61b0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "asgiref" }, @@ -2210,56 +2203,56 @@ dependencies = [ { name = "opentelemetry-semantic-conventions" }, { name = "opentelemetry-util-http" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/20/f7/a3377f9771947f4d3d59c96841d3909274f446c030dbe8e4af871695ddee/opentelemetry_instrumentation_asgi-0.54b1.tar.gz", hash = "sha256:ab4df9776b5f6d56a78413c2e8bbe44c90694c67c844a1297865dc1bd926ed3c", size = 24230, upload-time = "2025-05-16T19:03:30.234Z" } +sdist = { url = "https://files.pythonhosted.org/packages/00/3e/143cf5c034e58037307e6a24f06e0dd64b2c49ae60a965fc580027581931/opentelemetry_instrumentation_asgi-0.61b0.tar.gz", hash = "sha256:9d08e127244361dc33976d39dd4ca8f128b5aa5a7ae425208400a80a095019b5", size = 26691, upload-time = "2026-03-04T14:20:21.038Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/20/24/7a6f0ae79cae49927f528ecee2db55a5bddd87b550e310ce03451eae7491/opentelemetry_instrumentation_asgi-0.54b1-py3-none-any.whl", hash = "sha256:84674e822b89af563b283a5283c2ebb9ed585d1b80a1c27fb3ac20b562e9f9fc", size = 16338, upload-time = "2025-05-16T19:02:22.808Z" }, + { url = "https://files.pythonhosted.org/packages/19/78/154470cf9d741a7487fbb5067357b87386475bbb77948a6707cae982e158/opentelemetry_instrumentation_asgi-0.61b0-py3-none-any.whl", hash = "sha256:e4b3ce6b66074e525e717efff20745434e5efd5d9df6557710856fba356da7a4", size = 16980, upload-time = "2026-03-04T14:19:10.894Z" }, ] [[package]] name = "opentelemetry-instrumentation-asyncpg" -version = "0.54b1" +version = "0.61b0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, { name = "opentelemetry-instrumentation" }, { name = "opentelemetry-semantic-conventions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/b2/66/d2e2ccbb13cf6d6f6c7c8d907021e9bd8b56585c59e28d99ebc74138c3d1/opentelemetry_instrumentation_asyncpg-0.54b1.tar.gz", hash = "sha256:58e50de68b40221c2d6e22d626e5d03d9d6b950ba59504a5fc060c95cdc7c4fb", size = 8717, upload-time = "2025-05-16T19:03:32.31Z" } +sdist = { url = "https://files.pythonhosted.org/packages/cb/12/c704def946c66c1e039f2ebff9c8605f4d5a857a67acdfa5d0c3da0bb6b5/opentelemetry_instrumentation_asyncpg-0.61b0.tar.gz", hash = "sha256:a620bec93409e23335fac135231da0a16df705faab8521286a622fcc87666424", size = 8736, upload-time = "2026-03-04T14:20:22.918Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f0/93/c17ef16b63d6e073f875bfe4624b9711269a3d208ee11cdfc5cc1b3537d8/opentelemetry_instrumentation_asyncpg-0.54b1-py3-none-any.whl", hash = "sha256:2348843f0c6f0cefb0badc974cbeae244ee89c57e1ae2a587e5f641c23e16fdc", size = 10062, upload-time = "2025-05-16T19:02:26.371Z" }, + { url = "https://files.pythonhosted.org/packages/d8/ab/0d8ace084c87fa1156e7c8ad144e7867fbdeda3e15ff6c4c50c9e4730a13/opentelemetry_instrumentation_asyncpg-0.61b0-py3-none-any.whl", hash = "sha256:81039b94b3a0b014199cf7c2bd2753679fe600929620065619c13831e579a892", size = 10089, upload-time = "2026-03-04T14:19:14.6Z" }, ] [[package]] name = "opentelemetry-instrumentation-aws-lambda" -version = "0.54b1" +version = "0.61b0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-instrumentation" }, { name = "opentelemetry-propagator-aws-xray" }, { name = "opentelemetry-semantic-conventions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/77/fd/57a1360203efa8410637679b00b61603782dd84ca9c0b3619192c07e0d1f/opentelemetry_instrumentation_aws_lambda-0.54b1.tar.gz", hash = "sha256:c40f011581abf3cd28d8833fb6218bac75eec3adda7774ff2685f41b279a9fdd", size = 17904, upload-time = "2025-05-16T19:03:33.658Z" } +sdist = { url = "https://files.pythonhosted.org/packages/1e/70/93ea4b84d241205a863eb2c60a1cef17c5d2119d6bc20b7a26536f747026/opentelemetry_instrumentation_aws_lambda-0.61b0.tar.gz", hash = "sha256:9fa74e96071e60063cbb3584f48f01d41144a9d265b1a20c8ac6f6d953d01a33", size = 18560, upload-time = "2026-03-04T14:20:23.844Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/77/f3/c08fee6ae3f2d2b461ee7e7c2b3ac8de52281b236f3593146ba456cd0db7/opentelemetry_instrumentation_aws_lambda-0.54b1-py3-none-any.whl", hash = "sha256:51bc4301b9733fcda616d68197ee5f15108175a217f5fd8db349d53ba14cc172", size = 12484, upload-time = "2025-05-16T19:02:27.421Z" }, + { url = "https://files.pythonhosted.org/packages/05/4a/4ecd5fe45de221148c9ff0fe5d48e230ca2243ae54183b12ece1e1c05e8e/opentelemetry_instrumentation_aws_lambda-0.61b0-py3-none-any.whl", hash = "sha256:51b721a17ab275a41e7de513b39339257bc22fdfe7899d74c94b99459fe1b6a0", size = 12900, upload-time = "2026-03-04T14:19:15.853Z" }, ] [[package]] name = "opentelemetry-instrumentation-boto" -version = "0.54b1" +version = "0.61b0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, { name = "opentelemetry-instrumentation" }, { name = "opentelemetry-semantic-conventions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/4b/b5/5b777b6b1f3ce586141485584a52f0fdd3d63398011b0d02feb822f46f0a/opentelemetry_instrumentation_boto-0.54b1.tar.gz", hash = "sha256:83407a5f6f69cd0bebff802da0d228eb13196a1de713b43e1348b77f80033c6a", size = 9716, upload-time = "2025-05-16T19:03:34.364Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a0/e2/dcf07272aac51758af6c55e0a62f23e645d1f8f54ec41b107f1a3e765ee1/opentelemetry_instrumentation_boto-0.61b0.tar.gz", hash = "sha256:f8066f5b8a32bc0fe98d0416d161cfcf5a4b94f25f351a49c772679a4a5f09d7", size = 9711, upload-time = "2026-03-04T14:20:24.808Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/4a/5e/8f8bfb5fa1c51aa66b6af7e4a64d9be9dc9aba6ff2d8c0f405204a5069ea/opentelemetry_instrumentation_boto-0.54b1-py3-none-any.whl", hash = "sha256:b52b1216bee095858bcd0d992360911b6e870acc4f4c9090f8ca1081d9fdede6", size = 10146, upload-time = "2025-05-16T19:02:28.417Z" }, + { url = "https://files.pythonhosted.org/packages/99/87/bdfa97c692f2cfc99cd80d39d4469c0816f68ec5ef13049cac2da2f2e641/opentelemetry_instrumentation_boto-0.61b0-py3-none-any.whl", hash = "sha256:d6e8fd937fd47d675b9e98eecff872aa60ed688f4bee75f3c372e74c45440218", size = 10160, upload-time = "2026-03-04T14:19:17.111Z" }, ] [[package]] name = "opentelemetry-instrumentation-boto3sqs" -version = "0.54b1" +version = "0.61b0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, @@ -2267,14 +2260,14 @@ dependencies = [ { name = "opentelemetry-semantic-conventions" }, { name = "wrapt" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/be/44/232d566fb06a640f386ce2bdd271e64ecaaae9bdcc5c68f84f2552c5e585/opentelemetry_instrumentation_boto3sqs-0.54b1.tar.gz", hash = "sha256:c8bf67bc836bb66da6a1b000e6c1b07229481c75731ea6a0ed0b59b256e035b9", size = 11715, upload-time = "2025-05-16T19:03:35.028Z" } +sdist = { url = "https://files.pythonhosted.org/packages/13/3e/03c99979613ab73dbe71af6e66af1ba8cd2683abc8aac0bcfca1d75fe515/opentelemetry_instrumentation_boto3sqs-0.61b0.tar.gz", hash = "sha256:56fe935306e45269fc7b970a6f4d3a946b4467ce0a5336b1f6c07884e6f036d0", size = 11718, upload-time = "2026-03-04T14:20:25.707Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/df/db/62ebd5d172eb3997038f24a238792b5ebe604bc70dbda1cba91c3d36a655/opentelemetry_instrumentation_boto3sqs-0.54b1-py3-none-any.whl", hash = "sha256:40ae98fe53584e5b1d61725fc8e153a1be2d6b308f65f56deb4f276a23b43cf4", size = 11672, upload-time = "2025-05-16T19:02:29.62Z" }, + { url = "https://files.pythonhosted.org/packages/1f/6e/0aa228e78c3f699df8fe8817a9c15a727d1678c634ee0ca448738308113b/opentelemetry_instrumentation_boto3sqs-0.61b0-py3-none-any.whl", hash = "sha256:efc799e2fc637379dde68740f5f211fb81dc1966b4904c58592fd5cd4b7e9ee9", size = 11678, upload-time = "2026-03-04T14:19:18.11Z" }, ] [[package]] name = "opentelemetry-instrumentation-botocore" -version = "0.54b1" +version = "0.61b0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, @@ -2282,14 +2275,14 @@ dependencies = [ { name = "opentelemetry-propagator-aws-xray" }, { name = "opentelemetry-semantic-conventions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/72/c9/88127b0714881e6801e4921bca445de634b0b3568e607ccc4a606f711ea7/opentelemetry_instrumentation_botocore-0.54b1.tar.gz", hash = "sha256:54f7b0b48398dfc8b8e98deec89df5b4c8c359d803a0d6c8ce4bd972d50c03dd", size = 110252, upload-time = "2025-05-16T19:03:35.805Z" } +sdist = { url = "https://files.pythonhosted.org/packages/cb/7f/2acb6d4e8cc70726cfbb24885a653ef5fdc3ac2d0a26ca3e6bff58416c2e/opentelemetry_instrumentation_botocore-0.61b0.tar.gz", hash = "sha256:49dee5f48d133b3bfadaa29bcbff28225899dc495ca14a9c1bb60b74fb4cd84d", size = 121236, upload-time = "2026-03-04T14:20:26.426Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/5a/0e/22e35a74e6566feacd8a80f5899242920765f134c0edbb0b943ddb369c0e/opentelemetry_instrumentation_botocore-0.54b1-py3-none-any.whl", hash = "sha256:74d3a36d5bab8447669b25f915a3db6c37ae14a5faa198500471d5b1bbd1902f", size = 35461, upload-time = "2025-05-16T19:02:30.621Z" }, + { url = "https://files.pythonhosted.org/packages/69/43/0ed1ac34b52a62b3694c22aee5c7a9c5b6120938d927d79de53ad9edb0cf/opentelemetry_instrumentation_botocore-0.61b0-py3-none-any.whl", hash = "sha256:b019d2f60562265319e64a75c1c9e7bad31c00b2a568def3cc5c57eaf9a06057", size = 38359, upload-time = "2026-03-04T14:19:19.028Z" }, ] [[package]] name = "opentelemetry-instrumentation-cassandra" -version = "0.54b1" +version = "0.61b0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, @@ -2297,42 +2290,42 @@ dependencies = [ { name = "opentelemetry-semantic-conventions" }, { name = "wrapt" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/26/fb/9a405a3fed8389603bbcd63a74ea303d55992c2c7e9abdc8daeba1945fa9/opentelemetry_instrumentation_cassandra-0.54b1.tar.gz", hash = "sha256:f9a79c0139888eaedb58bb50da42709c7bc6ead9b9f5263164873e4275cefbce", size = 7581, upload-time = "2025-05-16T19:03:36.591Z" } +sdist = { url = "https://files.pythonhosted.org/packages/e0/33/048a04723d82abd9ef070d50007b6069a89a8438bf84baebc0fd084c135b/opentelemetry_instrumentation_cassandra-0.61b0.tar.gz", hash = "sha256:858d11e2b8c5d111b187e4268d13d5f51aed39449871beb4fc434fe90a05464e", size = 8322, upload-time = "2026-03-04T14:20:27.14Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/48/ca/e726bfd5dc40eef7961aa5a7a5e7238eb407c84bd709cb531abd09c62302/opentelemetry_instrumentation_cassandra-0.54b1-py3-none-any.whl", hash = "sha256:81b8d963a02ea43ea4a9d00c88cd0b01dda69daf914d6e4984b2e98b1e8fdeb7", size = 8899, upload-time = "2025-05-16T19:02:31.738Z" }, + { url = "https://files.pythonhosted.org/packages/24/88/030a39b9d748cb37bfcacea27ca7c0bcd700830fe11af419a7b57972cc38/opentelemetry_instrumentation_cassandra-0.61b0-py3-none-any.whl", hash = "sha256:c31c294d2dba901bedbc2f63011c60fa1e189bb58b45ca329548e47afdf987dc", size = 9143, upload-time = "2026-03-04T14:19:20.043Z" }, ] [[package]] name = "opentelemetry-instrumentation-celery" -version = "0.54b1" +version = "0.61b0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, { name = "opentelemetry-instrumentation" }, { name = "opentelemetry-semantic-conventions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/7c/71/4ac353874e0f7ca93591e1a74b7a290dec2027733bbb31bd76da3a74f97f/opentelemetry_instrumentation_celery-0.54b1.tar.gz", hash = "sha256:f2bd019afe9286214083ae2db95ed24adf9a0aa2e943177462d64ceb8380d78e", size = 14778, upload-time = "2025-05-16T19:03:37.376Z" } +sdist = { url = "https://files.pythonhosted.org/packages/8d/43/e79108a804d16b1dc8ff28edd0e94ac393cf6359a5adcd7cdd2ec4be85f4/opentelemetry_instrumentation_celery-0.61b0.tar.gz", hash = "sha256:0e352a567dc89ed8bc083fc635035ce3c5b96bbbd92831ffd676e93b87f8e94f", size = 14780, upload-time = "2026-03-04T14:20:27.776Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/2f/be/90e2b7d26915639cfcdf6e200b309c9d64027ff752c56145bc149cd67d68/opentelemetry_instrumentation_celery-0.54b1-py3-none-any.whl", hash = "sha256:892ec6bf829a0d60cf3bffd1a8bb6fd8055f1194167b4e132e33321de8e05c24", size = 13809, upload-time = "2025-05-16T19:02:33.046Z" }, + { url = "https://files.pythonhosted.org/packages/a2/ed/c05f3c84b455654eb6c047474ffde61ed92efc24030f64213c98bca9d44b/opentelemetry_instrumentation_celery-0.61b0-py3-none-any.whl", hash = "sha256:01235733ff0cdf571cb03b270645abb14b9c8d830313dc5842097ec90146320b", size = 13856, upload-time = "2026-03-04T14:19:20.98Z" }, ] [[package]] name = "opentelemetry-instrumentation-confluent-kafka" -version = "0.54b1" +version = "0.61b0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, { name = "opentelemetry-instrumentation" }, { name = "wrapt" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/50/a8/472ddb40f8caab693de4a5c2084b1513b67f879060e5e46cfb2f96bc0872/opentelemetry_instrumentation_confluent_kafka-0.54b1.tar.gz", hash = "sha256:1e378b5c88170c7fcd23b07054a61d2af7a7ec5af1aba120446514ef27b7ad82", size = 11615, upload-time = "2025-05-16T19:03:39.409Z" } +sdist = { url = "https://files.pythonhosted.org/packages/39/0e/76db8f0f41678da1c50926367eaf7c3a984cb849235e83b699ad801341b0/opentelemetry_instrumentation_confluent_kafka-0.61b0.tar.gz", hash = "sha256:06c7a13b0ccfa77701d90df19e755e92f3ae3ca6f88c0b1cca64700b4ea1af78", size = 11900, upload-time = "2026-03-04T14:20:29.172Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/5e/9e/107e45d5eb41961a187c28eb4d0da02d133d371dfdd149b1f7ef96e78926/opentelemetry_instrumentation_confluent_kafka-0.54b1-py3-none-any.whl", hash = "sha256:9dc896233a973705e1ac25950ababe23322338f4cd3fff0ccd509759aeb2e802", size = 12624, upload-time = "2025-05-16T19:02:35.018Z" }, + { url = "https://files.pythonhosted.org/packages/2b/a2/0a79dcd319a7a8de6aaf91ba1ca1d995a51e63ca0fbf545ae0eeec993a81/opentelemetry_instrumentation_confluent_kafka-0.61b0-py3-none-any.whl", hash = "sha256:2ce36aa3287b870c30b62584090360d591d882f179da07d729f009d442799f16", size = 12810, upload-time = "2026-03-04T14:19:23.468Z" }, ] [[package]] name = "opentelemetry-instrumentation-dbapi" -version = "0.54b1" +version = "0.61b0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, @@ -2340,14 +2333,14 @@ dependencies = [ { name = "opentelemetry-semantic-conventions" }, { name = "wrapt" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/7b/b7/b74e2c7c858cde8909516cbe77cb0e841167d38795c90df524d84440e1f1/opentelemetry_instrumentation_dbapi-0.54b1.tar.gz", hash = "sha256:69421c36994114040d197f7e846c01869d663084c6c2025e85b2d6cfce2f8299", size = 14145, upload-time = "2025-05-16T19:03:40.074Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d6/ed/ba91c9e4a3ec65781e9c59982109f0a36de9fa574f622596b33d1985dab5/opentelemetry_instrumentation_dbapi-0.61b0.tar.gz", hash = "sha256:02fa800682c1de87dcad0e59f2092b3b6fb8b8ea0636518f989e1166b418dcb9", size = 16761, upload-time = "2026-03-04T14:20:29.782Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/83/6a/98d409ae5ca60ae4e41295a42256d81bb96bd5a7a386ca0343e27494d53d/opentelemetry_instrumentation_dbapi-0.54b1-py3-none-any.whl", hash = "sha256:21bc20cd878a78bf44bab686e9679cef1eed77e53c754c0a09f0ca49f5fd0283", size = 12450, upload-time = "2025-05-16T19:02:36.041Z" }, + { url = "https://files.pythonhosted.org/packages/73/a5/d26c68f3fd33eb7410985cef7700bb426e2c4a26de9207902cbbffb19a3f/opentelemetry_instrumentation_dbapi-0.61b0-py3-none-any.whl", hash = "sha256:8f762c39c8edd20c6aef3282550a2cfbfec76c3f431bf5c36327dcf9ece2e5a0", size = 14134, upload-time = "2026-03-04T14:19:24.718Z" }, ] [[package]] name = "opentelemetry-instrumentation-django" -version = "0.54b1" +version = "0.61b0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, @@ -2356,14 +2349,14 @@ dependencies = [ { name = "opentelemetry-semantic-conventions" }, { name = "opentelemetry-util-http" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ac/93/8d194bda118fc4c369b9a3091c39eec384137b46f33421272359883c53d9/opentelemetry_instrumentation_django-0.54b1.tar.gz", hash = "sha256:38414f989f60e9dba82928e13f6a20a26baf5cc700f1d891f27e0703ca577802", size = 24866, upload-time = "2025-05-16T19:03:41.183Z" } +sdist = { url = "https://files.pythonhosted.org/packages/74/ef/6bc1a6560630f26b1c010af86b28f42bfbe6a601bd1647d1436e0d3436aa/opentelemetry_instrumentation_django-0.61b0.tar.gz", hash = "sha256:9885154dc128578de0e6b5ce49e965c786f8ab071175bec005dcd454510be951", size = 25996, upload-time = "2026-03-04T14:20:30.453Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e9/75/1b0ae1b8b7d6a85d5d54e8092c84b18669bd5da6f5ceb3410047674db3c0/opentelemetry_instrumentation_django-0.54b1-py3-none-any.whl", hash = "sha256:462fbd577991021f56152df21ca1fdcd7c4abdc10dd44254a44d515b8e3d61ca", size = 19541, upload-time = "2025-05-16T19:02:37.4Z" }, + { url = "https://files.pythonhosted.org/packages/69/3b/74dad6d98fdee1d137f1c2748548d4159578508f21e3aef581c110e64041/opentelemetry_instrumentation_django-0.61b0-py3-none-any.whl", hash = "sha256:26c1b0b325a9783d4a2f4df660ba05cf929c3eda2ae9b07916b649bb44e1c5b6", size = 20773, upload-time = "2026-03-04T14:19:25.675Z" }, ] [[package]] name = "opentelemetry-instrumentation-elasticsearch" -version = "0.54b1" +version = "0.61b0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, @@ -2371,14 +2364,14 @@ dependencies = [ { name = "opentelemetry-semantic-conventions" }, { name = "wrapt" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/31/8b/e7d57ab4aab2d63e2094001e0301d848ec83b86ee428e538101922cd27ed/opentelemetry_instrumentation_elasticsearch-0.54b1.tar.gz", hash = "sha256:d5b6996919679c91e5791457de24d9ff6472887a4e1426b8f2345c52f6ba6f10", size = 14379, upload-time = "2025-05-16T19:03:41.939Z" } +sdist = { url = "https://files.pythonhosted.org/packages/7d/40/270d0613d62f0b7e90dda54b0628ed6622441e62bd4bde98c554622ef671/opentelemetry_instrumentation_elasticsearch-0.61b0.tar.gz", hash = "sha256:00b9bfa406096d9f3daaca4afe3de658f31474af366fc9a694045027a11e2b6f", size = 14844, upload-time = "2026-03-04T14:20:31.115Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e6/bd/4919e716190454895c895c37745bbf22d59231d864862a9bc4ac68f4c8d8/opentelemetry_instrumentation_elasticsearch-0.54b1-py3-none-any.whl", hash = "sha256:9f5c968954d72f15e133d06760294f13886d98c4da626374168094035f6dec50", size = 12607, upload-time = "2025-05-16T19:02:38.944Z" }, + { url = "https://files.pythonhosted.org/packages/28/42/61755ffbd3095d9a087a266963ee40048b42d2df3d59cb050f6d56351995/opentelemetry_instrumentation_elasticsearch-0.61b0-py3-none-any.whl", hash = "sha256:de66e6fd221c4d343fb4394d81247288ac19dc63d6f7bf322dc6333e0692e9ab", size = 12447, upload-time = "2026-03-04T14:19:26.666Z" }, ] [[package]] name = "opentelemetry-instrumentation-falcon" -version = "0.54b1" +version = "0.61b0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, @@ -2388,14 +2381,14 @@ dependencies = [ { name = "opentelemetry-util-http" }, { name = "packaging" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/1a/7d/73df17199014ea57ae71bb128a5155ea4d81d86d0b61d4c852cec485ccb1/opentelemetry_instrumentation_falcon-0.54b1.tar.gz", hash = "sha256:06e72aac39fd4ac65555a8cb056428d7c4366bb1fafa65e60474d6e3d6c3eada", size = 17176, upload-time = "2025-05-16T19:03:42.651Z" } +sdist = { url = "https://files.pythonhosted.org/packages/20/88/d20d9508e047548495737305aca01391713ae5e913b90b23a6818f5a7260/opentelemetry_instrumentation_falcon-0.61b0.tar.gz", hash = "sha256:0b64fde2edea0c5602c62f8a438d4e248ee5461140c0df670113fa92c41e292d", size = 17117, upload-time = "2026-03-04T14:20:32.095Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/2e/40/65a3cecd312ac380477ff44306c737b6a3d0cb7ec1ec28e09aacdc8904ac/opentelemetry_instrumentation_falcon-0.54b1-py3-none-any.whl", hash = "sha256:6eaf3bf714a6e3398a5ddc132c3e77de851331ee00989302f88a4d4ce829e679", size = 14206, upload-time = "2025-05-16T19:02:40.082Z" }, + { url = "https://files.pythonhosted.org/packages/8b/64/02a5fcfed927b0ca4a005de9ddd437ccab846fe5363e515ca0f78e205e4b/opentelemetry_instrumentation_falcon-0.61b0-py3-none-any.whl", hash = "sha256:daf8e8f135bc3c4d2990edb35d4a3cdc7d7ea27fc221abbdabb27ff9b362d4fe", size = 14145, upload-time = "2026-03-04T14:19:29.367Z" }, ] [[package]] name = "opentelemetry-instrumentation-fastapi" -version = "0.54b1" +version = "0.61b0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, @@ -2404,14 +2397,14 @@ dependencies = [ { name = "opentelemetry-semantic-conventions" }, { name = "opentelemetry-util-http" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/98/3b/9a262cdc1a4defef0e52afebdde3e8add658cc6f922e39e9dcee0da98349/opentelemetry_instrumentation_fastapi-0.54b1.tar.gz", hash = "sha256:1fcad19cef0db7092339b571a59e6f3045c9b58b7fd4670183f7addc459d78df", size = 19325, upload-time = "2025-05-16T19:03:45.359Z" } +sdist = { url = "https://files.pythonhosted.org/packages/37/35/aa727bb6e6ef930dcdc96a617b83748fece57b43c47d83ba8d83fbeca657/opentelemetry_instrumentation_fastapi-0.61b0.tar.gz", hash = "sha256:3a24f35b07c557ae1bbc483bf8412221f25d79a405f8b047de8b670722e2fa9f", size = 24800, upload-time = "2026-03-04T14:20:32.759Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/df/9c/6b2b0f9d6c5dea7528ae0bf4e461dd765b0ae35f13919cd452970bb0d0b3/opentelemetry_instrumentation_fastapi-0.54b1-py3-none-any.whl", hash = "sha256:fb247781cfa75fd09d3d8713c65e4a02bd1e869b00e2c322cc516d4b5429860c", size = 12125, upload-time = "2025-05-16T19:02:41.172Z" }, + { url = "https://files.pythonhosted.org/packages/91/05/acfeb2cccd434242a0a7d0ea29afaf077e04b42b35b485d89aee4e0d9340/opentelemetry_instrumentation_fastapi-0.61b0-py3-none-any.whl", hash = "sha256:a1a844d846540d687d377516b2ff698b51d87c781b59f47c214359c4a241047c", size = 13485, upload-time = "2026-03-04T14:19:30.351Z" }, ] [[package]] name = "opentelemetry-instrumentation-flask" -version = "0.54b1" +version = "0.61b0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, @@ -2421,14 +2414,14 @@ dependencies = [ { name = "opentelemetry-util-http" }, { name = "packaging" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/b1/b8/d46dcb20889713a355de418a0d31d552089bf4454e1baf48c7b6b3fb6035/opentelemetry_instrumentation_flask-0.54b1.tar.gz", hash = "sha256:683f9963f06d065fc07ceaffa106df1f6f20075318530328f69fde39dfb1192f", size = 19221, upload-time = "2025-05-16T19:03:46.063Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d9/33/d6852d8f2c3eef86f2f8c858d6f5315983c7063e07e595519e96d4c31c06/opentelemetry_instrumentation_flask-0.61b0.tar.gz", hash = "sha256:e9faf58dfd9860a1868442d180142645abdafc1a652dd73d469a5efd106a7d49", size = 24071, upload-time = "2026-03-04T14:20:33.437Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/39/85/aaaed416e9ee7d5c4ab98b3dba3d66675f44cfdcbf5d683e144a10fafad0/opentelemetry_instrumentation_flask-0.54b1-py3-none-any.whl", hash = "sha256:1f9d44b8ca9bc7d52e2aeb539bc64a88d6fc04f2f67c1ffb278148c99cc8ec6a", size = 14626, upload-time = "2025-05-16T19:02:42.202Z" }, + { url = "https://files.pythonhosted.org/packages/3e/41/619f3530324a58491f2d20f216a10dd7393629b29db4610dda642a27f4ed/opentelemetry_instrumentation_flask-0.61b0-py3-none-any.whl", hash = "sha256:e8ce474d7ce543bfbbb3e93f8a6f8263348af9d7b45502f387420cf3afa71253", size = 15996, upload-time = "2026-03-04T14:19:31.304Z" }, ] [[package]] name = "opentelemetry-instrumentation-grpc" -version = "0.54b1" +version = "0.61b0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, @@ -2436,14 +2429,14 @@ dependencies = [ { name = "opentelemetry-semantic-conventions" }, { name = "wrapt" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/95/7a/a2e879f5b39d77091181c944064bf99e11646a58242f1e8efa829646bcb1/opentelemetry_instrumentation_grpc-0.54b1.tar.gz", hash = "sha256:4198aab2a380b2807a50112892f9b8a50772169a3722fa99634ef70c6c017ea2", size = 30926, upload-time = "2025-05-16T19:03:46.813Z" } +sdist = { url = "https://files.pythonhosted.org/packages/38/38/8c23bc3669fc0119452720171c35efac8c76a9587538e48007f0dde013ab/opentelemetry_instrumentation_grpc-0.61b0.tar.gz", hash = "sha256:47ad4ff31885153c7ae6b5c466a96a00977dff60d8f1f7281d4fa4bd1d113053", size = 31435, upload-time = "2026-03-04T14:20:34.094Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/86/51/22ca8af0b9f78029657957f33604813c07dde18fb035dd37a60e2a4070d8/opentelemetry_instrumentation_grpc-0.54b1-py3-none-any.whl", hash = "sha256:c01114c5c147c216f9144da065d4a84bffb2a43b3cb05763b40ec744bbf5206e", size = 27112, upload-time = "2025-05-16T19:02:43.853Z" }, + { url = "https://files.pythonhosted.org/packages/b6/47/1ffcd8fd36e1b7272884390d27f8d26a9c3c56da72627998822104079e13/opentelemetry_instrumentation_grpc-0.61b0-py3-none-any.whl", hash = "sha256:ec96eb28c7c904be9765e2a24402d6480c06506aac9a8fe08e2ae888866a01ee", size = 27237, upload-time = "2026-03-04T14:19:32.605Z" }, ] [[package]] name = "opentelemetry-instrumentation-httpx" -version = "0.54b1" +version = "0.61b0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, @@ -2452,83 +2445,83 @@ dependencies = [ { name = "opentelemetry-util-http" }, { name = "wrapt" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/9f/64/65b2e599c5043a5dbd14c251d48dec4947e2ec8713f601df197ea9b51246/opentelemetry_instrumentation_httpx-0.54b1.tar.gz", hash = "sha256:37e1cd0190f98508d960ec1667c9f148f8c8ad9a6cab127b57c9ad92c37493c3", size = 17734, upload-time = "2025-05-16T19:03:47.762Z" } +sdist = { url = "https://files.pythonhosted.org/packages/cd/2a/e2becd55e33c29d1d9ef76e2579040ed1951cb33bacba259f6aff2fdd2a6/opentelemetry_instrumentation_httpx-0.61b0.tar.gz", hash = "sha256:6569ec097946c5551c2a4252f74c98666addd1bf047c1dde6b4ef426719ff8dd", size = 24104, upload-time = "2026-03-04T14:20:34.752Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f1/63/f92e93b613b51344a979dc6674641f2c0d24b031f6a08557304398962e41/opentelemetry_instrumentation_httpx-0.54b1-py3-none-any.whl", hash = "sha256:99b8e43ebf1d945ca298d84d32298ba26d1c3431738cea9f69a26c442661745f", size = 14129, upload-time = "2025-05-16T19:02:45.418Z" }, + { url = "https://files.pythonhosted.org/packages/af/88/dde310dce56e2d85cf1a09507f5888544955309edc4b8d22971d6d3d1417/opentelemetry_instrumentation_httpx-0.61b0-py3-none-any.whl", hash = "sha256:dee05c93a6593a5dc3ae5d9d5c01df8b4e2c5d02e49275e5558534ee46343d5e", size = 17198, upload-time = "2026-03-04T14:19:33.585Z" }, ] [[package]] name = "opentelemetry-instrumentation-jinja2" -version = "0.54b1" +version = "0.61b0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, { name = "opentelemetry-instrumentation" }, { name = "wrapt" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f7/9d/48836360719cfc0aaa892440b42d2fc3cf83bb84d4f92cda0ad9af7dd598/opentelemetry_instrumentation_jinja2-0.54b1.tar.gz", hash = "sha256:21e435e2029e876e9c91277fb88e9cf235211f96973c64e494b8be7551c7b3e1", size = 8468, upload-time = "2025-05-16T19:03:48.499Z" } +sdist = { url = "https://files.pythonhosted.org/packages/22/9b/f19886dae0a1f35dcb0f50a5b5a48710c6cf85eec005d1f8b07900e5b8a5/opentelemetry_instrumentation_jinja2-0.61b0.tar.gz", hash = "sha256:08641ca2a4b17208d527304174c672bdb74e522da41c16fb3e4be749912d3b86", size = 8463, upload-time = "2026-03-04T14:20:36.19Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a2/d4/213e701c74541f860bfc89211ab54b7c9d3c89576dc461bed14d6f1d0e2f/opentelemetry_instrumentation_jinja2-0.54b1-py3-none-any.whl", hash = "sha256:bcefb00e177c3481a0f735ffe96589ee40ba6b603092c19fca7b03fcb5c72a19", size = 9428, upload-time = "2025-05-16T19:02:46.544Z" }, + { url = "https://files.pythonhosted.org/packages/0d/98/2ff543567bade305884db2d8ccb14256a2a9fdd0a88ac5f25301f6864ea1/opentelemetry_instrumentation_jinja2-0.61b0-py3-none-any.whl", hash = "sha256:16608f3f9cf916a059848bbf469165f14e5c7489b1fe0add801d48482bf37cd7", size = 9427, upload-time = "2026-03-04T14:19:34.627Z" }, ] [[package]] name = "opentelemetry-instrumentation-kafka-python" -version = "0.54b1" +version = "0.61b0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, { name = "opentelemetry-instrumentation" }, { name = "opentelemetry-semantic-conventions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/b2/1c/232ffeb76dd519d82c6b0f1b28dc33f6583f3a90b35dd3360179d46e0c72/opentelemetry_instrumentation_kafka_python-0.54b1.tar.gz", hash = "sha256:8b3f18be44939a270ca55b8017c5f822b94bdc1372b59a49464b990c715d0ba4", size = 10535, upload-time = "2025-05-16T19:03:49.198Z" } +sdist = { url = "https://files.pythonhosted.org/packages/91/1a/863b54403e90947acdffadb8e3a399e0ba78047adac92733b56359df7dcd/opentelemetry_instrumentation_kafka_python-0.61b0.tar.gz", hash = "sha256:85cf9d9aaee6a740ccce4bede88016a40cffb51e29cfce600f52a5297e6447fe", size = 10559, upload-time = "2026-03-04T14:20:36.8Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/0d/88/9998fac3940d818100f0b3b1b67992481df233516d4d0a14fce43d6dcbc8/opentelemetry_instrumentation_kafka_python-0.54b1-py3-none-any.whl", hash = "sha256:ab53ed8af3281a337feb5c1fa01059d5af99ec7aa84f2b360627a20fed385ab7", size = 11502, upload-time = "2025-05-16T19:02:48.012Z" }, + { url = "https://files.pythonhosted.org/packages/8c/c8/461ec6170536303f4dfcb4ef07864073c276673dcea13c461f6bd4d6a1d4/opentelemetry_instrumentation_kafka_python-0.61b0-py3-none-any.whl", hash = "sha256:8f74c60ba9fa33148a55c6ebcdaf1f79f6df728e29674d821ee4791430c2e369", size = 11522, upload-time = "2026-03-04T14:19:35.843Z" }, ] [[package]] name = "opentelemetry-instrumentation-logging" -version = "0.54b1" +version = "0.61b0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, { name = "opentelemetry-instrumentation" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d9/5b/88ed39f22e8c6eb4f6192ab9a62adaa115579fcbcadb3f0241ee645eea56/opentelemetry_instrumentation_logging-0.54b1.tar.gz", hash = "sha256:893a3cbfda893b64ff71b81991894e2fd6a9267ba85bb6c251f51c0419fbe8fa", size = 9976, upload-time = "2025-05-16T19:03:49.976Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ae/e0/69473f925acfe2d4edf5c23bcced36906ac3627aa7c5722a8e3f60825f3b/opentelemetry_instrumentation_logging-0.61b0.tar.gz", hash = "sha256:feaa30b700acd2a37cc81db5f562ab0c3a5b6cc2453595e98b72c01dcf649584", size = 17906, upload-time = "2026-03-04T14:20:37.398Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/96/0c/b441fb30d860f25040eaed61e89d68f4d9ee31873159ed18cbc1b92eba56/opentelemetry_instrumentation_logging-0.54b1-py3-none-any.whl", hash = "sha256:01a4cec54348f13941707d857b850b0febf9d49f45d0fcf0673866e079d7357b", size = 12579, upload-time = "2025-05-16T19:02:49.039Z" }, + { url = "https://files.pythonhosted.org/packages/e0/0e/2137db5239cc5e564495549a4d11488a7af9b48fc76520a0eea20e69ddae/opentelemetry_instrumentation_logging-0.61b0-py3-none-any.whl", hash = "sha256:6d87e5ded6a0128d775d41511f8380910a1b610671081d16efb05ac3711c0074", size = 17076, upload-time = "2026-03-04T14:19:36.765Z" }, ] [[package]] name = "opentelemetry-instrumentation-mysql" -version = "0.54b1" +version = "0.61b0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, { name = "opentelemetry-instrumentation" }, { name = "opentelemetry-instrumentation-dbapi" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/1e/6e/8b203e0f0afb994a2b8734d37d4ffe8a70cd45202bf021c3a531d7b1cb9d/opentelemetry_instrumentation_mysql-0.54b1.tar.gz", hash = "sha256:de3a9367886523f30bd04b51edcf8d0777de7eac4a2467f52478231f51405b49", size = 9390, upload-time = "2025-05-16T19:03:50.66Z" } +sdist = { url = "https://files.pythonhosted.org/packages/19/24/626f0371da69bddad9544609d6397b18c9a1dde81b4af8c9fd6747a846ab/opentelemetry_instrumentation_mysql-0.61b0.tar.gz", hash = "sha256:124cfe3d103c1d80994ea735f93dda4ae318a6a5c6a029b6a376d387390d7097", size = 10150, upload-time = "2026-03-04T14:20:38.057Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c9/18/aeae1a3cc4dd17f4338d105592a8e6cba572ef9d94089649d4b8a0d7b4dc/opentelemetry_instrumentation_mysql-0.54b1-py3-none-any.whl", hash = "sha256:07cd8c3003b439e0626e2b77f2b7f28f73c75879e28d9260f8d9a9600fb85fc2", size = 10100, upload-time = "2025-05-16T19:02:49.952Z" }, + { url = "https://files.pythonhosted.org/packages/85/ec/a6f1330bc0a39c8dec4aaf17be23b0639d239587b1c3a59f86c9fc450128/opentelemetry_instrumentation_mysql-0.61b0-py3-none-any.whl", hash = "sha256:cc5b569d1cbc9c7892a042388db813fbf86a004368d7e85fd12d0b5939b52ae7", size = 10649, upload-time = "2026-03-04T14:19:38.104Z" }, ] [[package]] name = "opentelemetry-instrumentation-mysqlclient" -version = "0.54b1" +version = "0.61b0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, { name = "opentelemetry-instrumentation" }, { name = "opentelemetry-instrumentation-dbapi" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a3/c6/27ac94688611cb51d20d83855b1dbd8610009f8ccf73e0fdca40648b4db4/opentelemetry_instrumentation_mysqlclient-0.54b1.tar.gz", hash = "sha256:c14abdc5e19015ab7d6aa23ce96122c4f966fac629489eaa614e28da84e94d88", size = 9330, upload-time = "2025-05-16T19:03:51.382Z" } +sdist = { url = "https://files.pythonhosted.org/packages/8b/4a/5b306bde4f55aa1c33bdb760704b5be81c568bf3b0b9f374a15ae0e04a28/opentelemetry_instrumentation_mysqlclient-0.61b0.tar.gz", hash = "sha256:ca65b3d47cc896f26e2629ee777334e7fcb8b5f2871e2243651efc5bd4066aa6", size = 9852, upload-time = "2026-03-04T14:20:38.746Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d6/4d/9d8a5e571c370331c771467a4c51bb2da5ced1c2601bd2990c2a2bdc0caa/opentelemetry_instrumentation_mysqlclient-0.54b1-py3-none-any.whl", hash = "sha256:462972e140586e00a5c0f0025585b2decfd0c4d7189cd42e2f786ca8e9fdab27", size = 10125, upload-time = "2025-05-16T19:02:51.422Z" }, + { url = "https://files.pythonhosted.org/packages/a2/99/e9e380a9215ace82ffd2d9c55ca3e968e0c051512a953eaea3928a54a788/opentelemetry_instrumentation_mysqlclient-0.61b0-py3-none-any.whl", hash = "sha256:59566e7efa6949768b6048d09a87ce43a85cb91bcd9776eba2d5a1b335b73429", size = 10551, upload-time = "2026-03-04T14:19:39.138Z" }, ] [[package]] name = "opentelemetry-instrumentation-pika" -version = "0.54b1" +version = "0.61b0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, @@ -2536,28 +2529,28 @@ dependencies = [ { name = "packaging" }, { name = "wrapt" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/5d/8b/e7510900b383a2aaaec728034d8353d9112ce6fb75df1b53094185deae10/opentelemetry_instrumentation_pika-0.54b1.tar.gz", hash = "sha256:b8e20202233fee5aca35bd58db431bdcfeeddd85f83067800ab494c234479f51", size = 12993, upload-time = "2025-05-16T19:03:52.055Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f4/ae/8eae59c9282445b0eaf21ca269bf6152b69602b215a1552b4666da838ca4/opentelemetry_instrumentation_pika-0.61b0.tar.gz", hash = "sha256:0c138f73139fd0bba00584fa2ffd53f7af1a78ccc2db1860adc3700633f36a62", size = 13320, upload-time = "2026-03-04T14:20:39.333Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ce/68/c1dd5a8fcf3e98644ff3d1dfc3db9a7ac65a9ae964011c139343756b1e24/opentelemetry_instrumentation_pika-0.54b1-py3-none-any.whl", hash = "sha256:3098ba31cdf3b390deb18c9eb824fccff9b8a2d51878fdcc7b69f1e6218963dc", size = 13661, upload-time = "2025-05-16T19:02:52.407Z" }, + { url = "https://files.pythonhosted.org/packages/97/c3/f90b9280e5b0d74eb757323765e1984b7cce2361b2251beda2b71f8e9529/opentelemetry_instrumentation_pika-0.61b0-py3-none-any.whl", hash = "sha256:c4e52540fb4ce43346f138d77f580e36cb1dd81f303bdfef4bcf49975ac6ea0e", size = 13753, upload-time = "2026-03-04T14:19:40.077Z" }, ] [[package]] name = "opentelemetry-instrumentation-psycopg2" -version = "0.54b1" +version = "0.61b0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, { name = "opentelemetry-instrumentation" }, { name = "opentelemetry-instrumentation-dbapi" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c0/09/dd6e55a852c87ee6402d745486d7d2e32577e728781bc1c89812d2645f48/opentelemetry_instrumentation_psycopg2-0.54b1.tar.gz", hash = "sha256:6e899baf7b6687320491b25d5ceadde5c614a95fb379da8e2a513d430f28102f", size = 10663, upload-time = "2025-05-16T19:03:53.817Z" } +sdist = { url = "https://files.pythonhosted.org/packages/0e/28/f28d52b1088e7a09761566f8700507b54d3d83a6f9c93c0ce02f53619e83/opentelemetry_instrumentation_psycopg2-0.61b0.tar.gz", hash = "sha256:863ccf9687b71e73dd489c7bb117278768bdf26aa0dafe7dc974a2425e05b5d7", size = 11676, upload-time = "2026-03-04T14:20:41.269Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/1c/d0/4915e34533c26f319ba9b5346c0d1aa48d099bb29719674dbace3e4d643b/opentelemetry_instrumentation_psycopg2-0.54b1-py3-none-any.whl", hash = "sha256:2f493b180c2028bcab2ecaff8bd25560dd92a538bba8b9510411f182dd2a075e", size = 10709, upload-time = "2025-05-16T19:02:54.388Z" }, + { url = "https://files.pythonhosted.org/packages/2f/f1/4341d0584c288765c73e28c30ba58e7aedb50c01108f17f947b872657f79/opentelemetry_instrumentation_psycopg2-0.61b0-py3-none-any.whl", hash = "sha256:36b96983beda05c927179bb66b6c72f07a8d9a591f76ce9da88b1dd1587cb083", size = 11491, upload-time = "2026-03-04T14:19:42.018Z" }, ] [[package]] name = "opentelemetry-instrumentation-pymemcache" -version = "0.54b1" +version = "0.61b0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, @@ -2565,42 +2558,42 @@ dependencies = [ { name = "opentelemetry-semantic-conventions" }, { name = "wrapt" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/4c/58/66b4eb77a1279816b108d41b852f5ae02c69c8442522fb37539c119ff056/opentelemetry_instrumentation_pymemcache-0.54b1.tar.gz", hash = "sha256:03a272e3a416a633f83ee5b494a346d37fbe8249271bbf5e02686c354ae810a9", size = 10606, upload-time = "2025-05-16T19:03:54.485Z" } +sdist = { url = "https://files.pythonhosted.org/packages/28/f0/65d9341760daf33d4426e34e4542eeeabae6f2d7c47bfcda21d9d47eab3d/opentelemetry_instrumentation_pymemcache-0.61b0.tar.gz", hash = "sha256:79cfbba2b87acb4b62ce0c7a8696efe33da0829fd462c622045af1abad3fb7f3", size = 10628, upload-time = "2026-03-04T14:20:41.995Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/8b/91/678a2215292ce4cdfb28e282bef97e63bb497b42e2d677a24db7b979474d/opentelemetry_instrumentation_pymemcache-0.54b1-py3-none-any.whl", hash = "sha256:d752ccc03214cb079733d8d811ba9e624a7b6c76454ce96e30edccfed1f75f91", size = 9685, upload-time = "2025-05-16T19:02:55.389Z" }, + { url = "https://files.pythonhosted.org/packages/d3/8d/4e36c523a9f7b469236c2669043d10b65cb662dfa2f454bf85f021aecce4/opentelemetry_instrumentation_pymemcache-0.61b0-py3-none-any.whl", hash = "sha256:50a0e5681242bfbebdf03c039867c07eda87caefdb6b82a329473003801feef1", size = 9716, upload-time = "2026-03-04T14:19:43.257Z" }, ] [[package]] name = "opentelemetry-instrumentation-pymongo" -version = "0.54b1" +version = "0.61b0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, { name = "opentelemetry-instrumentation" }, { name = "opentelemetry-semantic-conventions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/71/4c/e214f98f6d0885cd1a4e09740fc68d59dfb5e108c310c0003415eb593a47/opentelemetry_instrumentation_pymongo-0.54b1.tar.gz", hash = "sha256:75cbcfe499009d535e508b869825113fc0888d4d60c544d4337ef65eb4d299f0", size = 9614, upload-time = "2025-05-16T19:03:55.135Z" } +sdist = { url = "https://files.pythonhosted.org/packages/49/0d/a4d1bd1e993a1065613e857e91adcfa290b9339936cca6dc44562678209a/opentelemetry_instrumentation_pymongo-0.61b0.tar.gz", hash = "sha256:30259f3f55f9620052fbbb17a80d06b04da8455b5f92854f739eac4367ddfe4f", size = 10319, upload-time = "2026-03-04T14:20:42.948Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f1/f4/b4504705ce678ac6118e4c5226b566d940aa4f7baf8e6c585abad36d1197/opentelemetry_instrumentation_pymongo-0.54b1-py3-none-any.whl", hash = "sha256:2331f4f0cbd5a5053edebb956b4dd288d60eb8971d9b6d5927f0753d0651161e", size = 11314, upload-time = "2025-05-16T19:02:56.958Z" }, + { url = "https://files.pythonhosted.org/packages/13/00/dc7033c41b9e927bd2f5d5a04e72d20a8ada94c67793f10ce3309dbc39ff/opentelemetry_instrumentation_pymongo-0.61b0-py3-none-any.whl", hash = "sha256:fdf8576d837fc52ef33ef770f39bd4515bc56352b0f244a3aae2ee5e481a5796", size = 11409, upload-time = "2026-03-04T14:19:44.355Z" }, ] [[package]] name = "opentelemetry-instrumentation-pymysql" -version = "0.54b1" +version = "0.61b0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, { name = "opentelemetry-instrumentation" }, { name = "opentelemetry-instrumentation-dbapi" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/2e/f5/f6f3f593c6f95994470eea001960c4891ead94d6583698862d2c1c2eb046/opentelemetry_instrumentation_pymysql-0.54b1.tar.gz", hash = "sha256:c22501ee104c34b70e37e5cdc59d74ffb833d473ac3ecfe899b707bf194e914b", size = 9208, upload-time = "2025-05-16T19:03:57.478Z" } +sdist = { url = "https://files.pythonhosted.org/packages/6a/c1/74e6d98e167cc8591e54a77ce144703201cbefd31dff45dbdf27ec8d2b09/opentelemetry_instrumentation_pymysql-0.61b0.tar.gz", hash = "sha256:60ba66a806e4664308bd86fe45f329a6f3bb520c3e9759f68f379b7c9466047f", size = 9730, upload-time = "2026-03-04T14:20:44.177Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d9/2f/e7a0e6555757cb14c54a4e923f0ba0a0ed9833cfae0fe8334e698d6a2767/opentelemetry_instrumentation_pymysql-0.54b1-py3-none-any.whl", hash = "sha256:54cb13c6ab559cf14e6de94f778e286d8bc89a2262cff59ee3566a41c6ab5dd1", size = 9984, upload-time = "2025-05-16T19:02:58.926Z" }, + { url = "https://files.pythonhosted.org/packages/76/e7/cfd76d758f6efccafbb38f8aa6abd08328aa695846b1fe52ac9a55930937/opentelemetry_instrumentation_pymysql-0.61b0-py3-none-any.whl", hash = "sha256:00aca55c3fd767ebe484affa2f0c2f47edd4095038b509c6aca1a4cfed78af15", size = 10523, upload-time = "2026-03-04T14:19:46.535Z" }, ] [[package]] name = "opentelemetry-instrumentation-pyramid" -version = "0.54b1" +version = "0.61b0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, @@ -2610,14 +2603,14 @@ dependencies = [ { name = "opentelemetry-util-http" }, { name = "wrapt" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/cb/be/488a87bf48049c260da15ecc5ebec0e99287aaabf0a9e94d759066b84872/opentelemetry_instrumentation_pyramid-0.54b1.tar.gz", hash = "sha256:c68d46de5cbf1e804b2b730f7f60bf87f0bc9735e3d21b8359d35705ff8457b3", size = 15046, upload-time = "2025-05-16T19:03:58.137Z" } +sdist = { url = "https://files.pythonhosted.org/packages/18/fa/75964814689cf0c06fa357a67f6504ef12703587269fd33dda20074cc147/opentelemetry_instrumentation_pyramid-0.61b0.tar.gz", hash = "sha256:cf3ff0c7b1efdea287ee3e569c8c690284b789e34a0e0ab1f27bff41b619230f", size = 16779, upload-time = "2026-03-04T14:20:45.057Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7d/eb/456f9a79c0e3ac26036a0d262235b9cde3a085b88c8ec17e1f062b2d2327/opentelemetry_instrumentation_pyramid-0.54b1-py3-none-any.whl", hash = "sha256:11b7f210ff45b754db30f7522bb2e27be902ddea38a59cc16c08e16dd8061f42", size = 13999, upload-time = "2025-05-16T19:02:59.938Z" }, + { url = "https://files.pythonhosted.org/packages/c9/4c/1bd04b67306f8e78ef4ea9ea01ccd6e303ed9649482a8b4d3e6b59b5f48d/opentelemetry_instrumentation_pyramid-0.61b0-py3-none-any.whl", hash = "sha256:04440b3b2594c4b1a8f459aa6faeb26b19dcb26ee625a167713051d70a5b043d", size = 14691, upload-time = "2026-03-04T14:19:47.561Z" }, ] [[package]] name = "opentelemetry-instrumentation-redis" -version = "0.54b1" +version = "0.61b0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, @@ -2625,28 +2618,28 @@ dependencies = [ { name = "opentelemetry-semantic-conventions" }, { name = "wrapt" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c2/01/fad85231c3518bf6349a7ef483ef06a27100da8d1b7531dec9d8d09b94d8/opentelemetry_instrumentation_redis-0.54b1.tar.gz", hash = "sha256:89024c4752147d528e8c51fff0034193e628da339848cda78afe0cf4eb0c7ccb", size = 13908, upload-time = "2025-05-16T19:03:58.876Z" } +sdist = { url = "https://files.pythonhosted.org/packages/cf/21/26205f89358a5f2be3ee5512d3d3bce16b622977f64aeaa9d3fa8887dd39/opentelemetry_instrumentation_redis-0.61b0.tar.gz", hash = "sha256:ae0fbb56be9a641e621d55b02a7d62977a2c77c5ee760addd79b9b266e46e523", size = 14781, upload-time = "2026-03-04T14:20:45.694Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c0/c1/78f18965f16e34a8fecc5b10c52aca1243e75a512a0a0320556a69583f36/opentelemetry_instrumentation_redis-0.54b1-py3-none-any.whl", hash = "sha256:e98992bd38e93081158f9947a1a8eea51d96e8bfe5054894a5b8d1d82117c0c8", size = 14924, upload-time = "2025-05-16T19:03:01.07Z" }, + { url = "https://files.pythonhosted.org/packages/a5/e1/8f4c8e4194291dbe828aeabe779050a8497b379ad90040a5a0a7074b1d08/opentelemetry_instrumentation_redis-0.61b0-py3-none-any.whl", hash = "sha256:8d4e850bbb5f8eeafa44c0eac3a007990c7125de187bc9c3659e29ff7e091172", size = 15506, upload-time = "2026-03-04T14:19:48.588Z" }, ] [[package]] name = "opentelemetry-instrumentation-remoulade" -version = "0.54b1" +version = "0.61b0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, { name = "opentelemetry-instrumentation" }, { name = "opentelemetry-semantic-conventions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/e4/f5/d360444cd559f67a6d6f2467ca3f036db1894d3ba8c4a82a2c443eae674f/opentelemetry_instrumentation_remoulade-0.54b1.tar.gz", hash = "sha256:0c2f5571985375c55532402238dafb09d0e6b4b8c2a3c18925ef461bb3896c96", size = 8131, upload-time = "2025-05-16T19:03:59.804Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d6/37/c96cff08c12105df4eae981582a293ea4b44da25f2b52f1b10ccde22b079/opentelemetry_instrumentation_remoulade-0.61b0.tar.gz", hash = "sha256:38fbaddad86d2af7a6ae8bf979fdba09654b19fe3e83aacade7d45297c8fe445", size = 8137, upload-time = "2026-03-04T14:20:46.355Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/6b/35/0a17505193fd93e16d26d18a0605a9dedb5bdde9c4aed56f391160ed657b/opentelemetry_instrumentation_remoulade-0.54b1-py3-none-any.whl", hash = "sha256:5d50d298a1d456e1008166d0a20cb7ccada93b502b99cf74f344fb6d1df947c9", size = 10130, upload-time = "2025-05-16T19:03:02.152Z" }, + { url = "https://files.pythonhosted.org/packages/97/82/fab3c62e3f03f73f76ebccacc5c53cd95c8f941c47f2c81d786c11311bf0/opentelemetry_instrumentation_remoulade-0.61b0-py3-none-any.whl", hash = "sha256:841aa88afd01dbd28a686462ed4b58d90a40668d5054c0abc4cdcd79dbc15b7d", size = 10139, upload-time = "2026-03-04T14:19:50.172Z" }, ] [[package]] name = "opentelemetry-instrumentation-requests" -version = "0.54b1" +version = "0.61b0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, @@ -2654,14 +2647,14 @@ dependencies = [ { name = "opentelemetry-semantic-conventions" }, { name = "opentelemetry-util-http" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/e7/45/116da84930d3dc2f5cdd876283ca96e9b96547bccee7eaa0bd01ce6bf046/opentelemetry_instrumentation_requests-0.54b1.tar.gz", hash = "sha256:3eca5d697c5564af04c6a1dd23b6a3ffbaf11e64887c6051655cee03998f4654", size = 15148, upload-time = "2025-05-16T19:04:00.488Z" } +sdist = { url = "https://files.pythonhosted.org/packages/5a/c7/7a47cb85c7aa93a9c820552e414889185bcf91245271d12e5d443e5f834d/opentelemetry_instrumentation_requests-0.61b0.tar.gz", hash = "sha256:15f879ce8fb206bd7e6fdc61663ea63481040a845218c0cf42902ce70bd7e9d9", size = 18379, upload-time = "2026-03-04T14:20:46.959Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/2b/b1/6e33d2c3d3cc9e3ae20a9a77625ec81a509a0e5d7fa87e09e7f879468990/opentelemetry_instrumentation_requests-0.54b1-py3-none-any.whl", hash = "sha256:a0c4cd5d946224f336d6bd73cdabdecc6f80d5c39208f84eb96eb15f16cd41a0", size = 12968, upload-time = "2025-05-16T19:03:03.131Z" }, + { url = "https://files.pythonhosted.org/packages/5e/a1/a7a133b273d1f53950f16a370fc94367eff472c9c2576e8e9e28c62dcc9f/opentelemetry_instrumentation_requests-0.61b0-py3-none-any.whl", hash = "sha256:cce19b379949fe637eb73ba39b02c57d2d0805447ca6d86534aa33fcb141f683", size = 14207, upload-time = "2026-03-04T14:19:51.765Z" }, ] [[package]] name = "opentelemetry-instrumentation-sqlalchemy" -version = "0.54b1" +version = "0.61b0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, @@ -2670,28 +2663,28 @@ dependencies = [ { name = "packaging" }, { name = "wrapt" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ac/33/78a25ae4233d42058bb0b363ba4fea7d7210e53c24e5e31f16d5cf6cf957/opentelemetry_instrumentation_sqlalchemy-0.54b1.tar.gz", hash = "sha256:97839acf1c9b96ded857fca57a09b86a56cf8d9eb6d706b7ceaee9352a460e03", size = 14620, upload-time = "2025-05-16T19:04:01.215Z" } +sdist = { url = "https://files.pythonhosted.org/packages/9e/4f/3a325b180944610697a0a926d49d782b41a86120050d44fefb2715b630ac/opentelemetry_instrumentation_sqlalchemy-0.61b0.tar.gz", hash = "sha256:13a3a159a2043a52f0180b3757fbaa26741b0e08abb50deddce4394c118956e6", size = 15343, upload-time = "2026-03-04T14:20:47.648Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c7/2b/1c954885815614ef5c1e8c7bbf57a5275e64cd6fb5946b65e17162a34037/opentelemetry_instrumentation_sqlalchemy-0.54b1-py3-none-any.whl", hash = "sha256:d2ca5edb4c7ecef120d51aad6793b7da1cc80207ccfd31c437ee18f098e7c4c4", size = 14169, upload-time = "2025-05-16T19:03:04.119Z" }, + { url = "https://files.pythonhosted.org/packages/1f/97/b906a930c6a1a20c53ecc8b58cabc2cdd0ce560a2b5d44259084ffe4333e/opentelemetry_instrumentation_sqlalchemy-0.61b0-py3-none-any.whl", hash = "sha256:f115e0be54116ba4c327b8d7b68db4045ee18d44439d888ab8130a549c50d1c1", size = 14547, upload-time = "2026-03-04T14:19:53.088Z" }, ] [[package]] name = "opentelemetry-instrumentation-sqlite3" -version = "0.54b1" +version = "0.61b0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, { name = "opentelemetry-instrumentation" }, { name = "opentelemetry-instrumentation-dbapi" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f3/07/cae18dbc2ba1997a382e63f1ee7527dff9557675c2802709ca8a011341c4/opentelemetry_instrumentation_sqlite3-0.54b1.tar.gz", hash = "sha256:e32ec80a2f50df035bf16de142527157b98a60a3863ddcb6aa20beae8a64a24d", size = 7929, upload-time = "2025-05-16T19:04:02.339Z" } +sdist = { url = "https://files.pythonhosted.org/packages/86/92/0d561326bf4026817abef097ec6be3ae7a86dd02d64e1a80c2218057a999/opentelemetry_instrumentation_sqlite3-0.61b0.tar.gz", hash = "sha256:96d4f0fa35ba7ee9aa683aa17726cb358c8029cc7b3cf55668ccc77254c29ca5", size = 7933, upload-time = "2026-03-04T14:20:48.301Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/45/8a/7a6b6b1cabc65e237ebbfd10429997579eaa4281c169429c28eb5a60e177/opentelemetry_instrumentation_sqlite3-0.54b1-py3-none-any.whl", hash = "sha256:756c8f51a3b738f4cd52556b2146a6e2e6a33516b494aa4dbb7478702af4a475", size = 9342, upload-time = "2025-05-16T19:03:05.641Z" }, + { url = "https://files.pythonhosted.org/packages/76/a8/4454bd16b3cd7edebc71327c3c4946e7bf969e8fcd8fe5581d9f5d92ec2d/opentelemetry_instrumentation_sqlite3-0.61b0-py3-none-any.whl", hash = "sha256:202a18e7f9d231bfa44771fdb068bff16f24a6fa5e424a0df4d9232b1a818693", size = 9341, upload-time = "2026-03-04T14:19:54.426Z" }, ] [[package]] name = "opentelemetry-instrumentation-starlette" -version = "0.54b1" +version = "0.61b0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, @@ -2700,42 +2693,42 @@ dependencies = [ { name = "opentelemetry-semantic-conventions" }, { name = "opentelemetry-util-http" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f9/43/c8095007bcc800a5465ebe50b097ab0da8b1d973f9afdcea04d98d2cb81d/opentelemetry_instrumentation_starlette-0.54b1.tar.gz", hash = "sha256:04f5902185166ad0a96bbc5cc184983bdf535ac92b1edc7a6093e9d14efa00d1", size = 14492, upload-time = "2025-05-16T19:04:03.012Z" } +sdist = { url = "https://files.pythonhosted.org/packages/0f/2c/5a81129c249a608e8226a3b1879e475354c8668b870f996379f994431e34/opentelemetry_instrumentation_starlette-0.61b0.tar.gz", hash = "sha256:6e4633bf0271aa2e00692dd46963df711c5ee32db13849e54edd8afefe9e1112", size = 14709, upload-time = "2026-03-04T14:20:48.904Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/27/1d/9215d1696a428bbc0c46b8fc7c0189693ba5cdd9032f1dbeff04e9526828/opentelemetry_instrumentation_starlette-0.54b1-py3-none-any.whl", hash = "sha256:533e730308b5e6e99ab2a219c891f8e08ef5e67db76a148cc2f6c4fd5b6bcc0e", size = 11740, upload-time = "2025-05-16T19:03:07.079Z" }, + { url = "https://files.pythonhosted.org/packages/a5/32/8ae96898bc5cb42be5638d0725dfd29b2e1468c1dd60e2aed77bbd2c232f/opentelemetry_instrumentation_starlette-0.61b0-py3-none-any.whl", hash = "sha256:59378793b12d5c67143f27dd8b7eedbb4566abc24c924793933e8a33416ef883", size = 11947, upload-time = "2026-03-04T14:19:55.425Z" }, ] [[package]] name = "opentelemetry-instrumentation-system-metrics" -version = "0.54b1" +version = "0.61b0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, { name = "opentelemetry-instrumentation" }, { name = "psutil" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/10/cc/0db64253beac5a58dca621114f1be8c95af3ec8ac31785fb28b6ed82021e/opentelemetry_instrumentation_system_metrics-0.54b1.tar.gz", hash = "sha256:2846ba1019e1672fb605eff3d3af198fa1b8f1540ece70da82a2d20d9b95779b", size = 15007, upload-time = "2025-05-16T19:04:03.758Z" } +sdist = { url = "https://files.pythonhosted.org/packages/9c/68/a403ade03a7ccba3d113a02c041942ab8feb4471101eb3a02da6403e9258/opentelemetry_instrumentation_system_metrics-0.61b0.tar.gz", hash = "sha256:3eb55f9a058797cf915946cbb7445e00b31316ac3e55050475792edf3367c321", size = 17637, upload-time = "2026-03-04T14:20:49.591Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f5/fd/e9bd23fd734bbdc028e7ebe3d25855381b696ceca214f80ad7fe74e9079c/opentelemetry_instrumentation_system_metrics-0.54b1-py3-none-any.whl", hash = "sha256:1b6f23cc8cf18b525bdb285c3664b521ce81b1e82c4f3db6a82210b8c37af1e4", size = 13093, upload-time = "2025-05-16T19:03:08.516Z" }, + { url = "https://files.pythonhosted.org/packages/8d/2b/3142c6e0f3c9a5be3e5187933bc28b0c8b7e77c04937aec317eee96e8fdb/opentelemetry_instrumentation_system_metrics-0.61b0-py3-none-any.whl", hash = "sha256:7d4fe3e0ce14e0e6eb18f5826100d6cc1af662e5a8ebc74e9b91fe23f192f3e8", size = 14909, upload-time = "2026-03-04T14:19:56.306Z" }, ] [[package]] name = "opentelemetry-instrumentation-threading" -version = "0.54b1" +version = "0.61b0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, { name = "opentelemetry-instrumentation" }, { name = "wrapt" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a0/bd/561245292e7cc78ac7a0a75537873aea87440cb9493d41371421b3308c2b/opentelemetry_instrumentation_threading-0.54b1.tar.gz", hash = "sha256:3a081085b59675baf7bd93126a681903e6304a5f283df5eaecdd44bcb66df578", size = 8774, upload-time = "2025-05-16T19:04:04.482Z" } +sdist = { url = "https://files.pythonhosted.org/packages/12/8f/8dedba66100cda58af057926449a5e58e6c008bec02bc2746c03c3d85dcd/opentelemetry_instrumentation_threading-0.61b0.tar.gz", hash = "sha256:38e0263c692d15a7a458b3fa0286d29290448fa4ac4c63045edac438c6113433", size = 9163, upload-time = "2026-03-04T14:20:50.546Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/81/10/d87ec07d69546adaad525ba5d40d27324a45cba29097d9854a53d9af5047/opentelemetry_instrumentation_threading-0.54b1-py3-none-any.whl", hash = "sha256:bc229e6cd3f2b29fafe0a8dd3141f452e16fcb4906bca4fbf52609f99fb1eb42", size = 9314, upload-time = "2025-05-16T19:03:09.527Z" }, + { url = "https://files.pythonhosted.org/packages/e8/77/c06d960aede1a014812aa4fafde0ae546d790f46416fbeafa2b32095aae3/opentelemetry_instrumentation_threading-0.61b0-py3-none-any.whl", hash = "sha256:735f4a1dc964202fc8aff475efc12bb64e6566f22dff52d5cb5de864b3fe1a70", size = 9337, upload-time = "2026-03-04T14:19:57.983Z" }, ] [[package]] name = "opentelemetry-instrumentation-tornado" -version = "0.54b1" +version = "0.61b0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, @@ -2743,28 +2736,28 @@ dependencies = [ { name = "opentelemetry-semantic-conventions" }, { name = "opentelemetry-util-http" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/61/61/9da044c2ae3cea9a4f0e4cf28bbc1a5aaf7052c2b00ad9f305a107da9110/opentelemetry_instrumentation_tornado-0.54b1.tar.gz", hash = "sha256:73a5ba0f915688907dd4640653d3970167715c42a5ef4a948bbcf93ad9682b8d", size = 17089, upload-time = "2025-05-16T19:04:05.666Z" } +sdist = { url = "https://files.pythonhosted.org/packages/38/2f/1c1663ef2d71b21501f6431e93d4a15fdd57773d49066cf5a699ac6751e5/opentelemetry_instrumentation_tornado-0.61b0.tar.gz", hash = "sha256:cbc12bc2f3ade09f5a438df8e2d9193ed0d99648b7ed049ba20aa2d886e426bd", size = 22816, upload-time = "2026-03-04T14:20:51.435Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/2f/70/858aabf04ef24f409995c032c06c9a96e7c8bb9a257c9981b7fb380b7458/opentelemetry_instrumentation_tornado-0.54b1-py3-none-any.whl", hash = "sha256:3f4773cb3adfd6fdd592f182a72be85ca6cf01500a9973ac17947ce81d9872ee", size = 15327, upload-time = "2025-05-16T19:03:10.527Z" }, + { url = "https://files.pythonhosted.org/packages/5f/e2/8c2144ce49e24ad22f467d0b7f05bfde954ddc16f5d2516d4a8624c2de40/opentelemetry_instrumentation_tornado-0.61b0-py3-none-any.whl", hash = "sha256:815316bb602c4c5e869e28f87fe719f7226ca3a954a6c7b8715eb923fac3de48", size = 17491, upload-time = "2026-03-04T14:19:59.341Z" }, ] [[package]] name = "opentelemetry-instrumentation-tortoiseorm" -version = "0.54b1" +version = "0.61b0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, { name = "opentelemetry-instrumentation" }, { name = "opentelemetry-semantic-conventions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/55/ec/c1c2916e9448ea2c5fde2700bf6577d42db5a2ed0fda856e388d34e42872/opentelemetry_instrumentation_tortoiseorm-0.54b1.tar.gz", hash = "sha256:f9ffe00bcdfa895dfa1a512f4fde186ebd816a4636afd26a7716f258b4c7e3f9", size = 8263, upload-time = "2025-05-16T19:04:06.372Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c4/96/28a1da05198ddb8b0826ec20f9cb68d69318ee3ffea0745c60f92048efa1/opentelemetry_instrumentation_tortoiseorm-0.61b0.tar.gz", hash = "sha256:af5ed2af3554423af1e095e80c10344cd37da466fe9c05673b1ce43066994e59", size = 8810, upload-time = "2026-03-04T14:20:52.102Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/86/e0/81eb1ec3cbe436030c32ada365f6fcf9e034c882d8c3060dfe35ffdfabc0/opentelemetry_instrumentation_tortoiseorm-0.54b1-py3-none-any.whl", hash = "sha256:0335efcd4f5e240efecc36f909939dbc6fb8c9b0733dc3f0615a39c3f6544c7e", size = 10158, upload-time = "2025-05-16T19:03:11.572Z" }, + { url = "https://files.pythonhosted.org/packages/0d/f0/dd6f65379a8f37f9297593576f826c24e9a3127459e07d7da5ead85ad65c/opentelemetry_instrumentation_tortoiseorm-0.61b0-py3-none-any.whl", hash = "sha256:9e70417807281492de070f07df9fd3812538b098011d1eaacb9dba5fbe7eab24", size = 10215, upload-time = "2026-03-04T14:20:00.352Z" }, ] [[package]] name = "opentelemetry-instrumentation-urllib" -version = "0.54b1" +version = "0.61b0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, @@ -2772,14 +2765,14 @@ dependencies = [ { name = "opentelemetry-semantic-conventions" }, { name = "opentelemetry-util-http" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a7/52/47ecbce59d47e4543286ab88753efe1903f40a80c05397407375b4e600c2/opentelemetry_instrumentation_urllib-0.54b1.tar.gz", hash = "sha256:99943400b6814ebf072735e0fb42dc5c74705f30b64ebed3778f0e7c6e16d63e", size = 13788, upload-time = "2025-05-16T19:04:07.028Z" } +sdist = { url = "https://files.pythonhosted.org/packages/81/37/77cd326b083390e74280c08bbd585153809619dad068e2d1b253fec1164d/opentelemetry_instrumentation_urllib-0.61b0.tar.gz", hash = "sha256:6a15ff862fc1603e0ea5ea75558f76f36436b02e0ae48daecedcb5e574cce160", size = 16894, upload-time = "2026-03-04T14:20:52.726Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/62/2a/d8c9876d80d89f728c89439a880eaccedab3ffe1cc83b2c49abf17b81038/opentelemetry_instrumentation_urllib-0.54b1-py3-none-any.whl", hash = "sha256:94744470733f61f3dd282be7868e93f5bc277f07a0aeda7c836c913cbcf4f416", size = 12625, upload-time = "2025-05-16T19:03:12.701Z" }, + { url = "https://files.pythonhosted.org/packages/3b/fc/a88fbfd8b9eb16ba1c21f0514c12696441be7fc42c7e319f3ee793bf9e96/opentelemetry_instrumentation_urllib-0.61b0-py3-none-any.whl", hash = "sha256:d7e409876580fb41102e3522ce81a756e53a74073c036a267a1c280cc0fa09b0", size = 13970, upload-time = "2026-03-04T14:20:01.24Z" }, ] [[package]] name = "opentelemetry-instrumentation-urllib3" -version = "0.54b1" +version = "0.61b0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, @@ -2788,14 +2781,14 @@ dependencies = [ { name = "opentelemetry-util-http" }, { name = "wrapt" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ed/6f/76a46806cd21002cac1bfd087f5e4674b195ab31ab44c773ca534b6bb546/opentelemetry_instrumentation_urllib3-0.54b1.tar.gz", hash = "sha256:0d30ba3b230e4100cfadaad29174bf7bceac70e812e4f5204e681e4b55a74cd9", size = 15697, upload-time = "2025-05-16T19:04:07.709Z" } +sdist = { url = "https://files.pythonhosted.org/packages/fa/80/7ad8da30f479c6117768e72d6f2f3f0bd3495338707d6f61de042149578a/opentelemetry_instrumentation_urllib3-0.61b0.tar.gz", hash = "sha256:f00037bc8ff813153c4b79306f55a14618c40469a69c6c03a3add29dc7e8b928", size = 19325, upload-time = "2026-03-04T14:20:53.386Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ff/7a/d75bec41edb6deaf1d2859bab66a84c8ba03e822e7eafdb245da205e53f6/opentelemetry_instrumentation_urllib3-0.54b1-py3-none-any.whl", hash = "sha256:e87958c297ddd36d30e1c9069f34a9690e845e4ccc2662dd80e99ed976d4c03e", size = 13123, upload-time = "2025-05-16T19:03:14.053Z" }, + { url = "https://files.pythonhosted.org/packages/07/0c/01359e55b9f2fb2b1d4d9e85e77773a96697207895118533f3be718a3326/opentelemetry_instrumentation_urllib3-0.61b0-py3-none-any.whl", hash = "sha256:9644f8c07870266e52f129e6226859ff3a35192555abe46fa0ef9bbbf5b6b46d", size = 14339, upload-time = "2026-03-04T14:20:02.681Z" }, ] [[package]] name = "opentelemetry-instrumentation-wsgi" -version = "0.54b1" +version = "0.61b0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, @@ -2803,99 +2796,99 @@ dependencies = [ { name = "opentelemetry-semantic-conventions" }, { name = "opentelemetry-util-http" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a9/0f/442eba02bd277fae2f5eb3ac5f8dd5f8cc52ddbe080506748871b91a63ab/opentelemetry_instrumentation_wsgi-0.54b1.tar.gz", hash = "sha256:261ad737e0058812aaae6bb7d6e0fa7344de62464c5df30c82bea180e735b903", size = 18244, upload-time = "2025-05-16T19:04:08.448Z" } +sdist = { url = "https://files.pythonhosted.org/packages/89/e5/189f2845362cfe78e356ba127eab21456309def411c6874aa4800c3de816/opentelemetry_instrumentation_wsgi-0.61b0.tar.gz", hash = "sha256:380f2ae61714e5303275a80b2e14c58571573cd1fddf496d8c39fb9551c5e532", size = 19898, upload-time = "2026-03-04T14:20:54.068Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/aa/2f/075156d123e589d6728cc4c1a43d0335fa16e8f4a9f723a4af9267d91169/opentelemetry_instrumentation_wsgi-0.54b1-py3-none-any.whl", hash = "sha256:6d99dca32ce232251cd321bf86e8c9d0a60c5f088bcbe5ad55d12a2006fe056e", size = 14378, upload-time = "2025-05-16T19:03:15.074Z" }, + { url = "https://files.pythonhosted.org/packages/96/75/d6b42ba26f3c921be6d01b16561b7bb863f843bad7ac3a5011f62617bcab/opentelemetry_instrumentation_wsgi-0.61b0-py3-none-any.whl", hash = "sha256:bd33b0824166f24134a3400648805e8d2e6a7951f070241294e8b8866611d7fa", size = 14628, upload-time = "2026-03-04T14:20:03.934Z" }, ] [[package]] name = "opentelemetry-processor-baggage" -version = "0.54b1" +version = "0.61b0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, { name = "opentelemetry-sdk" }, { name = "wrapt" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/9b/47/6ebc196ca33a79e6e8839d33ebf1b9a7d88646f48b12c5687e5a90300879/opentelemetry_processor_baggage-0.54b1.tar.gz", hash = "sha256:d3ec2a99fb8b88ca1153cf9b1b8eae76bd2bb518fb900f758a8d24e439276055", size = 7579, upload-time = "2025-05-16T19:04:09.148Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a3/0d/4afee20490ef53a449b1781b0671d84858742a2ccfb01c08de398a5d1ccd/opentelemetry_processor_baggage-0.61b0.tar.gz", hash = "sha256:4d1d2a624e3aa9a8b6c6d1f560ba2951f97acf875f57502a274c5078043a69d5", size = 7573, upload-time = "2026-03-04T14:20:54.941Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b0/9f/db3a2e7162dc73f012b440c5600acaab301170cffe8d8ccce5e069bc4176/opentelemetry_processor_baggage-0.54b1-py3-none-any.whl", hash = "sha256:1502475016c90b68642c9377803fd77b7f295d0b33e0d3449ba113b405de2b49", size = 8877, upload-time = "2025-05-16T19:03:16.127Z" }, + { url = "https://files.pythonhosted.org/packages/ac/24/0ef2cf49e6ac9b2b422400abbf528230a409c9e174572f2d13e2dff7ec7c/opentelemetry_processor_baggage-0.61b0-py3-none-any.whl", hash = "sha256:f6b5937e93bda8f380d8f5f667355c7d127e9296b38dfacf39fd328ab410262c", size = 8881, upload-time = "2026-03-04T14:20:05.25Z" }, ] [[package]] name = "opentelemetry-propagator-aws-xray" -version = "1.0.1" +version = "1.0.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/32/13/310a7f3c789eb9bb51f8ee9b88fb4b9f4f1e7191c8c96c7ea6f15eaa99b5/opentelemetry-propagator-aws-xray-1.0.1.tar.gz", hash = "sha256:6e8be667bbcf17c3d81d70b2a7cdec0b11257ff64d3829ffe75b810ba1b49f86", size = 8932, upload-time = "2021-10-18T22:07:40.108Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f4/31/40004e9e55b1e5694ef3a7526f0b7637df44196fc68a8b7d248a3684680f/opentelemetry_propagator_aws_xray-1.0.2.tar.gz", hash = "sha256:6b2cee5479d2ef0172307b66ed2ed151f598a0fd29b3c01133ac87ca06326260", size = 10994, upload-time = "2024-08-05T17:45:57.601Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/04/24/2b1694b9452ac7ab3567dcb80902f7c5c8a39962751d5a4c54a357caa49e/opentelemetry_propagator_aws_xray-1.0.1-py3-none-any.whl", hash = "sha256:49267a1d72b3f04880ac75e24f9ef38fe323e2f3156c4531e0e00c71c0829c0f", size = 10812, upload-time = "2021-10-18T22:07:38.08Z" }, + { url = "https://files.pythonhosted.org/packages/ea/89/849a0847871fd9745315896ad9e23d6479db84d90b8b36c4c26dc46e92b8/opentelemetry_propagator_aws_xray-1.0.2-py3-none-any.whl", hash = "sha256:1c99181ee228e99bddb638a0c911a297fa21f1c3a0af951f841e79919b5f1934", size = 10856, upload-time = "2024-08-05T17:45:56.492Z" }, ] [[package]] name = "opentelemetry-propagator-b3" -version = "1.33.1" +version = "1.40.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "deprecated" }, { name = "opentelemetry-api" }, + { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/1c/b4/4fe00e8c63175e35c310ac4e5091b3c22a468a6098e8a5eacd8b991d6989/opentelemetry_propagator_b3-1.33.1.tar.gz", hash = "sha256:46bbe76d95ac7e1f50b263230aa1ce86445120f10c7008d66cb08266468561a3", size = 9618, upload-time = "2025-05-16T18:52:50.973Z" } +sdist = { url = "https://files.pythonhosted.org/packages/eb/fe/e0c84af5c654ec42165ba57af83c7f67e4b8af77f836ddc29dee59ff73c6/opentelemetry_propagator_b3-1.40.0.tar.gz", hash = "sha256:59b6925498947c08a1b7e0dd38193ff97e5009bec74ec23824300c2e32f77bcf", size = 9587, upload-time = "2026-03-04T14:17:30.079Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/67/4a/16676216b5b8db95a6bdeb529bf17603e14c70ac15fcadca8de2bd135c65/opentelemetry_propagator_b3-1.33.1-py3-none-any.whl", hash = "sha256:5c65708fbecb317ab4f1880e81f7bb0bf48caa2e1d52fe31f89d1cb86172a69c", size = 8936, upload-time = "2025-05-16T18:52:34.125Z" }, + { url = "https://files.pythonhosted.org/packages/8f/84/8654cc0539b5145046b2e60d058cebad401a600dd0b1240f1711c6788643/opentelemetry_propagator_b3-1.40.0-py3-none-any.whl", hash = "sha256:cb72a1698fd1d1b434f70dc90c1de62da8ade1dd84850d1f040eccf6a420fa7b", size = 8922, upload-time = "2026-03-04T14:17:14.732Z" }, ] [[package]] name = "opentelemetry-propagator-jaeger" -version = "1.33.1" +version = "1.40.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/7c/28/2be617ef9bf804f65864d17eef13af582992d529c61d58a8a17d711b918a/opentelemetry_propagator_jaeger-1.33.1.tar.gz", hash = "sha256:b4cd3f123a720db872401e2179f7384c70922a6b9bab2873f003419be82bb5e3", size = 8676, upload-time = "2025-05-16T18:52:51.559Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a9/5f/c5d548ccf2452e809f2adfa944843ae148d532e09a7342d97bd9a45a840e/opentelemetry_propagator_jaeger-1.40.0.tar.gz", hash = "sha256:8afa11a33242b5cd33a0fabb91d3aef0f3d7fc6cac0a1402e241531c0800fe54", size = 8637, upload-time = "2026-03-04T14:17:30.645Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/66/24/a20343cfa49b38192ca6e314294b50a76d427c7dcbfd1a3ddb19706fed71/opentelemetry_propagator_jaeger-1.33.1-py3-none-any.whl", hash = "sha256:d5cfd139b245b32b45edda478b7be1fc52ecc93a199aa6ed7fd074086d81d083", size = 8778, upload-time = "2025-05-16T18:52:34.976Z" }, + { url = "https://files.pythonhosted.org/packages/48/ab/7b4a12581b5d3b7b528ba138d4d0dfe5836fd3d09cd7e210141c2c7c0e28/opentelemetry_propagator_jaeger-1.40.0-py3-none-any.whl", hash = "sha256:e70e5aa8c06fc0d19a83a212eb408a3ac3d10de8dab203dc988ddf07a99e9479", size = 8761, upload-time = "2026-03-04T14:17:15.58Z" }, ] [[package]] name = "opentelemetry-propagator-ot-trace" -version = "0.54b1" +version = "0.61b0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, { name = "opentelemetry-sdk" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/01/a3/b1bc6a7dc4aa7467b7d4537452a4fb089cb82246138fed6a3272e9ec2de9/opentelemetry_propagator_ot_trace-0.54b1.tar.gz", hash = "sha256:ce6bbebe9a3e57d8abada605b3ef296d363c764bb9a075677ea6f7aed7ddf8e6", size = 5026, upload-time = "2025-05-16T19:04:10.126Z" } +sdist = { url = "https://files.pythonhosted.org/packages/6e/6f/f47b3cac29be73f326a29d957844655f5e639b4ee4fab4abc2a5584c5b47/opentelemetry_propagator_ot_trace-0.61b0.tar.gz", hash = "sha256:4d10268596ad51a8161132684fcefc24af753d6f278bb33b705eb70e4135d302", size = 5025, upload-time = "2026-03-04T14:20:55.807Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/db/62/cab99d81b9de2f74e80cf5deac45c31ec110d65a6d9b043152cffe2e3edd/opentelemetry_propagator_ot_trace-0.54b1-py3-none-any.whl", hash = "sha256:3c7885bdee37b28562e17cd8cb72747102fdccd9d4e557f5b4afb109092db829", size = 4769, upload-time = "2025-05-16T19:03:17.047Z" }, + { url = "https://files.pythonhosted.org/packages/48/48/aee81654390a43328e3d69f27f90ec20502e2310592c791f39322a78e3e6/opentelemetry_propagator_ot_trace-0.61b0-py3-none-any.whl", hash = "sha256:2f9a623af44a7aa8a14d1137b50b60738b3b6d6429caf481685bf4a8d7354ef7", size = 4771, upload-time = "2026-03-04T14:20:06.154Z" }, ] [[package]] name = "opentelemetry-proto" -version = "1.33.1" +version = "1.40.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "protobuf" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f6/dc/791f3d60a1ad8235930de23eea735ae1084be1c6f96fdadf38710662a7e5/opentelemetry_proto-1.33.1.tar.gz", hash = "sha256:9627b0a5c90753bf3920c398908307063e4458b287bb890e5c1d6fa11ad50b68", size = 34363, upload-time = "2025-05-16T18:52:52.141Z" } +sdist = { url = "https://files.pythonhosted.org/packages/4c/77/dd38991db037fdfce45849491cb61de5ab000f49824a00230afb112a4392/opentelemetry_proto-1.40.0.tar.gz", hash = "sha256:03f639ca129ba513f5819810f5b1f42bcb371391405d99c168fe6937c62febcd", size = 45667, upload-time = "2026-03-04T14:17:31.194Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c4/29/48609f4c875c2b6c80930073c82dd1cafd36b6782244c01394007b528960/opentelemetry_proto-1.33.1-py3-none-any.whl", hash = "sha256:243d285d9f29663fc7ea91a7171fcc1ccbbfff43b48df0774fd64a37d98eda70", size = 55854, upload-time = "2025-05-16T18:52:36.269Z" }, + { url = "https://files.pythonhosted.org/packages/b9/b2/189b2577dde745b15625b3214302605b1353436219d42b7912e77fa8dc24/opentelemetry_proto-1.40.0-py3-none-any.whl", hash = "sha256:266c4385d88923a23d63e353e9761af0f47a6ed0d486979777fe4de59dc9b25f", size = 72073, upload-time = "2026-03-04T14:17:16.673Z" }, ] [[package]] name = "opentelemetry-sdk" -version = "1.33.1" +version = "1.40.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, { name = "opentelemetry-semantic-conventions" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/67/12/909b98a7d9b110cce4b28d49b2e311797cffdce180371f35eba13a72dd00/opentelemetry_sdk-1.33.1.tar.gz", hash = "sha256:85b9fcf7c3d23506fbc9692fd210b8b025a1920535feec50bd54ce203d57a531", size = 161885, upload-time = "2025-05-16T18:52:52.832Z" } +sdist = { url = "https://files.pythonhosted.org/packages/58/fd/3c3125b20ba18ce2155ba9ea74acb0ae5d25f8cd39cfd37455601b7955cc/opentelemetry_sdk-1.40.0.tar.gz", hash = "sha256:18e9f5ec20d859d268c7cb3c5198c8d105d073714db3de50b593b8c1345a48f2", size = 184252, upload-time = "2026-03-04T14:17:31.87Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/df/8e/ae2d0742041e0bd7fe0d2dcc5e7cce51dcf7d3961a26072d5b43cc8fa2a7/opentelemetry_sdk-1.33.1-py3-none-any.whl", hash = "sha256:19ea73d9a01be29cacaa5d6c8ce0adc0b7f7b4d58cc52f923e4413609f670112", size = 118950, upload-time = "2025-05-16T18:52:37.297Z" }, + { url = "https://files.pythonhosted.org/packages/2c/c5/6a852903d8bfac758c6dc6e9a68b015d3c33f2f1be5e9591e0f4b69c7e0a/opentelemetry_sdk-1.40.0-py3-none-any.whl", hash = "sha256:787d2154a71f4b3d81f20524a8ce061b7db667d24e46753f32a7bc48f1c1f3f1", size = 141951, upload-time = "2026-03-04T14:17:17.961Z" }, ] [[package]] @@ -2912,24 +2905,24 @@ wheels = [ [[package]] name = "opentelemetry-semantic-conventions" -version = "0.54b1" +version = "0.61b0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "deprecated" }, { name = "opentelemetry-api" }, + { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/5b/2c/d7990fc1ffc82889d466e7cd680788ace44a26789809924813b164344393/opentelemetry_semantic_conventions-0.54b1.tar.gz", hash = "sha256:d1cecedae15d19bdaafca1e56b29a66aa286f50b5d08f036a145c7f3e9ef9cee", size = 118642, upload-time = "2025-05-16T18:52:53.962Z" } +sdist = { url = "https://files.pythonhosted.org/packages/6d/c0/4ae7973f3c2cfd2b6e321f1675626f0dab0a97027cc7a297474c9c8f3d04/opentelemetry_semantic_conventions-0.61b0.tar.gz", hash = "sha256:072f65473c5d7c6dc0355b27d6c9d1a679d63b6d4b4b16a9773062cb7e31192a", size = 145755, upload-time = "2026-03-04T14:17:32.664Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/0a/80/08b1698c52ff76d96ba440bf15edc2f4bc0a279868778928e947c1004bdd/opentelemetry_semantic_conventions-0.54b1-py3-none-any.whl", hash = "sha256:29dab644a7e435b58d3a3918b58c333c92686236b30f7891d5e51f02933ca60d", size = 194938, upload-time = "2025-05-16T18:52:38.796Z" }, + { url = "https://files.pythonhosted.org/packages/b2/37/cc6a55e448deaa9b27377d087da8615a3416d8ad523d5960b78dbeadd02a/opentelemetry_semantic_conventions-0.61b0-py3-none-any.whl", hash = "sha256:fa530a96be229795f8cef353739b618148b0fe2b4b3f005e60e262926c4d38e2", size = 231621, upload-time = "2026-03-04T14:17:19.33Z" }, ] [[package]] name = "opentelemetry-util-http" -version = "0.54b1" +version = "0.61b0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a8/9f/1d8a1d1f34b9f62f2b940b388bf07b8167a8067e70870055bd05db354e5c/opentelemetry_util_http-0.54b1.tar.gz", hash = "sha256:f0b66868c19fbaf9c9d4e11f4a7599fa15d5ea50b884967a26ccd9d72c7c9d15", size = 8044, upload-time = "2025-05-16T19:04:10.79Z" } +sdist = { url = "https://files.pythonhosted.org/packages/57/3c/f0196223efc5c4ca19f8fad3d5462b171ac6333013335ce540c01af419e9/opentelemetry_util_http-0.61b0.tar.gz", hash = "sha256:1039cb891334ad2731affdf034d8fb8b48c239af9b6dd295e5fabd07f1c95572", size = 11361, upload-time = "2026-03-04T14:20:57.01Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a4/ef/c5aa08abca6894792beed4c0405e85205b35b8e73d653571c9ff13a8e34e/opentelemetry_util_http-0.54b1-py3-none-any.whl", hash = "sha256:b1c91883f980344a1c3c486cffd47ae5c9c1dd7323f9cbe9fdb7cadb401c87c9", size = 7301, upload-time = "2025-05-16T19:03:18.18Z" }, + { url = "https://files.pythonhosted.org/packages/0d/e5/c08aaaf2f64288d2b6ef65741d2de5454e64af3e050f34285fb1907492fe/opentelemetry_util_http-0.61b0-py3-none-any.whl", hash = "sha256:8e715e848233e9527ea47e275659ea60a57a75edf5206a3b937e236a6da5fc33", size = 9281, upload-time = "2026-03-04T14:20:08.364Z" }, ] [[package]] @@ -3194,16 +3187,17 @@ wheels = [ [[package]] name = "protobuf" -version = "5.29.6" +version = "6.33.5" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/7e/57/394a763c103e0edf87f0938dafcd918d53b4c011dfc5c8ae80f3b0452dbb/protobuf-5.29.6.tar.gz", hash = "sha256:da9ee6a5424b6b30fd5e45c5ea663aef540ca95f9ad99d1e887e819cdf9b8723", size = 425623, upload-time = "2026-02-04T22:54:40.584Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ba/25/7c72c307aafc96fa87062aa6291d9f7c94836e43214d43722e86037aac02/protobuf-6.33.5.tar.gz", hash = "sha256:6ddcac2a081f8b7b9642c09406bc6a4290128fce5f471cddd165960bb9119e5c", size = 444465, upload-time = "2026-01-29T21:51:33.494Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d4/88/9ee58ff7863c479d6f8346686d4636dd4c415b0cbeed7a6a7d0617639c2a/protobuf-5.29.6-cp310-abi3-win32.whl", hash = "sha256:62e8a3114992c7c647bce37dcc93647575fc52d50e48de30c6fcb28a6a291eb1", size = 423357, upload-time = "2026-02-04T22:54:25.805Z" }, - { url = "https://files.pythonhosted.org/packages/1c/66/2dc736a4d576847134fb6d80bd995c569b13cdc7b815d669050bf0ce2d2c/protobuf-5.29.6-cp310-abi3-win_amd64.whl", hash = "sha256:7e6ad413275be172f67fdee0f43484b6de5a904cc1c3ea9804cb6fe2ff366eda", size = 435175, upload-time = "2026-02-04T22:54:28.592Z" }, - { url = "https://files.pythonhosted.org/packages/06/db/49b05966fd208ae3f44dcd33837b6243b4915c57561d730a43f881f24dea/protobuf-5.29.6-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:b5a169e664b4057183a34bdc424540e86eea47560f3c123a0d64de4e137f9269", size = 418619, upload-time = "2026-02-04T22:54:30.266Z" }, - { url = "https://files.pythonhosted.org/packages/b7/d7/48cbf6b0c3c39761e47a99cb483405f0fde2be22cf00d71ef316ce52b458/protobuf-5.29.6-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:a8866b2cff111f0f863c1b3b9e7572dc7eaea23a7fae27f6fc613304046483e6", size = 320284, upload-time = "2026-02-04T22:54:31.782Z" }, - { url = "https://files.pythonhosted.org/packages/e3/dd/cadd6ec43069247d91f6345fa7a0d2858bef6af366dbd7ba8f05d2c77d3b/protobuf-5.29.6-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:e3387f44798ac1106af0233c04fb8abf543772ff241169946f698b3a9a3d3ab9", size = 320478, upload-time = "2026-02-04T22:54:32.909Z" }, - { url = "https://files.pythonhosted.org/packages/5a/cb/e3065b447186cb70aa65acc70c86baf482d82bf75625bf5a2c4f6919c6a3/protobuf-5.29.6-py3-none-any.whl", hash = "sha256:6b9edb641441b2da9fa8f428760fc136a49cf97a52076010cf22a2ff73438a86", size = 173126, upload-time = "2026-02-04T22:54:39.462Z" }, + { url = "https://files.pythonhosted.org/packages/b1/79/af92d0a8369732b027e6d6084251dd8e782c685c72da161bd4a2e00fbabb/protobuf-6.33.5-cp310-abi3-win32.whl", hash = "sha256:d71b040839446bac0f4d162e758bea99c8251161dae9d0983a3b88dee345153b", size = 425769, upload-time = "2026-01-29T21:51:21.751Z" }, + { url = "https://files.pythonhosted.org/packages/55/75/bb9bc917d10e9ee13dee8607eb9ab963b7cf8be607c46e7862c748aa2af7/protobuf-6.33.5-cp310-abi3-win_amd64.whl", hash = "sha256:3093804752167bcab3998bec9f1048baae6e29505adaf1afd14a37bddede533c", size = 437118, upload-time = "2026-01-29T21:51:24.022Z" }, + { url = "https://files.pythonhosted.org/packages/a2/6b/e48dfc1191bc5b52950246275bf4089773e91cb5ba3592621723cdddca62/protobuf-6.33.5-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:a5cb85982d95d906df1e2210e58f8e4f1e3cdc088e52c921a041f9c9a0386de5", size = 427766, upload-time = "2026-01-29T21:51:25.413Z" }, + { url = "https://files.pythonhosted.org/packages/4e/b1/c79468184310de09d75095ed1314b839eb2f72df71097db9d1404a1b2717/protobuf-6.33.5-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:9b71e0281f36f179d00cbcb119cb19dec4d14a81393e5ea220f64b286173e190", size = 324638, upload-time = "2026-01-29T21:51:26.423Z" }, + { url = "https://files.pythonhosted.org/packages/c5/f5/65d838092fd01c44d16037953fd4c2cc851e783de9b8f02b27ec4ffd906f/protobuf-6.33.5-cp39-abi3-manylinux2014_s390x.whl", hash = "sha256:8afa18e1d6d20af15b417e728e9f60f3aa108ee76f23c3b2c07a2c3b546d3afd", size = 339411, upload-time = "2026-01-29T21:51:27.446Z" }, + { url = "https://files.pythonhosted.org/packages/9b/53/a9443aa3ca9ba8724fdfa02dd1887c1bcd8e89556b715cfbacca6b63dbec/protobuf-6.33.5-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:cbf16ba3350fb7b889fca858fb215967792dc125b35c7976ca4818bee3521cf0", size = 323465, upload-time = "2026-01-29T21:51:28.925Z" }, + { url = "https://files.pythonhosted.org/packages/57/bf/2086963c69bdac3d7cff1cc7ff79b8ce5ea0bec6797a017e1be338a46248/protobuf-6.33.5-py3-none-any.whl", hash = "sha256:69915a973dd0f60f31a08b8318b73eab2bd6a392c79184b3612226b0a3f8ec02", size = 170687, upload-time = "2026-01-29T21:51:32.557Z" }, ] [[package]] @@ -3784,7 +3778,7 @@ wheels = [ [[package]] name = "requests" -version = "2.32.5" +version = "2.33.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "certifi" }, @@ -3792,9 +3786,9 @@ dependencies = [ { name = "idna" }, { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517, upload-time = "2025-08-18T20:46:02.573Z" } +sdist = { url = "https://files.pythonhosted.org/packages/34/64/8860370b167a9721e8956ae116825caff829224fbca0ca6e7bf8ddef8430/requests-2.33.0.tar.gz", hash = "sha256:c7ebc5e8b0f21837386ad0e1c8fe8b829fa5f544d8df3b2253bff14ef29d7652", size = 134232, upload-time = "2026-03-25T15:10:41.586Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" }, + { url = "https://files.pythonhosted.org/packages/56/5d/c814546c2333ceea4ba42262d8c4d55763003e767fa169adc693bd524478/requests-2.33.0-py3-none-any.whl", hash = "sha256:3324635456fa185245e24865e810cecec7b4caf933d7eb133dcde67d48cee69b", size = 65017, upload-time = "2026-03-25T15:10:40.382Z" }, ] [[package]] @@ -3948,27 +3942,27 @@ wheels = [ [[package]] name = "ruff" -version = "0.15.7" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a1/22/9e4f66ee588588dc6c9af6a994e12d26e19efbe874d1a909d09a6dac7a59/ruff-0.15.7.tar.gz", hash = "sha256:04f1ae61fc20fe0b148617c324d9d009b5f63412c0b16474f3d5f1a1a665f7ac", size = 4601277, upload-time = "2026-03-19T16:26:22.605Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/41/2f/0b08ced94412af091807b6119ca03755d651d3d93a242682bf020189db94/ruff-0.15.7-py3-none-linux_armv6l.whl", hash = "sha256:a81cc5b6910fb7dfc7c32d20652e50fa05963f6e13ead3c5915c41ac5d16668e", size = 10489037, upload-time = "2026-03-19T16:26:32.47Z" }, - { url = "https://files.pythonhosted.org/packages/91/4a/82e0fa632e5c8b1eba5ee86ecd929e8ff327bbdbfb3c6ac5d81631bef605/ruff-0.15.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:722d165bd52403f3bdabc0ce9e41fc47070ac56d7a91b4e0d097b516a53a3477", size = 10955433, upload-time = "2026-03-19T16:27:00.205Z" }, - { url = "https://files.pythonhosted.org/packages/ab/10/12586735d0ff42526ad78c049bf51d7428618c8b5c467e72508c694119df/ruff-0.15.7-py3-none-macosx_11_0_arm64.whl", hash = "sha256:7fbc2448094262552146cbe1b9643a92f66559d3761f1ad0656d4991491af49e", size = 10269302, upload-time = "2026-03-19T16:26:26.183Z" }, - { url = "https://files.pythonhosted.org/packages/eb/5d/32b5c44ccf149a26623671df49cbfbd0a0ae511ff3df9d9d2426966a8d57/ruff-0.15.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b39329b60eba44156d138275323cc726bbfbddcec3063da57caa8a8b1d50adf", size = 10607625, upload-time = "2026-03-19T16:27:03.263Z" }, - { url = "https://files.pythonhosted.org/packages/5d/f1/f0001cabe86173aaacb6eb9bb734aa0605f9a6aa6fa7d43cb49cbc4af9c9/ruff-0.15.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:87768c151808505f2bfc93ae44e5f9e7c8518943e5074f76ac21558ef5627c85", size = 10324743, upload-time = "2026-03-19T16:27:09.791Z" }, - { url = "https://files.pythonhosted.org/packages/7a/87/b8a8f3d56b8d848008559e7c9d8bf367934d5367f6d932ba779456e2f73b/ruff-0.15.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fb0511670002c6c529ec66c0e30641c976c8963de26a113f3a30456b702468b0", size = 11138536, upload-time = "2026-03-19T16:27:06.101Z" }, - { url = "https://files.pythonhosted.org/packages/e4/f2/4fd0d05aab0c5934b2e1464784f85ba2eab9d54bffc53fb5430d1ed8b829/ruff-0.15.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e0d19644f801849229db8345180a71bee5407b429dd217f853ec515e968a6912", size = 11994292, upload-time = "2026-03-19T16:26:48.718Z" }, - { url = "https://files.pythonhosted.org/packages/64/22/fc4483871e767e5e95d1622ad83dad5ebb830f762ed0420fde7dfa9d9b08/ruff-0.15.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4806d8e09ef5e84eb19ba833d0442f7e300b23fe3f0981cae159a248a10f0036", size = 11398981, upload-time = "2026-03-19T16:26:54.513Z" }, - { url = "https://files.pythonhosted.org/packages/b0/99/66f0343176d5eab02c3f7fcd2de7a8e0dd7a41f0d982bee56cd1c24db62b/ruff-0.15.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dce0896488562f09a27b9c91b1f58a097457143931f3c4d519690dea54e624c5", size = 11242422, upload-time = "2026-03-19T16:26:29.277Z" }, - { url = "https://files.pythonhosted.org/packages/5d/3a/a7060f145bfdcce4c987ea27788b30c60e2c81d6e9a65157ca8afe646328/ruff-0.15.7-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:1852ce241d2bc89e5dc823e03cff4ce73d816b5c6cdadd27dbfe7b03217d2a12", size = 11232158, upload-time = "2026-03-19T16:26:42.321Z" }, - { url = "https://files.pythonhosted.org/packages/a7/53/90fbb9e08b29c048c403558d3cdd0adf2668b02ce9d50602452e187cd4af/ruff-0.15.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:5f3e4b221fb4bd293f79912fc5e93a9063ebd6d0dcbd528f91b89172a9b8436c", size = 10577861, upload-time = "2026-03-19T16:26:57.459Z" }, - { url = "https://files.pythonhosted.org/packages/2f/aa/5f486226538fe4d0f0439e2da1716e1acf895e2a232b26f2459c55f8ddad/ruff-0.15.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:b15e48602c9c1d9bdc504b472e90b90c97dc7d46c7028011ae67f3861ceba7b4", size = 10327310, upload-time = "2026-03-19T16:26:35.909Z" }, - { url = "https://files.pythonhosted.org/packages/99/9e/271afdffb81fe7bfc8c43ba079e9d96238f674380099457a74ccb3863857/ruff-0.15.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:1b4705e0e85cedc74b0a23cf6a179dbb3df184cb227761979cc76c0440b5ab0d", size = 10840752, upload-time = "2026-03-19T16:26:45.723Z" }, - { url = "https://files.pythonhosted.org/packages/bf/29/a4ae78394f76c7759953c47884eb44de271b03a66634148d9f7d11e721bd/ruff-0.15.7-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:112c1fa316a558bb34319282c1200a8bf0495f1b735aeb78bfcb2991e6087580", size = 11336961, upload-time = "2026-03-19T16:26:39.076Z" }, - { url = "https://files.pythonhosted.org/packages/26/6b/8786ba5736562220d588a2f6653e6c17e90c59ced34a2d7b512ef8956103/ruff-0.15.7-py3-none-win32.whl", hash = "sha256:6d39e2d3505b082323352f733599f28169d12e891f7dd407f2d4f54b4c2886de", size = 10582538, upload-time = "2026-03-19T16:26:15.992Z" }, - { url = "https://files.pythonhosted.org/packages/2b/e9/346d4d3fffc6871125e877dae8d9a1966b254fbd92a50f8561078b88b099/ruff-0.15.7-py3-none-win_amd64.whl", hash = "sha256:4d53d712ddebcd7dace1bc395367aec12c057aacfe9adbb6d832302575f4d3a1", size = 11755839, upload-time = "2026-03-19T16:26:19.897Z" }, - { url = "https://files.pythonhosted.org/packages/8f/e8/726643a3ea68c727da31570bde48c7a10f1aa60eddd628d94078fec586ff/ruff-0.15.7-py3-none-win_arm64.whl", hash = "sha256:18e8d73f1c3fdf27931497972250340f92e8c861722161a9caeb89a58ead6ed2", size = 11023304, upload-time = "2026-03-19T16:26:51.669Z" }, +version = "0.15.8" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/14/b0/73cf7550861e2b4824950b8b52eebdcc5adc792a00c514406556c5b80817/ruff-0.15.8.tar.gz", hash = "sha256:995f11f63597ee362130d1d5a327a87cb6f3f5eae3094c620bcc632329a4d26e", size = 4610921, upload-time = "2026-03-26T18:39:38.675Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4a/92/c445b0cd6da6e7ae51e954939cb69f97e008dbe750cfca89b8cedc081be7/ruff-0.15.8-py3-none-linux_armv6l.whl", hash = "sha256:cbe05adeba76d58162762d6b239c9056f1a15a55bd4b346cfd21e26cd6ad7bc7", size = 10527394, upload-time = "2026-03-26T18:39:41.566Z" }, + { url = "https://files.pythonhosted.org/packages/eb/92/f1c662784d149ad1414cae450b082cf736430c12ca78367f20f5ed569d65/ruff-0.15.8-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:d3e3d0b6ba8dca1b7ef9ab80a28e840a20070c4b62e56d675c24f366ef330570", size = 10905693, upload-time = "2026-03-26T18:39:30.364Z" }, + { url = "https://files.pythonhosted.org/packages/ca/f2/7a631a8af6d88bcef997eb1bf87cc3da158294c57044aafd3e17030613de/ruff-0.15.8-py3-none-macosx_11_0_arm64.whl", hash = "sha256:6ee3ae5c65a42f273f126686353f2e08ff29927b7b7e203b711514370d500de3", size = 10323044, upload-time = "2026-03-26T18:39:33.37Z" }, + { url = "https://files.pythonhosted.org/packages/67/18/1bf38e20914a05e72ef3b9569b1d5c70a7ef26cd188d69e9ca8ef588d5bf/ruff-0.15.8-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdce027ada77baa448077ccc6ebb2fa9c3c62fd110d8659d601cf2f475858d94", size = 10629135, upload-time = "2026-03-26T18:39:44.142Z" }, + { url = "https://files.pythonhosted.org/packages/d2/e9/138c150ff9af60556121623d41aba18b7b57d95ac032e177b6a53789d279/ruff-0.15.8-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:12e617fc01a95e5821648a6df341d80456bd627bfab8a829f7cfc26a14a4b4a3", size = 10348041, upload-time = "2026-03-26T18:39:52.178Z" }, + { url = "https://files.pythonhosted.org/packages/02/f1/5bfb9298d9c323f842c5ddeb85f1f10ef51516ac7a34ba446c9347d898df/ruff-0.15.8-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:432701303b26416d22ba696c39f2c6f12499b89093b61360abc34bcc9bf07762", size = 11121987, upload-time = "2026-03-26T18:39:55.195Z" }, + { url = "https://files.pythonhosted.org/packages/10/11/6da2e538704e753c04e8d86b1fc55712fdbdcc266af1a1ece7a51fff0d10/ruff-0.15.8-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d910ae974b7a06a33a057cb87d2a10792a3b2b3b35e33d2699fdf63ec8f6b17a", size = 11951057, upload-time = "2026-03-26T18:39:19.18Z" }, + { url = "https://files.pythonhosted.org/packages/83/f0/c9208c5fd5101bf87002fed774ff25a96eea313d305f1e5d5744698dc314/ruff-0.15.8-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2033f963c43949d51e6fdccd3946633c6b37c484f5f98c3035f49c27395a8ab8", size = 11464613, upload-time = "2026-03-26T18:40:06.301Z" }, + { url = "https://files.pythonhosted.org/packages/f8/22/d7f2fabdba4fae9f3b570e5605d5eb4500dcb7b770d3217dca4428484b17/ruff-0.15.8-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f29b989a55572fb885b77464cf24af05500806ab4edf9a0fd8977f9759d85b1", size = 11257557, upload-time = "2026-03-26T18:39:57.972Z" }, + { url = "https://files.pythonhosted.org/packages/71/8c/382a9620038cf6906446b23ce8632ab8c0811b8f9d3e764f58bedd0c9a6f/ruff-0.15.8-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:ac51d486bf457cdc985a412fb1801b2dfd1bd8838372fc55de64b1510eff4bec", size = 11169440, upload-time = "2026-03-26T18:39:22.205Z" }, + { url = "https://files.pythonhosted.org/packages/4d/0d/0994c802a7eaaf99380085e4e40c845f8e32a562e20a38ec06174b52ef24/ruff-0.15.8-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:c9861eb959edab053c10ad62c278835ee69ca527b6dcd72b47d5c1e5648964f6", size = 10605963, upload-time = "2026-03-26T18:39:46.682Z" }, + { url = "https://files.pythonhosted.org/packages/19/aa/d624b86f5b0aad7cef6bbf9cd47a6a02dfdc4f72c92a337d724e39c9d14b/ruff-0.15.8-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:8d9a5b8ea13f26ae90838afc33f91b547e61b794865374f114f349e9036835fb", size = 10357484, upload-time = "2026-03-26T18:39:49.176Z" }, + { url = "https://files.pythonhosted.org/packages/35/c3/e0b7835d23001f7d999f3895c6b569927c4d39912286897f625736e1fd04/ruff-0.15.8-py3-none-musllinux_1_2_i686.whl", hash = "sha256:c2a33a529fb3cbc23a7124b5c6ff121e4d6228029cba374777bd7649cc8598b8", size = 10830426, upload-time = "2026-03-26T18:40:03.702Z" }, + { url = "https://files.pythonhosted.org/packages/f0/51/ab20b322f637b369383adc341d761eaaa0f0203d6b9a7421cd6e783d81b9/ruff-0.15.8-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:75e5cd06b1cf3f47a3996cfc999226b19aa92e7cce682dcd62f80d7035f98f49", size = 11345125, upload-time = "2026-03-26T18:39:27.799Z" }, + { url = "https://files.pythonhosted.org/packages/37/e6/90b2b33419f59d0f2c4c8a48a4b74b460709a557e8e0064cf33ad894f983/ruff-0.15.8-py3-none-win32.whl", hash = "sha256:bc1f0a51254ba21767bfa9a8b5013ca8149dcf38092e6a9eb704d876de94dc34", size = 10571959, upload-time = "2026-03-26T18:39:36.117Z" }, + { url = "https://files.pythonhosted.org/packages/1f/a2/ef467cb77099062317154c63f234b8a7baf7cb690b99af760c5b68b9ee7f/ruff-0.15.8-py3-none-win_amd64.whl", hash = "sha256:04f79eff02a72db209d47d665ba7ebcad609d8918a134f86cb13dd132159fc89", size = 11743893, upload-time = "2026-03-26T18:39:25.01Z" }, + { url = "https://files.pythonhosted.org/packages/15/e2/77be4fff062fa78d9b2a4dea85d14785dac5f1d0c1fb58ed52331f0ebe28/ruff-0.15.8-py3-none-win_arm64.whl", hash = "sha256:cf891fa8e3bb430c0e7fac93851a5978fc99c8fa2c053b57b118972866f8e5f2", size = 11048175, upload-time = "2026-03-26T18:40:01.06Z" }, ] [[package]] @@ -4067,7 +4061,7 @@ wheels = [ [[package]] name = "strands-agents" -version = "1.32.0" +version = "1.33.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "boto3" }, @@ -4083,14 +4077,14 @@ dependencies = [ { name = "typing-extensions" }, { name = "watchdog" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d4/b9/c18d13bbf85c23eb66d9f61bced0c1f5a3ac18916331eabde049f6c4dd33/strands_agents-1.32.0.tar.gz", hash = "sha256:2e399bc5ea98d91dbcdf79913115aa579a6bb3251dfe6c15be114821cad893a4", size = 776171, upload-time = "2026-03-20T14:07:41.75Z" } +sdist = { url = "https://files.pythonhosted.org/packages/26/af/76200d7fe69417ebfbf9d3b65c898609a7d74d98d288cce82ca4734591d2/strands_agents-1.33.0.tar.gz", hash = "sha256:1707ae217c2e2700caedafd22ed1d4385cefe90d3debffac4de20cce76cfa676", size = 776194, upload-time = "2026-03-24T19:17:42.046Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/6b/a9/7e00371130dec1ae16df3fd0f7450aa2b92859b625818c793d178b2f1835/strands_agents-1.32.0-py3-none-any.whl", hash = "sha256:60c0eaae32ee1fc366ecebb10bca07681015104c111472a7378f71bbcdedbba4", size = 387030, upload-time = "2026-03-20T14:07:39.754Z" }, + { url = "https://files.pythonhosted.org/packages/13/99/b3056a03c7d6fb04c1d10afb8fa966b6a5fbce836e264faf663d136f69dd/strands_agents-1.33.0-py3-none-any.whl", hash = "sha256:037406bc86416d2ef3274658faacc35cb62fc5cc13b581d7049796b5e2cb6c33", size = 387070, upload-time = "2026-03-24T19:17:40.697Z" }, ] [[package]] name = "strands-agents-tools" -version = "0.2.23" +version = "0.3.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "aiohttp" }, @@ -4111,9 +4105,9 @@ dependencies = [ { name = "tzdata", marker = "sys_platform == 'win32'" }, { name = "watchdog" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a8/0b/95529cd0bbb0f3186adcae0476f506d3f098b95b552621195f190dcda14b/strands_agents_tools-0.2.23.tar.gz", hash = "sha256:40d0c30859f0cf00c62f5efacdb917ae541c7fe9affceee5adb63ba49657f887", size = 474100, upload-time = "2026-03-19T14:29:29.917Z" } +sdist = { url = "https://files.pythonhosted.org/packages/fb/ab/078ea5ccab4f97aaa80500de844e1c744e4e77e6f2a4c67f62f10040c6a3/strands_agents_tools-0.3.0.tar.gz", hash = "sha256:10913ca85acb6da36ae05f2507e4f0c88d6b8b8034d1b4db6e93f3f6b7264fd0", size = 476838, upload-time = "2026-03-26T19:25:45.97Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ab/3c/b5d8586448878e755590bfc59824abc2b137b21b8a8183008e13c8316b13/strands_agents_tools-0.2.23-py3-none-any.whl", hash = "sha256:d98b602002eb978850a32d597f94262dd3a95c6871ab2007fbb0ad165799d8df", size = 312782, upload-time = "2026-03-19T14:29:27.768Z" }, + { url = "https://files.pythonhosted.org/packages/49/42/61dd37e0ad3c8b22cce83ef4c7ee21e8f6f3fc071f5096106090cb8ded60/strands_agents_tools-0.3.0-py3-none-any.whl", hash = "sha256:0d55aae56abfe1e336cdb718027d8780938881135b61b15f1dfbb1fa6e2d7cba", size = 313555, upload-time = "2026-03-26T19:25:44.147Z" }, ] [[package]] @@ -4314,16 +4308,16 @@ wheels = [ [[package]] name = "uvicorn" -version = "0.35.0" +version = "0.42.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "click" }, { name = "h11" }, { name = "typing-extensions", marker = "python_full_version < '3.11'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/5e/42/e0e305207bb88c6b8d3061399c6a961ffe5fbb7e2aa63c9234df7259e9cd/uvicorn-0.35.0.tar.gz", hash = "sha256:bc662f087f7cf2ce11a1d7fd70b90c9f98ef2e2831556dd078d131b96cc94a01", size = 78473, upload-time = "2025-06-28T16:15:46.058Z" } +sdist = { url = "https://files.pythonhosted.org/packages/e3/ad/4a96c425be6fb67e0621e62d86c402b4a17ab2be7f7c055d9bd2f638b9e2/uvicorn-0.42.0.tar.gz", hash = "sha256:9b1f190ce15a2dd22e7758651d9b6d12df09a13d51ba5bf4fc33c383a48e1775", size = 85393, upload-time = "2026-03-16T06:19:50.077Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d2/e2/dc81b1bd1dcfe91735810265e9d26bc8ec5da45b4c0f6237e286819194c3/uvicorn-0.35.0-py3-none-any.whl", hash = "sha256:197535216b25ff9b785e29a0b79199f55222193d47f820816e7da751e9bc8d4a", size = 66406, upload-time = "2025-06-28T16:15:44.816Z" }, + { url = "https://files.pythonhosted.org/packages/0a/89/f8827ccff89c1586027a105e5630ff6139a64da2515e24dafe860bd9ae4d/uvicorn-0.42.0-py3-none-any.whl", hash = "sha256:96c30f5c7abe6f74ae8900a70e92b85ad6613b745d4879eb9b16ccad15645359", size = 68830, upload-time = "2026-03-16T06:19:48.325Z" }, ] [package.optional-dependencies] diff --git a/codeql-alerts.json b/codeql-alerts.json new file mode 100644 index 00000000..ba235ef4 --- /dev/null +++ b/codeql-alerts.json @@ -0,0 +1,3372 @@ +[ + { + "number": 522, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/apis/app_api/shares/service.py", + "start_line": 7, + "end_line": 7, + "message": "Import of 'json' is not used." + }, + { + "number": 508, + "rule": "actions/untrusted-checkout/medium", + "severity": "medium", + "description": "Checkout of untrusted code in trusted context", + "file": ".github/workflows/nightly.yml", + "start_line": 599, + "end_line": 604, + "message": "Potential unsafe checkout of untrusted pull request on privileged workflow." + }, + { + "number": 507, + "rule": "actions/untrusted-checkout/medium", + "severity": "medium", + "description": "Checkout of untrusted code in trusted context", + "file": ".github/workflows/nightly.yml", + "start_line": 520, + "end_line": 525, + "message": "Potential unsafe checkout of untrusted pull request on privileged workflow." + }, + { + "number": 506, + "rule": "actions/untrusted-checkout/medium", + "severity": "medium", + "description": "Checkout of untrusted code in trusted context", + "file": ".github/workflows/nightly.yml", + "start_line": 367, + "end_line": 372, + "message": "Potential unsafe checkout of untrusted pull request on privileged workflow." + }, + { + "number": 505, + "rule": "actions/untrusted-checkout/medium", + "severity": "medium", + "description": "Checkout of untrusted code in trusted context", + "file": ".github/workflows/nightly.yml", + "start_line": 324, + "end_line": 329, + "message": "Potential unsafe checkout of untrusted pull request on privileged workflow." + }, + { + "number": 504, + "rule": "actions/untrusted-checkout/medium", + "severity": "medium", + "description": "Checkout of untrusted code in trusted context", + "file": ".github/workflows/nightly.yml", + "start_line": 276, + "end_line": 281, + "message": "Potential unsafe checkout of untrusted pull request on privileged workflow." + }, + { + "number": 503, + "rule": "actions/untrusted-checkout/medium", + "severity": "medium", + "description": "Checkout of untrusted code in trusted context", + "file": ".github/workflows/nightly.yml", + "start_line": 240, + "end_line": 245, + "message": "Potential unsafe checkout of untrusted pull request on privileged workflow." + }, + { + "number": 502, + "rule": "actions/untrusted-checkout/medium", + "severity": "medium", + "description": "Checkout of untrusted code in trusted context", + "file": ".github/workflows/nightly.yml", + "start_line": 198, + "end_line": 203, + "message": "Potential unsafe checkout of untrusted pull request on privileged workflow." + }, + { + "number": 501, + "rule": "actions/untrusted-checkout/medium", + "severity": "medium", + "description": "Checkout of untrusted code in trusted context", + "file": ".github/workflows/nightly-deploy-pipeline.yml", + "start_line": 82, + "end_line": 87, + "message": "Potential unsafe checkout of untrusted pull request on privileged workflow." + }, + { + "number": 500, + "rule": "actions/untrusted-checkout/medium", + "severity": "medium", + "description": "Checkout of untrusted code in trusted context", + "file": ".github/workflows/nightly-deploy-pipeline.yml", + "start_line": 51, + "end_line": 56, + "message": "Potential unsafe checkout of untrusted pull request on privileged workflow." + }, + { + "number": 489, + "rule": "py/unused-global-variable", + "severity": "note", + "description": "Unused global variable", + "file": "backend/src/apis/app_api/documents/ingestion/embeddings/bedrock_embeddings.py", + "start_line": 130, + "end_line": 130, + "message": "The global variable '_validate_and_split_chunks' is not used." + }, + { + "number": 488, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/apis/app_api/documents/ingestion/embeddings/bedrock_embeddings.py", + "start_line": 16, + "end_line": 22, + "message": "Import of 'BEDROCK_EMBEDDING_CONFIG' is not used." + }, + { + "number": 436, + "rule": "js/comparison-between-incompatible-types", + "severity": "warning", + "description": "Comparison between inconvertible types", + "file": "frontend/ai.client/src/app/session/services/session/session.service.ts", + "start_line": 222, + "end_line": 222, + "message": "Variable 'apiResponse' cannot be of type null, but it is compared to an expression of type null." + }, + { + "number": 435, + "rule": "js/unused-local-variable", + "severity": "note", + "description": "Unused variable, import, function or class", + "file": "infrastructure/lib/infrastructure-stack.ts", + "start_line": 244, + "end_line": 244, + "message": "Unused variable httpRedirectListener." + }, + { + "number": 434, + "rule": "js/unused-local-variable", + "severity": "note", + "description": "Unused variable, import, function or class", + "file": "infrastructure/lib/rag-ingestion-stack.ts", + "start_line": 234, + "end_line": 234, + "message": "Unused variable containerImageUri." + }, + { + "number": 433, + "rule": "js/unused-local-variable", + "severity": "note", + "description": "Unused variable, import, function or class", + "file": "infrastructure/lib/rag-ingestion-stack.ts", + "start_line": 72, + "end_line": 72, + "message": "Unused variable vpc." + }, + { + "number": 432, + "rule": "js/unused-local-variable", + "severity": "note", + "description": "Unused variable, import, function or class", + "file": "infrastructure/lib/app-api-stack.ts", + "start_line": 75, + "end_line": 75, + "message": "Unused variable alb." + }, + { + "number": 431, + "rule": "js/unused-local-variable", + "severity": "note", + "description": "Unused variable, import, function or class", + "file": "infrastructure/lib/inference-api-stack.ts", + "start_line": 62, + "end_line": 62, + "message": "Unused variable containerImageUri." + }, + { + "number": 430, + "rule": "js/unused-local-variable", + "severity": "note", + "description": "Unused variable, import, function or class", + "file": "infrastructure/lib/app-api-stack.ts", + "start_line": 11, + "end_line": 11, + "message": "Unused import kms." + }, + { + "number": 429, + "rule": "js/unused-local-variable", + "severity": "note", + "description": "Unused variable, import, function or class", + "file": "infrastructure/lib/app-api-stack.ts", + "start_line": 7, + "end_line": 7, + "message": "Unused import secretsmanager." + }, + { + "number": 427, + "rule": "js/unused-local-variable", + "severity": "note", + "description": "Unused variable, import, function or class", + "file": "infrastructure/lib/frontend-stack.ts", + "start_line": 126, + "end_line": 126, + "message": "Unused variable oac." + }, + { + "number": 426, + "rule": "js/unused-local-variable", + "severity": "note", + "description": "Unused variable, import, function or class", + "file": "frontend/ai.client/src/app/settings/settings.page.ts", + "start_line": 1, + "end_line": 7, + "message": "Unused imports computed, signal." + }, + { + "number": 425, + "rule": "js/unused-local-variable", + "severity": "note", + "description": "Unused variable, import, function or class", + "file": "frontend/ai.client/src/app/settings/oauth-callback/oauth-callback.page.ts", + "start_line": 1, + "end_line": 9, + "message": "Unused import computed." + }, + { + "number": 424, + "rule": "js/unused-local-variable", + "severity": "note", + "description": "Unused variable, import, function or class", + "file": "frontend/ai.client/src/app/settings/connections/services/connections.service.ts", + "start_line": 6, + "end_line": 12, + "message": "Unused import OAuthConnectResponse." + }, + { + "number": 423, + "rule": "js/unused-local-variable", + "severity": "note", + "description": "Unused variable, import, function or class", + "file": "frontend/ai.client/src/app/session/components/message-list/message-list.component.ts", + "start_line": 44, + "end_line": 44, + "message": "Unused variable messageCount." + }, + { + "number": 422, + "rule": "js/unused-local-variable", + "severity": "note", + "description": "Unused variable, import, function or class", + "file": "frontend/ai.client/src/app/memory/memory-dashboard.page.ts", + "start_line": 14, + "end_line": 14, + "message": "Unused import MemoryRecord." + }, + { + "number": 421, + "rule": "js/unused-local-variable", + "severity": "note", + "description": "Unused variable, import, function or class", + "file": "frontend/ai.client/src/app/components/tooltip/tooltip.directive.ts", + "start_line": 21, + "end_line": 21, + "message": "Unused import merge." + }, + { + "number": 420, + "rule": "js/unused-local-variable", + "severity": "note", + "description": "Unused variable, import, function or class", + "file": "frontend/ai.client/src/app/components/tooltip/tooltip.directive.ts", + "start_line": 13, + "end_line": 18, + "message": "Unused import ScrollStrategy." + }, + { + "number": 419, + "rule": "js/unused-local-variable", + "severity": "note", + "description": "Unused variable, import, function or class", + "file": "frontend/ai.client/src/app/components/tooltip/tooltip.directive.ts", + "start_line": 1, + "end_line": 12, + "message": "Unused import effect." + }, + { + "number": 418, + "rule": "js/unused-local-variable", + "severity": "note", + "description": "Unused variable, import, function or class", + "file": "frontend/ai.client/src/app/components/toast/toast.component.ts", + "start_line": 10, + "end_line": 10, + "message": "Unused import ToastMessage." + }, + { + "number": 417, + "rule": "js/unused-local-variable", + "severity": "note", + "description": "Unused variable, import, function or class", + "file": "frontend/ai.client/src/app/components/error-toast/error-toast.component.ts", + "start_line": 2, + "end_line": 2, + "message": "Unused import ErrorMessage." + }, + { + "number": 416, + "rule": "js/unused-local-variable", + "severity": "note", + "description": "Unused variable, import, function or class", + "file": "frontend/ai.client/src/app/assistants/services/assistant.service.ts", + "start_line": 3, + "end_line": 11, + "message": "Unused imports ShareAssistantRequest, UnshareAssistantRequest." + }, + { + "number": 415, + "rule": "js/unused-local-variable", + "severity": "note", + "description": "Unused variable, import, function or class", + "file": "frontend/ai.client/src/app/assistants/assistants.page.ts", + "start_line": 1, + "end_line": 1, + "message": "Unused import signal." + }, + { + "number": 414, + "rule": "js/unused-local-variable", + "severity": "note", + "description": "Unused variable, import, function or class", + "file": "frontend/ai.client/src/app/app.routes.ts", + "start_line": 2, + "end_line": 2, + "message": "Unused import ConversationPage." + }, + { + "number": 413, + "rule": "js/unused-local-variable", + "severity": "note", + "description": "Unused variable, import, function or class", + "file": "frontend/ai.client/src/app/admin/tools/services/admin-tool.service.ts", + "start_line": 6, + "end_line": 15, + "message": "Unused import SetToolRolesRequest." + }, + { + "number": 412, + "rule": "js/unused-local-variable", + "severity": "note", + "description": "Unused variable, import, function or class", + "file": "frontend/ai.client/src/app/admin/tools/pages/tool-form.page.ts", + "start_line": 23, + "end_line": 35, + "message": "Unused imports AdminTool, ToolFormData." + }, + { + "number": 411, + "rule": "js/unused-local-variable", + "severity": "note", + "description": "Unused variable, import, function or class", + "file": "frontend/ai.client/src/app/admin/quota-tiers/services/quota-state.service.ts", + "start_line": 2, + "end_line": 2, + "message": "Unused import toSignal." + }, + { + "number": 410, + "rule": "js/unused-local-variable", + "severity": "note", + "description": "Unused variable, import, function or class", + "file": "frontend/ai.client/src/app/admin/quota-tiers/pages/tier-list/tier-list.component.ts", + "start_line": 6, + "end_line": 6, + "message": "Unused import QuotaTier." + }, + { + "number": 409, + "rule": "js/unused-local-variable", + "severity": "note", + "description": "Unused variable, import, function or class", + "file": "frontend/ai.client/src/app/admin/manage-models/services/managed-models.service.ts", + "start_line": 1, + "end_line": 1, + "message": "Unused import signal." + }, + { + "number": 408, + "rule": "js/unused-local-variable", + "severity": "note", + "description": "Unused variable, import, function or class", + "file": "frontend/ai.client/src/app/admin/costs/admin-costs.page.ts", + "start_line": 17, + "end_line": 20, + "message": "Unused import SummaryCardIcon." + }, + { + "number": 407, + "rule": "js/useless-assignment-to-local", + "severity": "warning", + "description": "Useless assignment to local variable", + "file": "frontend/ai.client/src/index.html", + "start_line": 24, + "end_line": 24, + "message": "The initial value of theme is unused, since it is always overwritten." + }, + { + "number": 406, + "rule": "js/useless-assignment-to-local", + "severity": "warning", + "description": "Useless assignment to local variable", + "file": "frontend/ai.client/src/app/session/services/chat/chat-http.service.ts", + "start_line": 101, + "end_line": 101, + "message": "The value assigned to errorDetail here is unused." + }, + { + "number": 405, + "rule": "js/useless-assignment-to-local", + "severity": "warning", + "description": "Useless assignment to local variable", + "file": "frontend/ai.client/src/app/session/services/chat/chat-http.service.ts", + "start_line": 82, + "end_line": 82, + "message": "The value assigned to errorDetail here is unused." + }, + { + "number": 404, + "rule": "py/commented-out-code", + "severity": "note", + "description": "Commented-out code", + "file": "backend/src/apis/inference_api/chat/models.py", + "start_line": 41, + "end_line": 43, + "message": "This comment appears to contain commented-out code." + }, + { + "number": 403, + "rule": "py/catch-base-exception", + "severity": "note", + "description": "Except block handles 'BaseException'", + "file": "backend/src/agents/local_tools/url_fetcher.py", + "start_line": 118, + "end_line": 118, + "message": "Except block directly handles BaseException." + }, + { + "number": 402, + "rule": "py/catch-base-exception", + "severity": "note", + "description": "Except block handles 'BaseException'", + "file": "backend/src/agents/builtin_tools/code_interpreter_diagram_tool.py", + "start_line": 227, + "end_line": 227, + "message": "Except block directly handles BaseException." + }, + { + "number": 401, + "rule": "py/empty-except", + "severity": "note", + "description": "Empty except", + "file": "backend/src/agents/local_tools/url_fetcher.py", + "start_line": 118, + "end_line": 118, + "message": "'except' clause does nothing but pass and there is no explanatory comment." + }, + { + "number": 400, + "rule": "py/empty-except", + "severity": "note", + "description": "Empty except", + "file": "backend/src/agents/main_agent/streaming/tool_result_processor.py", + "start_line": 94, + "end_line": 94, + "message": "'except' clause does nothing but pass and there is no explanatory comment." + }, + { + "number": 399, + "rule": "py/empty-except", + "severity": "note", + "description": "Empty except", + "file": "backend/src/apis/app_api/admin/users/service.py", + "start_line": 66, + "end_line": 66, + "message": "'except' clause does nothing but pass and there is no explanatory comment." + }, + { + "number": 398, + "rule": "py/empty-except", + "severity": "note", + "description": "Empty except", + "file": "backend/src/agents/main_agent/streaming/event_formatter.py", + "start_line": 43, + "end_line": 43, + "message": "'except' clause does nothing but pass and there is no explanatory comment." + }, + { + "number": 397, + "rule": "py/empty-except", + "severity": "note", + "description": "Empty except", + "file": "backend/src/agents/builtin_tools/code_interpreter_diagram_tool.py", + "start_line": 227, + "end_line": 227, + "message": "'except' clause does nothing but pass and there is no explanatory comment." + }, + { + "number": 396, + "rule": "py/print-during-import", + "severity": "note", + "description": "Use of a print statement at module level", + "file": "backend/src/apis/inference_api/main.py", + "start_line": 24, + "end_line": 24, + "message": "Print statement may execute during import." + }, + { + "number": 395, + "rule": "py/print-during-import", + "severity": "note", + "description": "Use of a print statement at module level", + "file": "backend/src/apis/inference_api/main.py", + "start_line": 22, + "end_line": 22, + "message": "Print statement may execute during import." + }, + { + "number": 394, + "rule": "py/non-iterable-in-for-loop", + "severity": "error", + "description": "Non-iterable used in for loop", + "file": "backend/src/agents/main_agent/quota/repository.py", + "start_line": 309, + "end_line": 309, + "message": "This for-loop may attempt to iterate over a non-iterable instance of class type." + }, + { + "number": 393, + "rule": "py/unreachable-statement", + "severity": "warning", + "description": "Unreachable code", + "file": "backend/src/agents/main_agent/streaming/stream_processor.py", + "start_line": 1294, + "end_line": 1294, + "message": "This statement is unreachable." + }, + { + "number": 392, + "rule": "py/unnecessary-lambda", + "severity": "note", + "description": "Unnecessary lambda", + "file": "backend/src/apis/app_api/fine_tuning/job_repository.py", + "start_line": 221, + "end_line": 221, + "message": "This 'lambda' is just a simple wrapper around a callable object. Use that object directly." + }, + { + "number": 391, + "rule": "py/unnecessary-lambda", + "severity": "note", + "description": "Unnecessary lambda", + "file": "backend/src/apis/app_api/fine_tuning/inference_repository.py", + "start_line": 233, + "end_line": 233, + "message": "This 'lambda' is just a simple wrapper around a callable object. Use that object directly." + }, + { + "number": 389, + "rule": "py/unused-local-variable", + "severity": "note", + "description": "Unused local variable", + "file": "backend/src/agents/main_agent/streaming/tool_result_processor.py", + "start_line": 295, + "end_line": 295, + "message": "Variable matches is not used." + }, + { + "number": 388, + "rule": "py/unused-local-variable", + "severity": "note", + "description": "Unused local variable", + "file": "backend/src/apis/app_api/admin/services/tool_access.py", + "start_line": 95, + "end_line": 95, + "message": "Variable requested_set is not used." + }, + { + "number": 387, + "rule": "py/unused-local-variable", + "severity": "note", + "description": "Unused local variable", + "file": "backend/src/apis/app_api/documents/ingestion/status.py", + "start_line": 121, + "end_line": 121, + "message": "Variable exception_type is not used." + }, + { + "number": 386, + "rule": "py/unused-local-variable", + "severity": "note", + "description": "Unused local variable", + "file": "backend/src/apis/app_api/files/service.py", + "start_line": 288, + "end_line": 288, + "message": "Variable updated is not used." + }, + { + "number": 385, + "rule": "py/unused-local-variable", + "severity": "note", + "description": "Unused local variable", + "file": "backend/src/apis/app_api/admin/quota/service.py", + "start_line": 340, + "end_line": 340, + "message": "Variable limit is not used." + }, + { + "number": 384, + "rule": "py/unused-local-variable", + "severity": "note", + "description": "Unused local variable", + "file": "backend/src/apis/inference_api/chat/routes.py", + "start_line": 480, + "end_line": 480, + "message": "Variable preferences is not used." + }, + { + "number": 383, + "rule": "py/unused-local-variable", + "severity": "note", + "description": "Unused local variable", + "file": "backend/src/agents/builtin_tools/code_interpreter_diagram_tool.py", + "start_line": 170, + "end_line": 170, + "message": "Variable execution_output is not used." + }, + { + "number": 382, + "rule": "py/unused-local-variable", + "severity": "note", + "description": "Unused local variable", + "file": "backend/src/apis/app_api/costs/aggregator.py", + "start_line": 289, + "end_line": 289, + "message": "Variable next_year is not used." + }, + { + "number": 381, + "rule": "py/unused-local-variable", + "severity": "note", + "description": "Unused local variable", + "file": "backend/src/apis/app_api/costs/aggregator.py", + "start_line": 288, + "end_line": 288, + "message": "Variable next_month is not used." + }, + { + "number": 380, + "rule": "py/unused-local-variable", + "severity": "note", + "description": "Unused local variable", + "file": "backend/src/apis/app_api/costs/aggregator.py", + "start_line": 286, + "end_line": 286, + "message": "Variable next_year is not used." + }, + { + "number": 379, + "rule": "py/unused-local-variable", + "severity": "note", + "description": "Unused local variable", + "file": "backend/src/apis/app_api/costs/aggregator.py", + "start_line": 285, + "end_line": 285, + "message": "Variable next_month is not used." + }, + { + "number": 377, + "rule": "py/unused-global-variable", + "severity": "note", + "description": "Unused global variable", + "file": "backend/src/apis/shared/auth/dependencies.py", + "start_line": 64, + "end_line": 64, + "message": "The global variable '_generic_validator_initialized' is not used." + }, + { + "number": 376, + "rule": "py/multiple-definition", + "severity": "warning", + "description": "Variable defined multiple times", + "file": "backend/src/apis/app_api/fine_tuning/routes.py", + "start_line": 723, + "end_line": 723, + "message": "This assignment to 'job' is unnecessary as it is redefined before this value is used." + }, + { + "number": 374, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/agents/local_tools/url_fetcher.py", + "start_line": 8, + "end_line": 8, + "message": "Import of 'Optional' is not used." + }, + { + "number": 373, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/agents/local_tools/url_fetcher.py", + "start_line": 6, + "end_line": 6, + "message": "Import of 'json' is not used." + }, + { + "number": 371, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/agents/main_agent/tools/tool_catalog.py", + "start_line": 7, + "end_line": 7, + "message": "Import of 'field' is not used." + }, + { + "number": 370, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/agents/main_agent/utils/timezone.py", + "start_line": 15, + "end_line": 15, + "message": "Import of 'pytz' is not used." + }, + { + "number": 369, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/apis/app_api/admin/services/tests/test_model_access.py", + "start_line": 9, + "end_line": 9, + "message": "Import of 'MagicMock' is not used." + }, + { + "number": 368, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/agents/main_agent/session/tests/test_compaction_integration.py", + "start_line": 25, + "end_line": 25, + "message": "Import of 'datetime' is not used." + }, + { + "number": 367, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/agents/main_agent/session/tests/test_compaction.py", + "start_line": 11, + "end_line": 11, + "message": "Import of 'TurnBasedSessionManager' is not used." + }, + { + "number": 366, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/apis/app_api/costs/tests/test_calculator.py", + "start_line": 5, + "end_line": 5, + "message": "Import of 'CostBreakdown' is not used." + }, + { + "number": 365, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/apis/app_api/sessions/tests/test_cache_savings.py", + "start_line": 4, + "end_line": 4, + "message": "Import of 'MagicMock' is not used." + }, + { + "number": 364, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/agents/main_agent/streaming/stream_coordinator.py", + "start_line": 13, + "end_line": 13, + "message": "Import of 'ConversationalErrorEvent' is not used." + }, + { + "number": 363, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/apis/app_api/documents/ingestion/status.py", + "start_line": 9, + "end_line": 9, + "message": "Import of 'uuid' is not used." + }, + { + "number": 362, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/agents/main_agent/session/session_factory.py", + "start_line": 21, + "end_line": 21, + "message": "Import of 'AgentCoreMemorySessionManager' is not used." + }, + { + "number": 361, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/apis/app_api/admin/users/service.py", + "start_line": 11, + "end_line": 11, + "message": "Import of 'UserProfile' is not used." + }, + { + "number": 360, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/apis/app_api/tools/service.py", + "start_line": 17, + "end_line": 27, + "message": "Import of 'AdminToolResponse' is not used." + }, + { + "number": 359, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/apis/app_api/tools/service.py", + "start_line": 10, + "end_line": 10, + "message": "Import of 'datetime' is not used." + }, + { + "number": 358, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/apis/app_api/tools/service.py", + "start_line": 9, + "end_line": 9, + "message": "Import of 'Set' is not used." + }, + { + "number": 357, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/apis/shared/oauth/service.py", + "start_line": 20, + "end_line": 26, + "message": "Import of 'compute_scopes_hash' is not used." + }, + { + "number": 356, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/apis/app_api/files/service.py", + "start_line": 17, + "end_line": 29, + "message": "Import of 'UserFileQuota' is not used." + }, + { + "number": 355, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/apis/app_api/files/service.py", + "start_line": 11, + "end_line": 11, + "message": "Import of 'Tuple' is not used." + }, + { + "number": 354, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/apis/inference_api/chat/service.py", + "start_line": 11, + "end_line": 11, + "message": "Import of 'lru_cache' is not used." + }, + { + "number": 353, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/apis/inference_api/chat/service.py", + "start_line": 8, + "end_line": 8, + "message": "Import of 'json' is not used." + }, + { + "number": 352, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/scripts/seed_auth_provider.py", + "start_line": 53, + "end_line": 53, + "message": "Import of 'Optional' is not used." + }, + { + "number": 351, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/apis/app_api/fine_tuning/routes.py", + "start_line": 20, + "end_line": 29, + "message": "Import of 'AvailableModel' is not used." + }, + { + "number": 350, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/apis/app_api/fine_tuning/s3_service.py", + "start_line": 6, + "end_line": 6, + "message": "Import of 'datetime' is not used." + }, + { + "number": 349, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/apis/inference_api/chat/routes.py", + "start_line": 28, + "end_line": 28, + "message": "Import of 'ResolvedFileContent' is not used." + }, + { + "number": 348, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/apis/inference_api/chat/routes.py", + "start_line": 22, + "end_line": 27, + "message": "Import of 'create_error_response' is not used." + }, + { + "number": 347, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/apis/inference_api/chat/routes.py", + "start_line": 20, + "end_line": 20, + "message": "Import of 'get_current_user' is not used." + }, + { + "number": 346, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/apis/inference_api/chat/routes.py", + "start_line": 13, + "end_line": 13, + "message": "Import of 'datetime' is not used." + }, + { + "number": 345, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/apis/app_api/admin/costs/routes.py", + "start_line": 9, + "end_line": 9, + "message": "Import of 'status' is not used." + }, + { + "number": 344, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/apis/app_api/users/routes.py", + "start_line": 11, + "end_line": 11, + "message": "Import of 'UserProfile' is not used." + }, + { + "number": 343, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/apis/app_api/users/routes.py", + "start_line": 4, + "end_line": 4, + "message": "Import of 'List' is not used." + }, + { + "number": 342, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/apis/app_api/admin/auth_providers/routes.py", + "start_line": 8, + "end_line": 8, + "message": "Import of 'Optional' is not used." + }, + { + "number": 341, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/apis/app_api/admin/routes.py", + "start_line": 43, + "end_line": 46, + "message": "Import of 'ModelAccessService' is not used." + }, + { + "number": 340, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/apis/app_api/admin/routes.py", + "start_line": 35, + "end_line": 35, + "message": "Import of 'get_messages' is not used." + }, + { + "number": 339, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/apis/app_api/admin/routes.py", + "start_line": 34, + "end_line": 34, + "message": "Import of 'list_user_sessions' is not used." + }, + { + "number": 338, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/apis/app_api/admin/routes.py", + "start_line": 33, + "end_line": 33, + "message": "Import of 'require_roles' is not used." + }, + { + "number": 337, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/apis/app_api/admin/routes.py", + "start_line": 15, + "end_line": 27, + "message": "Import of 'UserInfo' is not used." + }, + { + "number": 336, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/apis/app_api/admin/routes.py", + "start_line": 12, + "end_line": 12, + "message": "Import of 'datetime' is not used." + }, + { + "number": 335, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/apis/app_api/admin/tools/routes.py", + "start_line": 9, + "end_line": 9, + "message": "Import of 'ToolCatalogService' is not used." + }, + { + "number": 334, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/apis/app_api/chat/routes.py", + "start_line": 34, + "end_line": 39, + "message": "Import of 'ConversationalErrorEvent' is not used." + }, + { + "number": 333, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/apis/app_api/chat/routes.py", + "start_line": 23, + "end_line": 23, + "message": "Import of 'ResolvedFileContent' is not used." + }, + { + "number": 332, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/apis/app_api/chat/routes.py", + "start_line": 15, + "end_line": 15, + "message": "Import of 'status' is not used." + }, + { + "number": 331, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/apis/app_api/chat/routes.py", + "start_line": 13, + "end_line": 13, + "message": "Import of 'AsyncGenerator' is not used." + }, + { + "number": 330, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/apis/app_api/admin/roles/routes.py", + "start_line": 22, + "end_line": 22, + "message": "Import of 'get_app_role_service' is not used." + }, + { + "number": 329, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/apis/app_api/admin/roles/routes.py", + "start_line": 9, + "end_line": 13, + "message": "Import of 'AppRoleService' is not used." + }, + { + "number": 328, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/apis/app_api/admin/roles/routes.py", + "start_line": 4, + "end_line": 4, + "message": "Import of 'Optional' is not used." + }, + { + "number": 327, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/agents/main_agent/quota/repository.py", + "start_line": 8, + "end_line": 8, + "message": "Import of 'uuid' is not used." + }, + { + "number": 326, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/apis/app_api/auth/api_keys/repository.py", + "start_line": 20, + "end_line": 20, + "message": "Import of 'List' is not used." + }, + { + "number": 325, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/apis/shared/rbac/repository.py", + "start_line": 11, + "end_line": 11, + "message": "Import of 'EffectivePermissions' is not used." + }, + { + "number": 324, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/apis/shared/auth/rbac.py", + "start_line": 3, + "end_line": 3, + "message": "Import of 'List' is not used." + }, + { + "number": 323, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/apis/shared/quota.py", + "start_line": 10, + "end_line": 10, + "message": "Import of 'Decimal' is not used." + }, + { + "number": 322, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/apis/shared/oauth/provider_repository.py", + "start_line": 7, + "end_line": 7, + "message": "Import of 'Dict' is not used." + }, + { + "number": 321, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/agents/main_agent/session/preview_session_manager.py", + "start_line": 13, + "end_line": 13, + "message": "Import of 'Message' is not used." + }, + { + "number": 320, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/agents/main_agent/session/preview_session_manager.py", + "start_line": 12, + "end_line": 12, + "message": "Import of 'Optional' is not used." + }, + { + "number": 319, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/agents/main_agent/tools/oauth_tool_service.py", + "start_line": 37, + "end_line": 37, + "message": "Import of 'urlencode' is not used." + }, + { + "number": 318, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/agents/main_agent/integrations/oauth_auth.py", + "start_line": 9, + "end_line": 9, + "message": "Import of 'Awaitable' is not used." + }, + { + "number": 317, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/agents/main_agent/quota/models.py", + "start_line": 3, + "end_line": 3, + "message": "Import of 'model_serializer' is not used." + }, + { + "number": 316, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/apis/app_api/admin/quota/models.py", + "start_line": 5, + "end_line": 11, + "message": "Import of 'QuotaEvent' is not used." + }, + { + "number": 315, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/apis/shared/rbac/models.py", + "start_line": 5, + "end_line": 5, + "message": "Import of 'datetime' is not used." + }, + { + "number": 314, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/apis/app_api/auth/api_keys/models.py", + "start_line": 4, + "end_line": 4, + "message": "Import of 'List' is not used." + }, + { + "number": 313, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/apis/app_api/auth/api_keys/models.py", + "start_line": 3, + "end_line": 3, + "message": "Import of 'datetime' is not used." + }, + { + "number": 312, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/apis/shared/files/models.py", + "start_line": 11, + "end_line": 11, + "message": "Import of 'time' is not used." + }, + { + "number": 311, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/apis/app_api/costs/models.py", + "start_line": 4, + "end_line": 4, + "message": "Import of 'Optional' is not used." + }, + { + "number": 310, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/agents/main_agent/core/model_config.py", + "start_line": 6, + "end_line": 6, + "message": "Import of 'field' is not used." + }, + { + "number": 309, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/agents/main_agent/core/model_config.py", + "start_line": 5, + "end_line": 5, + "message": "Import of 'Literal' is not used." + }, + { + "number": 308, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/lambda-functions/runtime-provisioner/lambda_function.py", + "start_line": 15, + "end_line": 15, + "message": "Import of 'List' is not used." + }, + { + "number": 307, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/apis/app_api/fine_tuning/job_repository.py", + "start_line": 5, + "end_line": 5, + "message": "Import of 'uuid' is not used." + }, + { + "number": 306, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/apis/app_api/fine_tuning/sagemaker_scripts/inference.py", + "start_line": 23, + "end_line": 23, + "message": "Import of 'np' is not used." + }, + { + "number": 305, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/apis/shared/files/file_resolver.py", + "start_line": 13, + "end_line": 13, + "message": "Import of 'TYPE_CHECKING' is not used." + }, + { + "number": 304, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/agents/main_agent/integrations/external_mcp_client.py", + "start_line": 27, + "end_line": 31, + "message": "Import of 'OAuthBearerAuth' is not used." + }, + { + "number": 303, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/agents/main_agent/integrations/external_mcp_client.py", + "start_line": 15, + "end_line": 15, + "message": "Import of 'Callable' is not used." + }, + { + "number": 302, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/apis/app_api/documents/ingestion/processors/docling_processor.py", + "start_line": 8, + "end_line": 8, + "message": "Import of 'Union' is not used." + }, + { + "number": 301, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/apis/app_api/documents/ingestion/processors/docling_processor.py", + "start_line": 1, + "end_line": 1, + "message": "Import of 'asyncio' is not used." + }, + { + "number": 300, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/apis/app_api/fine_tuning/dependencies.py", + "start_line": 12, + "end_line": 12, + "message": "Import of 'ScriptPackagingService' is not used." + }, + { + "number": 299, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/apis/app_api/fine_tuning/dependencies.py", + "start_line": 11, + "end_line": 11, + "message": "Import of 'InferenceRepository' is not used." + }, + { + "number": 298, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/apis/app_api/fine_tuning/dependencies.py", + "start_line": 10, + "end_line": 10, + "message": "Import of 'SageMakerService' is not used." + }, + { + "number": 297, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/apis/app_api/fine_tuning/dependencies.py", + "start_line": 9, + "end_line": 9, + "message": "Import of 'FineTuningS3Service' is not used." + }, + { + "number": 296, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/apis/app_api/fine_tuning/dependencies.py", + "start_line": 8, + "end_line": 8, + "message": "Import of 'FineTuningJobsRepository' is not used." + }, + { + "number": 295, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/agents/utils/config.py", + "start_line": 4, + "end_line": 4, + "message": "Import of 'os' is not used." + }, + { + "number": 294, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/agents/main_agent/session/compaction_models.py", + "start_line": 9, + "end_line": 9, + "message": "Import of 'datetime' is not used." + }, + { + "number": 293, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/agents/main_agent/session/compaction_models.py", + "start_line": 8, + "end_line": 8, + "message": "Import of 'field' is not used." + }, + { + "number": 292, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/agents/main_agent/quota/checker.py", + "start_line": 8, + "end_line": 8, + "message": "Import of 'QuotaTier' is not used." + }, + { + "number": 291, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/apis/app_api/costs/calculator.py", + "start_line": 9, + "end_line": 9, + "message": "Import of 'Optional' is not used." + }, + { + "number": 290, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/apis/app_api/costs/aggregator.py", + "start_line": 6, + "end_line": 6, + "message": "Import of 'Decimal' is not used." + }, + { + "number": 289, + "rule": "py/unused-import", + "severity": "note", + "description": "Unused import", + "file": "backend/src/apis/shared/rbac/admin_service.py", + "start_line": 5, + "end_line": 5, + "message": "Import of 'datetime' is not used." + }, + { + "number": 288, + "rule": "py/cyclic-import", + "severity": "note", + "description": "Cyclic import", + "file": "backend/src/apis/app_api/storage/metadata_storage.py", + "start_line": 173, + "end_line": 173, + "message": "Import of module storage.dynamodb_storage begins an import cycle." + }, + { + "number": 287, + "rule": "py/cyclic-import", + "severity": "note", + "description": "Cyclic import", + "file": "backend/src/apis/app_api/storage/dynamodb_storage.py", + "start_line": 42, + "end_line": 42, + "message": "Import of module storage.metadata_storage begins an import cycle." + }, + { + "number": 286, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/sessions/services/session_service.py", + "start_line": 290, + "end_line": 290, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 285, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/sessions/services/session_service.py", + "start_line": 287, + "end_line": 287, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 284, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/sessions/services/session_service.py", + "start_line": 275, + "end_line": 275, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 283, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/sessions/services/session_service.py", + "start_line": 231, + "end_line": 231, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 282, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/sessions/services/session_service.py", + "start_line": 227, + "end_line": 227, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 281, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/sessions/services/session_service.py", + "start_line": 198, + "end_line": 198, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 280, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/sessions/services/session_service.py", + "start_line": 188, + "end_line": 188, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 279, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/sessions/services/session_service.py", + "start_line": 181, + "end_line": 181, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 278, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/agents/main_agent/session/session_factory.py", + "start_line": 232, + "end_line": 232, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 277, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/agents/main_agent/session/session_factory.py", + "start_line": 231, + "end_line": 231, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 276, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/agents/main_agent/session/session_factory.py", + "start_line": 103, + "end_line": 103, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 275, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/costs/service.py", + "start_line": 341, + "end_line": 342, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 274, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/costs/service.py", + "start_line": 280, + "end_line": 280, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 273, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/costs/service.py", + "start_line": 252, + "end_line": 252, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 272, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/costs/service.py", + "start_line": 204, + "end_line": 204, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 271, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/costs/service.py", + "start_line": 157, + "end_line": 157, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 270, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/costs/service.py", + "start_line": 147, + "end_line": 147, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 269, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/costs/service.py", + "start_line": 117, + "end_line": 117, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 268, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/costs/service.py", + "start_line": 94, + "end_line": 94, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 267, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/files/service.py", + "start_line": 355, + "end_line": 355, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 266, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/files/service.py", + "start_line": 345, + "end_line": 345, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 265, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/files/service.py", + "start_line": 295, + "end_line": 295, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 264, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/auth/api_keys/service.py", + "start_line": 78, + "end_line": 78, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 263, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/auth/api_keys/service.py", + "start_line": 76, + "end_line": 76, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 262, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/quota/service.py", + "start_line": 452, + "end_line": 452, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 261, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/quota/service.py", + "start_line": 436, + "end_line": 436, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 260, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/quota/service.py", + "start_line": 396, + "end_line": 396, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 259, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/quota/service.py", + "start_line": 282, + "end_line": 282, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 258, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/quota/service.py", + "start_line": 259, + "end_line": 259, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 257, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/quota/service.py", + "start_line": 140, + "end_line": 140, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 256, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/quota/service.py", + "start_line": 110, + "end_line": 110, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 255, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/inference_api/chat/service.py", + "start_line": 175, + "end_line": 175, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 254, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/inference_api/chat/service.py", + "start_line": 151, + "end_line": 151, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 253, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/inference_api/chat/service.py", + "start_line": 147, + "end_line": 147, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 252, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/quota/routes.py", + "start_line": 636, + "end_line": 637, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 251, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/quota/routes.py", + "start_line": 597, + "end_line": 597, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 250, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/quota/routes.py", + "start_line": 566, + "end_line": 566, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 249, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/quota/routes.py", + "start_line": 530, + "end_line": 530, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 248, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/quota/routes.py", + "start_line": 497, + "end_line": 497, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 247, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/quota/routes.py", + "start_line": 466, + "end_line": 466, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 246, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/quota/routes.py", + "start_line": 422, + "end_line": 422, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 245, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/quota/routes.py", + "start_line": 385, + "end_line": 385, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 244, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/quota/routes.py", + "start_line": 353, + "end_line": 353, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 243, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/quota/routes.py", + "start_line": 320, + "end_line": 320, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 242, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/quota/routes.py", + "start_line": 284, + "end_line": 285, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 241, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/quota/routes.py", + "start_line": 250, + "end_line": 251, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 240, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/quota/routes.py", + "start_line": 211, + "end_line": 211, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 239, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/quota/routes.py", + "start_line": 176, + "end_line": 176, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 238, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/quota/routes.py", + "start_line": 144, + "end_line": 144, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 237, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/quota/routes.py", + "start_line": 113, + "end_line": 113, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 236, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/quota/routes.py", + "start_line": 84, + "end_line": 84, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 235, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/inference_api/chat/routes.py", + "start_line": 520, + "end_line": 520, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 234, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/inference_api/chat/routes.py", + "start_line": 507, + "end_line": 507, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 233, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/inference_api/chat/routes.py", + "start_line": 468, + "end_line": 468, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 232, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/inference_api/chat/routes.py", + "start_line": 461, + "end_line": 461, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 231, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/inference_api/chat/routes.py", + "start_line": 458, + "end_line": 458, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 230, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/inference_api/chat/routes.py", + "start_line": 427, + "end_line": 427, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 229, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/inference_api/chat/routes.py", + "start_line": 409, + "end_line": 409, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 228, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/inference_api/chat/routes.py", + "start_line": 407, + "end_line": 407, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 227, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/inference_api/chat/routes.py", + "start_line": 389, + "end_line": 389, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 226, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/inference_api/chat/routes.py", + "start_line": 386, + "end_line": 386, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 225, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/inference_api/chat/routes.py", + "start_line": 379, + "end_line": 379, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 224, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/inference_api/chat/routes.py", + "start_line": 375, + "end_line": 375, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 223, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/inference_api/chat/routes.py", + "start_line": 361, + "end_line": 361, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 222, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/inference_api/chat/routes.py", + "start_line": 351, + "end_line": 351, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 221, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/inference_api/chat/routes.py", + "start_line": 345, + "end_line": 345, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 220, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/inference_api/chat/routes.py", + "start_line": 329, + "end_line": 329, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 219, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/inference_api/chat/routes.py", + "start_line": 306, + "end_line": 306, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 218, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/inference_api/chat/routes.py", + "start_line": 218, + "end_line": 218, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 217, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/inference_api/chat/routes.py", + "start_line": 215, + "end_line": 215, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 216, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/inference_api/chat/routes.py", + "start_line": 214, + "end_line": 214, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 215, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/inference_api/chat/routes.py", + "start_line": 180, + "end_line": 180, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 214, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/inference_api/chat/routes.py", + "start_line": 157, + "end_line": 157, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 213, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/inference_api/chat/routes.py", + "start_line": 103, + "end_line": 103, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 212, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/inference_api/chat/routes.py", + "start_line": 99, + "end_line": 99, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 211, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/inference_api/chat/routes.py", + "start_line": 95, + "end_line": 95, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 210, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/costs/routes.py", + "start_line": 105, + "end_line": 105, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 209, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/costs/routes.py", + "start_line": 62, + "end_line": 62, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 208, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/costs/routes.py", + "start_line": 52, + "end_line": 52, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 207, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/users/routes.py", + "start_line": 166, + "end_line": 166, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 206, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/users/routes.py", + "start_line": 114, + "end_line": 114, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 205, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/users/routes.py", + "start_line": 85, + "end_line": 85, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 204, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/costs/routes.py", + "start_line": 416, + "end_line": 416, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 203, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/costs/routes.py", + "start_line": 360, + "end_line": 361, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 202, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/costs/routes.py", + "start_line": 307, + "end_line": 307, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 201, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/costs/routes.py", + "start_line": 263, + "end_line": 263, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 200, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/costs/routes.py", + "start_line": 214, + "end_line": 215, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 199, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/costs/routes.py", + "start_line": 158, + "end_line": 159, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 198, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/costs/routes.py", + "start_line": 93, + "end_line": 93, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 197, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/fine_tuning/routes.py", + "start_line": 191, + "end_line": 191, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 196, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/fine_tuning/routes.py", + "start_line": 169, + "end_line": 169, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 195, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/fine_tuning/routes.py", + "start_line": 144, + "end_line": 144, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 194, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/fine_tuning/routes.py", + "start_line": 118, + "end_line": 119, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 193, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/fine_tuning/routes.py", + "start_line": 98, + "end_line": 98, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 192, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/fine_tuning/routes.py", + "start_line": 75, + "end_line": 75, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 191, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/sessions/routes.py", + "start_line": 528, + "end_line": 528, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 190, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/sessions/routes.py", + "start_line": 517, + "end_line": 517, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 189, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/sessions/routes.py", + "start_line": 506, + "end_line": 506, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 188, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/sessions/routes.py", + "start_line": 355, + "end_line": 355, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 187, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/sessions/routes.py", + "start_line": 324, + "end_line": 324, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 186, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/sessions/routes.py", + "start_line": 173, + "end_line": 173, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 185, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/sessions/routes.py", + "start_line": 115, + "end_line": 115, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 184, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/routes.py", + "start_line": 689, + "end_line": 689, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 183, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/sessions/routes.py", + "start_line": 59, + "end_line": 59, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 182, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/routes.py", + "start_line": 649, + "end_line": 649, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 181, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/routes.py", + "start_line": 598, + "end_line": 598, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 180, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/routes.py", + "start_line": 547, + "end_line": 547, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 179, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/routes.py", + "start_line": 497, + "end_line": 497, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 178, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/routes.py", + "start_line": 361, + "end_line": 361, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 177, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/users/routes.py", + "start_line": 169, + "end_line": 169, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 176, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/users/routes.py", + "start_line": 84, + "end_line": 84, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 175, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/auth/api_keys/routes.py", + "start_line": 98, + "end_line": 98, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 174, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/routes.py", + "start_line": 268, + "end_line": 268, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 173, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/routes.py", + "start_line": 123, + "end_line": 123, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 172, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/auth_providers/routes.py", + "start_line": 258, + "end_line": 258, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 171, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/auth_providers/routes.py", + "start_line": 213, + "end_line": 213, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 170, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/auth_providers/routes.py", + "start_line": 183, + "end_line": 183, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 169, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/auth_providers/routes.py", + "start_line": 124, + "end_line": 124, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 168, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/auth_providers/routes.py", + "start_line": 39, + "end_line": 39, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 167, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/tools/routes.py", + "start_line": 372, + "end_line": 372, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 166, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/tools/routes.py", + "start_line": 335, + "end_line": 335, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 165, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/tools/routes.py", + "start_line": 305, + "end_line": 305, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 164, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/tools/routes.py", + "start_line": 275, + "end_line": 275, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 163, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/tools/routes.py", + "start_line": 239, + "end_line": 239, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 162, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/tools/routes.py", + "start_line": 205, + "end_line": 205, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 161, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/tools/routes.py", + "start_line": 160, + "end_line": 160, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 160, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/tools/routes.py", + "start_line": 72, + "end_line": 72, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 159, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/auth/routes.py", + "start_line": 244, + "end_line": 244, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 158, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/auth/routes.py", + "start_line": 112, + "end_line": 112, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 157, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/files/routes.py", + "start_line": 209, + "end_line": 209, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 156, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/files/routes.py", + "start_line": 180, + "end_line": 182, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 155, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/files/routes.py", + "start_line": 132, + "end_line": 132, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 154, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/files/routes.py", + "start_line": 119, + "end_line": 119, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 153, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/memory/routes.py", + "start_line": 482, + "end_line": 482, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 152, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/assistants/routes.py", + "start_line": 693, + "end_line": 693, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 151, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/assistants/routes.py", + "start_line": 650, + "end_line": 650, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 150, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/assistants/routes.py", + "start_line": 606, + "end_line": 606, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 149, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/assistants/routes.py", + "start_line": 476, + "end_line": 476, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 148, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/assistants/routes.py", + "start_line": 427, + "end_line": 427, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 147, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/assistants/routes.py", + "start_line": 382, + "end_line": 382, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 146, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/assistants/routes.py", + "start_line": 324, + "end_line": 324, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 145, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/assistants/routes.py", + "start_line": 269, + "end_line": 269, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 144, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/assistants/routes.py", + "start_line": 186, + "end_line": 187, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 143, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/memory/routes.py", + "start_line": 358, + "end_line": 358, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 142, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/memory/routes.py", + "start_line": 280, + "end_line": 280, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 141, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/memory/routes.py", + "start_line": 220, + "end_line": 220, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 140, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/memory/routes.py", + "start_line": 149, + "end_line": 149, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 139, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/oauth/routes.py", + "start_line": 280, + "end_line": 280, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 138, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/oauth/routes.py", + "start_line": 235, + "end_line": 235, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 137, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/oauth/routes.py", + "start_line": 218, + "end_line": 218, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 136, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/oauth/routes.py", + "start_line": 182, + "end_line": 183, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 135, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/oauth/routes.py", + "start_line": 159, + "end_line": 159, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 134, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/oauth/routes.py", + "start_line": 83, + "end_line": 83, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 133, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/memory/routes.py", + "start_line": 84, + "end_line": 84, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 132, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/documents/routes.py", + "start_line": 238, + "end_line": 238, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 131, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/documents/routes.py", + "start_line": 235, + "end_line": 235, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 130, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/shared/oauth/routes.py", + "start_line": 255, + "end_line": 255, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 129, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/shared/oauth/routes.py", + "start_line": 183, + "end_line": 183, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 128, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/shared/oauth/routes.py", + "start_line": 137, + "end_line": 137, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 127, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/roles/routes.py", + "start_line": 239, + "end_line": 239, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 126, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/roles/routes.py", + "start_line": 197, + "end_line": 197, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 125, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/roles/routes.py", + "start_line": 155, + "end_line": 155, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 124, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/admin/roles/routes.py", + "start_line": 81, + "end_line": 81, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 123, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/agents/main_agent/session/preview_session_manager.py", + "start_line": 58, + "end_line": 58, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 122, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/memory/services/memory_service.py", + "start_line": 472, + "end_line": 472, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 121, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/memory/services/memory_service.py", + "start_line": 468, + "end_line": 468, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 120, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/memory/services/memory_service.py", + "start_line": 465, + "end_line": 465, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 119, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/memory/services/memory_service.py", + "start_line": 461, + "end_line": 461, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 118, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/memory/services/memory_service.py", + "start_line": 448, + "end_line": 448, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 117, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/memory/services/memory_service.py", + "start_line": 298, + "end_line": 298, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 116, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/memory/services/memory_service.py", + "start_line": 247, + "end_line": 247, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 115, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/fine_tuning/job_repository.py", + "start_line": 128, + "end_line": 128, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 114, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/fine_tuning/inference_repository.py", + "start_line": 128, + "end_line": 128, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 113, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/agents/main_agent/integrations/external_mcp_client.py", + "start_line": 332, + "end_line": 333, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 112, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/agents/main_agent/integrations/external_mcp_client.py", + "start_line": 246, + "end_line": 246, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 111, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/inference_api/chat/converse_routes.py", + "start_line": 340, + "end_line": 342, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 110, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/inference_api/chat/converse_routes.py", + "start_line": 128, + "end_line": 128, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 109, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/inference_api/chat/converse_routes.py", + "start_line": 87, + "end_line": 87, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 108, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/costs/aggregator.py", + "start_line": 48, + "end_line": 48, + "message": "This log entry depends on a user-provided value." + }, + { + "number": 107, + "rule": "py/log-injection", + "severity": "high", + "description": "Log Injection", + "file": "backend/src/apis/app_api/costs/aggregator.py", + "start_line": 37, + "end_line": 37, + "message": "This log entry depends on a user-provided value." + } +] \ No newline at end of file diff --git a/frontend/ai.client/package-lock.json b/frontend/ai.client/package-lock.json index ba105781..c2167245 100644 --- a/frontend/ai.client/package-lock.json +++ b/frontend/ai.client/package-lock.json @@ -1,29 +1,29 @@ { "name": "ai.client", - "version": "1.0.0-beta.19", + "version": "1.0.0-beta.20", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "ai.client", - "version": "1.0.0-beta.19", - "dependencies": { - "@angular/cdk": "21.2.3", - "@angular/common": "21.2.5", - "@angular/compiler": "21.2.5", - "@angular/core": "21.2.5", - "@angular/forms": "21.2.5", - "@angular/platform-browser": "21.2.5", - "@angular/router": "21.2.5", + "version": "1.0.0-beta.20", + "dependencies": { + "@angular/cdk": "21.2.4", + "@angular/common": "21.2.6", + "@angular/compiler": "21.2.6", + "@angular/core": "21.2.6", + "@angular/forms": "21.2.6", + "@angular/platform-browser": "21.2.6", + "@angular/router": "21.2.6", "@ctrl/ngx-emoji-mart": "9.3.0", "@microsoft/fetch-event-source": "2.0.1", - "@ng-icons/core": "33.1.0", - "@ng-icons/heroicons": "33.1.0", + "@ng-icons/core": "33.2.0", + "@ng-icons/heroicons": "33.2.0", "chart.js": "4.5.1", - "katex": "0.16.33", - "marked": "17.0.3", - "mermaid": "11.12.3", - "ng2-charts": "8.0.0", + "katex": "0.16.44", + "marked": "17.0.5", + "mermaid": "11.13.0", + "ng2-charts": "10.0.0", "ngx-markdown": "21.1.0", "prismjs": "1.30.0", "rxjs": "7.8.2", @@ -31,26 +31,21 @@ "uuid": "13.0.0" }, "devDependencies": { - "@angular/build": "21.2.3", - "@angular/cli": "21.2.3", - "@angular/compiler-cli": "21.2.5", - "@tailwindcss/postcss": "4.2.1", - "@vitest/coverage-v8": "4.0.18", - "fast-check": "3.23.2", - "jsdom": "27.4.0", - "postcss": "8.5.6", - "tailwindcss": "4.2.1", + "@analogjs/vite-plugin-angular": "3.0.0-alpha.18", + "@analogjs/vitest-angular": "3.0.0-alpha.18", + "@angular/build": "21.2.5", + "@angular/cli": "21.2.5", + "@angular/compiler-cli": "21.2.6", + "@tailwindcss/postcss": "4.2.2", + "@vitest/coverage-v8": "4.1.2", + "fast-check": "4.6.0", + "jsdom": "29.0.1", + "postcss": "8.5.8", + "tailwindcss": "4.2.2", "typescript": "5.9.3", - "vitest": "4.0.18" + "vitest": "4.1.2" } }, - "node_modules/@acemir/cssom": { - "version": "0.9.31", - "resolved": "https://registry.npmjs.org/@acemir/cssom/-/cssom-0.9.31.tgz", - "integrity": "sha512-ZnR3GSaH+/vJ0YlHau21FjfLYjMpYVIzTD8M8vIEQvIGxeOXyXdzCI140rrCY862p/C/BbzWsjc1dgnM9mkoTA==", - "dev": true, - "license": "MIT" - }, "node_modules/@algolia/abtesting": { "version": "1.14.1", "resolved": "https://registry.npmjs.org/@algolia/abtesting/-/abtesting-1.14.1.tgz", @@ -287,14 +282,341 @@ "node": ">=6.0.0" } }, + "node_modules/@analogjs/vite-plugin-angular": { + "version": "3.0.0-alpha.18", + "resolved": "https://registry.npmjs.org/@analogjs/vite-plugin-angular/-/vite-plugin-angular-3.0.0-alpha.18.tgz", + "integrity": "sha512-WVYRQ/cpOPdkAyeRFxgpu0rIzo8yiVV+aJ/dVAw2dOTSo9YcPBIWdtu6yEx3Vy+nA9S3HKErMZ2pvZbknbNySg==", + "dev": true, + "license": "MIT", + "dependencies": { + "oxc-parser": "^0.121.0", + "oxc-resolver": "^11.19.0", + "rolldown": "^1.0.0-rc.11", + "tinyglobby": "^0.2.14" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/brandonroberts" + }, + "peerDependencies": { + "@angular-devkit/build-angular": "^15.0.0 || ^16.0.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^20.0.0 || ^21.0.0", + "@angular/build": "^18.0.0 || ^19.0.0 || ^20.0.0 || ^21.0.0" + }, + "peerDependenciesMeta": { + "@angular-devkit/build-angular": { + "optional": true + }, + "@angular/build": { + "optional": true + } + } + }, + "node_modules/@analogjs/vite-plugin-angular/node_modules/@oxc-project/types": { + "version": "0.122.0", + "resolved": "https://registry.npmjs.org/@oxc-project/types/-/types-0.122.0.tgz", + "integrity": "sha512-oLAl5kBpV4w69UtFZ9xqcmTi+GENWOcPF7FCrczTiBbmC0ibXxCwyvZGbO39rCVEuLGAZM84DH0pUIyyv/YJzA==", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/Boshen" + } + }, + "node_modules/@analogjs/vite-plugin-angular/node_modules/@rolldown/binding-android-arm64": { + "version": "1.0.0-rc.12", + "resolved": "https://registry.npmjs.org/@rolldown/binding-android-arm64/-/binding-android-arm64-1.0.0-rc.12.tgz", + "integrity": "sha512-pv1y2Fv0JybcykuiiD3qBOBdz6RteYojRFY1d+b95WVuzx211CRh+ytI/+9iVyWQ6koTh5dawe4S/yRfOFjgaA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@analogjs/vite-plugin-angular/node_modules/@rolldown/binding-darwin-arm64": { + "version": "1.0.0-rc.12", + "resolved": "https://registry.npmjs.org/@rolldown/binding-darwin-arm64/-/binding-darwin-arm64-1.0.0-rc.12.tgz", + "integrity": "sha512-cFYr6zTG/3PXXF3pUO+umXxt1wkRK/0AYT8lDwuqvRC+LuKYWSAQAQZjCWDQpAH172ZV6ieYrNnFzVVcnSflAg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@analogjs/vite-plugin-angular/node_modules/@rolldown/binding-darwin-x64": { + "version": "1.0.0-rc.12", + "resolved": "https://registry.npmjs.org/@rolldown/binding-darwin-x64/-/binding-darwin-x64-1.0.0-rc.12.tgz", + "integrity": "sha512-ZCsYknnHzeXYps0lGBz8JrF37GpE9bFVefrlmDrAQhOEi4IOIlcoU1+FwHEtyXGx2VkYAvhu7dyBf75EJQffBw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@analogjs/vite-plugin-angular/node_modules/@rolldown/binding-freebsd-x64": { + "version": "1.0.0-rc.12", + "resolved": "https://registry.npmjs.org/@rolldown/binding-freebsd-x64/-/binding-freebsd-x64-1.0.0-rc.12.tgz", + "integrity": "sha512-dMLeprcVsyJsKolRXyoTH3NL6qtsT0Y2xeuEA8WQJquWFXkEC4bcu1rLZZSnZRMtAqwtrF/Ib9Ddtpa/Gkge9Q==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@analogjs/vite-plugin-angular/node_modules/@rolldown/binding-linux-arm-gnueabihf": { + "version": "1.0.0-rc.12", + "resolved": "https://registry.npmjs.org/@rolldown/binding-linux-arm-gnueabihf/-/binding-linux-arm-gnueabihf-1.0.0-rc.12.tgz", + "integrity": "sha512-YqWjAgGC/9M1lz3GR1r1rP79nMgo3mQiiA+Hfo+pvKFK1fAJ1bCi0ZQVh8noOqNacuY1qIcfyVfP6HoyBRZ85Q==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@analogjs/vite-plugin-angular/node_modules/@rolldown/binding-linux-arm64-gnu": { + "version": "1.0.0-rc.12", + "resolved": "https://registry.npmjs.org/@rolldown/binding-linux-arm64-gnu/-/binding-linux-arm64-gnu-1.0.0-rc.12.tgz", + "integrity": "sha512-/I5AS4cIroLpslsmzXfwbe5OmWvSsrFuEw3mwvbQ1kDxJ822hFHIx+vsN/TAzNVyepI/j/GSzrtCIwQPeKCLIg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@analogjs/vite-plugin-angular/node_modules/@rolldown/binding-linux-arm64-musl": { + "version": "1.0.0-rc.12", + "resolved": "https://registry.npmjs.org/@rolldown/binding-linux-arm64-musl/-/binding-linux-arm64-musl-1.0.0-rc.12.tgz", + "integrity": "sha512-V6/wZztnBqlx5hJQqNWwFdxIKN0m38p8Jas+VoSfgH54HSj9tKTt1dZvG6JRHcjh6D7TvrJPWFGaY9UBVOaWPw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@analogjs/vite-plugin-angular/node_modules/@rolldown/binding-linux-x64-gnu": { + "version": "1.0.0-rc.12", + "resolved": "https://registry.npmjs.org/@rolldown/binding-linux-x64-gnu/-/binding-linux-x64-gnu-1.0.0-rc.12.tgz", + "integrity": "sha512-RNrafz5bcwRy+O9e6P8Z/OCAJW/A+qtBczIqVYwTs14pf4iV1/+eKEjdOUta93q2TsT/FI0XYDP3TCky38LMAg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@analogjs/vite-plugin-angular/node_modules/@rolldown/binding-linux-x64-musl": { + "version": "1.0.0-rc.12", + "resolved": "https://registry.npmjs.org/@rolldown/binding-linux-x64-musl/-/binding-linux-x64-musl-1.0.0-rc.12.tgz", + "integrity": "sha512-Jpw/0iwoKWx3LJ2rc1yjFrj+T7iHZn2JDg1Yny1ma0luviFS4mhAIcd1LFNxK3EYu3DHWCps0ydXQ5i/rrJ2ig==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@analogjs/vite-plugin-angular/node_modules/@rolldown/binding-openharmony-arm64": { + "version": "1.0.0-rc.12", + "resolved": "https://registry.npmjs.org/@rolldown/binding-openharmony-arm64/-/binding-openharmony-arm64-1.0.0-rc.12.tgz", + "integrity": "sha512-vRugONE4yMfVn0+7lUKdKvN4D5YusEiPilaoO2sgUWpCvrncvWgPMzK00ZFFJuiPgLwgFNP5eSiUlv2tfc+lpA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@analogjs/vite-plugin-angular/node_modules/@rolldown/binding-wasm32-wasi": { + "version": "1.0.0-rc.12", + "resolved": "https://registry.npmjs.org/@rolldown/binding-wasm32-wasi/-/binding-wasm32-wasi-1.0.0-rc.12.tgz", + "integrity": "sha512-ykGiLr/6kkiHc0XnBfmFJuCjr5ZYKKofkx+chJWDjitX+KsJuAmrzWhwyOMSHzPhzOHOy7u9HlFoa5MoAOJ/Zg==", + "cpu": [ + "wasm32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "@napi-rs/wasm-runtime": "^1.1.1" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@analogjs/vite-plugin-angular/node_modules/@rolldown/binding-win32-arm64-msvc": { + "version": "1.0.0-rc.12", + "resolved": "https://registry.npmjs.org/@rolldown/binding-win32-arm64-msvc/-/binding-win32-arm64-msvc-1.0.0-rc.12.tgz", + "integrity": "sha512-5eOND4duWkwx1AzCxadcOrNeighiLwMInEADT0YM7xeEOOFcovWZCq8dadXgcRHSf3Ulh1kFo/qvzoFiCLOL1Q==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@analogjs/vite-plugin-angular/node_modules/@rolldown/binding-win32-x64-msvc": { + "version": "1.0.0-rc.12", + "resolved": "https://registry.npmjs.org/@rolldown/binding-win32-x64-msvc/-/binding-win32-x64-msvc-1.0.0-rc.12.tgz", + "integrity": "sha512-PyqoipaswDLAZtot351MLhrlrh6lcZPo2LSYE+VDxbVk24LVKAGOuE4hb8xZQmrPAuEtTZW8E6D2zc5EUZX4Lw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@analogjs/vite-plugin-angular/node_modules/@rolldown/pluginutils": { + "version": "1.0.0-rc.12", + "resolved": "https://registry.npmjs.org/@rolldown/pluginutils/-/pluginutils-1.0.0-rc.12.tgz", + "integrity": "sha512-HHMwmarRKvoFsJorqYlFeFRzXZqCt2ETQlEDOb9aqssrnVBB1/+xgTGtuTrIk5vzLNX1MjMtTf7W9z3tsSbrxw==", + "dev": true, + "license": "MIT" + }, + "node_modules/@analogjs/vite-plugin-angular/node_modules/rolldown": { + "version": "1.0.0-rc.12", + "resolved": "https://registry.npmjs.org/rolldown/-/rolldown-1.0.0-rc.12.tgz", + "integrity": "sha512-yP4USLIMYrwpPHEFB5JGH1uxhcslv6/hL0OyvTuY+3qlOSJvZ7ntYnoWpehBxufkgN0cvXxppuTu5hHa/zPh+A==", + "dev": true, + "license": "MIT", + "dependencies": { + "@oxc-project/types": "=0.122.0", + "@rolldown/pluginutils": "1.0.0-rc.12" + }, + "bin": { + "rolldown": "bin/cli.mjs" + }, + "engines": { + "node": "^20.19.0 || >=22.12.0" + }, + "optionalDependencies": { + "@rolldown/binding-android-arm64": "1.0.0-rc.12", + "@rolldown/binding-darwin-arm64": "1.0.0-rc.12", + "@rolldown/binding-darwin-x64": "1.0.0-rc.12", + "@rolldown/binding-freebsd-x64": "1.0.0-rc.12", + "@rolldown/binding-linux-arm-gnueabihf": "1.0.0-rc.12", + "@rolldown/binding-linux-arm64-gnu": "1.0.0-rc.12", + "@rolldown/binding-linux-arm64-musl": "1.0.0-rc.12", + "@rolldown/binding-linux-ppc64-gnu": "1.0.0-rc.12", + "@rolldown/binding-linux-s390x-gnu": "1.0.0-rc.12", + "@rolldown/binding-linux-x64-gnu": "1.0.0-rc.12", + "@rolldown/binding-linux-x64-musl": "1.0.0-rc.12", + "@rolldown/binding-openharmony-arm64": "1.0.0-rc.12", + "@rolldown/binding-wasm32-wasi": "1.0.0-rc.12", + "@rolldown/binding-win32-arm64-msvc": "1.0.0-rc.12", + "@rolldown/binding-win32-x64-msvc": "1.0.0-rc.12" + } + }, + "node_modules/@analogjs/vitest-angular": { + "version": "3.0.0-alpha.18", + "resolved": "https://registry.npmjs.org/@analogjs/vitest-angular/-/vitest-angular-3.0.0-alpha.18.tgz", + "integrity": "sha512-TeYWAJYnFhKiDa67BJ+8aPrUXLJbkufl5M+5KR8O3hMzvWc96OVM7JFB3GaVm5bzxrA+affSPkLBKY9NQwvPuA==", + "dev": true, + "license": "MIT", + "dependencies": { + "oxc-transform": "^0.121.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/brandonroberts" + }, + "peerDependencies": { + "@analogjs/vite-plugin-angular": "*", + "@angular-devkit/architect": ">=0.1500.0 < 0.2200.0", + "@angular-devkit/schematics": ">=17.0.0", + "vitest": "^1.3.1 || ^2.0.0 || ^3.0.0 || ^4.0.0", + "zone.js": ">=0.14.0" + }, + "peerDependenciesMeta": { + "zone.js": { + "optional": true + } + } + }, "node_modules/@angular-devkit/architect": { - "version": "0.2102.3", - "resolved": "https://registry.npmjs.org/@angular-devkit/architect/-/architect-0.2102.3.tgz", - "integrity": "sha512-G4wSWUbtWp1WCKw5GMRqHH8g4m5RBpIyzt8n8IX5Pm6iYe/rwCBSKL3ktEkk7AYMwjtonkRlDtAK1GScFsf1Sg==", + "version": "0.2102.5", + "resolved": "https://registry.npmjs.org/@angular-devkit/architect/-/architect-0.2102.5.tgz", + "integrity": "sha512-9xE7G177R9G9Kte+4AtbEMlEeZUupnvdBUMVBlZRa/n4UDUyAkB/vj58KrzRCCIVQ/ypHVMwUilaDTO484dd+g==", "dev": true, "license": "MIT", "dependencies": { - "@angular-devkit/core": "21.2.3", + "@angular-devkit/core": "21.2.5", "rxjs": "7.8.2" }, "bin": { @@ -307,15 +629,15 @@ } }, "node_modules/@angular-devkit/core": { - "version": "21.2.3", - "resolved": "https://registry.npmjs.org/@angular-devkit/core/-/core-21.2.3.tgz", - "integrity": "sha512-i++JVHOijyFckjdYqKbSXUpKnvmO2a0Utt/wQVwiLAT0O9H1hR/2NGPzubB4hnLMNSyVWY8diminaF23mZ0xjA==", + "version": "21.2.5", + "resolved": "https://registry.npmjs.org/@angular-devkit/core/-/core-21.2.5.tgz", + "integrity": "sha512-9z9w7UxKKVmib5QHFZTOfJpAiSudqQwwEZFpQy31yaXR3tJw85xO5owi+66sgTpEvNh9Ix2THhcUq//ToP/0VA==", "license": "MIT", "dependencies": { "ajv": "8.18.0", "ajv-formats": "3.0.1", "jsonc-parser": "3.3.1", - "picomatch": "4.0.3", + "picomatch": "4.0.4", "rxjs": "7.8.2", "source-map": "0.7.6" }, @@ -334,12 +656,12 @@ } }, "node_modules/@angular-devkit/schematics": { - "version": "21.2.3", - "resolved": "https://registry.npmjs.org/@angular-devkit/schematics/-/schematics-21.2.3.tgz", - "integrity": "sha512-tc/bBloRTVIBWGRiMPln1QbW+2QPj+YnWL/nG79abLKWkdrL9dJLcCRXY7dsPNrxOc/QF+8tVpnr8JofhWL9cQ==", + "version": "21.2.5", + "resolved": "https://registry.npmjs.org/@angular-devkit/schematics/-/schematics-21.2.5.tgz", + "integrity": "sha512-gEg84eipTX6lcpNTDVUXBBwp0vs3rXM319Qom+sCLOKBGyqE0mvb1RM1WwfNcyOqeSMQC/vLUwRKqnP0wg1UDg==", "license": "MIT", "dependencies": { - "@angular-devkit/core": "21.2.3", + "@angular-devkit/core": "21.2.5", "jsonc-parser": "3.3.1", "magic-string": "0.30.21", "ora": "9.3.0", @@ -352,14 +674,14 @@ } }, "node_modules/@angular/build": { - "version": "21.2.3", - "resolved": "https://registry.npmjs.org/@angular/build/-/build-21.2.3.tgz", - "integrity": "sha512-u4bhVQruK7KOuHQuoltqlHg+szp0f6rnsGIUolJnT3ez5V6OuSoWIxUorSbvryi2DiKRD/3iwMq7qJN1aN9HCA==", + "version": "21.2.5", + "resolved": "https://registry.npmjs.org/@angular/build/-/build-21.2.5.tgz", + "integrity": "sha512-AfE09K+pkgS3VB84R74XG/XB9LQmO6Q6YfpssjDwMnWGwDGGwUGydXn8AKdhnhI4mM2nFKoe+QYszFgrzu5HeQ==", "dev": true, "license": "MIT", "dependencies": { "@ampproject/remapping": "2.3.0", - "@angular-devkit/architect": "0.2102.3", + "@angular-devkit/architect": "0.2102.5", "@babel/core": "7.29.0", "@babel/helper-annotate-as-pure": "7.27.3", "@babel/helper-split-export-declaration": "7.24.7", @@ -375,14 +697,14 @@ "magic-string": "0.30.21", "mrmime": "2.0.1", "parse5-html-rewriting-stream": "8.0.0", - "picomatch": "4.0.3", + "picomatch": "4.0.4", "piscina": "5.1.4", "rolldown": "1.0.0-rc.4", "sass": "1.97.3", "semver": "7.7.4", "source-map-support": "0.5.21", "tinyglobby": "0.2.15", - "undici": "7.22.0", + "undici": "7.24.4", "vite": "7.3.1", "watchpack": "2.5.1" }, @@ -402,7 +724,7 @@ "@angular/platform-browser": "^21.0.0", "@angular/platform-server": "^21.0.0", "@angular/service-worker": "^21.0.0", - "@angular/ssr": "^21.2.3", + "@angular/ssr": "^21.2.5", "karma": "^6.4.0", "less": "^4.2.0", "ng-packagr": "^21.0.0", @@ -452,9 +774,9 @@ } }, "node_modules/@angular/cdk": { - "version": "21.2.3", - "resolved": "https://registry.npmjs.org/@angular/cdk/-/cdk-21.2.3.tgz", - "integrity": "sha512-7t+UhfbSpIUG9uUyL4b8nI/HyYyrbgAvDwBT8kH4D7If0WiFQhUoottAM0+WZ7Uy+F4nx322K6TOomz/fZJOoQ==", + "version": "21.2.4", + "resolved": "https://registry.npmjs.org/@angular/cdk/-/cdk-21.2.4.tgz", + "integrity": "sha512-Zv+q9Z/wVWTt0ckuO3gnU7PbpCLTr1tKPEsofLGGzDufA5/85aBLn2UiLcjlY6wQ+V3EMqANhGo/8XJgvBEYFA==", "license": "MIT", "dependencies": { "parse5": "^8.0.0", @@ -468,19 +790,19 @@ } }, "node_modules/@angular/cli": { - "version": "21.2.3", - "resolved": "https://registry.npmjs.org/@angular/cli/-/cli-21.2.3.tgz", - "integrity": "sha512-QzDxnSy8AUOz6ca92xfbNuEmRdWRDi1dfFkxDVr+4l6XUnA9X6VmOi7ioCO1I9oDR73LXHybOqkqHBYDlqt/Ag==", + "version": "21.2.5", + "resolved": "https://registry.npmjs.org/@angular/cli/-/cli-21.2.5.tgz", + "integrity": "sha512-nLpyqXQ0s96jC/vR8CsKM3q94/F/nZwtbjM3E6g5lXpKe7cHfJkCfERPexx+jzzYP5JBhtm+u61aH6auu9KYQw==", "dev": true, "license": "MIT", "dependencies": { - "@angular-devkit/architect": "0.2102.3", - "@angular-devkit/core": "21.2.3", - "@angular-devkit/schematics": "21.2.3", + "@angular-devkit/architect": "0.2102.5", + "@angular-devkit/core": "21.2.5", + "@angular-devkit/schematics": "21.2.5", "@inquirer/prompts": "7.10.1", "@listr2/prompt-adapter-inquirer": "3.0.5", "@modelcontextprotocol/sdk": "1.26.0", - "@schematics/angular": "21.2.3", + "@schematics/angular": "21.2.5", "@yarnpkg/lockfile": "1.1.0", "algoliasearch": "5.48.1", "ini": "6.0.0", @@ -503,9 +825,9 @@ } }, "node_modules/@angular/common": { - "version": "21.2.5", - "resolved": "https://registry.npmjs.org/@angular/common/-/common-21.2.5.tgz", - "integrity": "sha512-MTjCbsHBkF9W12CW9yYiTJdVfZv/qCqBCZ2iqhMpDA5G+ZJiTKP0IDTJVrx2N5iHfiJ1lnK719t/9GXROtEAvg==", + "version": "21.2.6", + "resolved": "https://registry.npmjs.org/@angular/common/-/common-21.2.6.tgz", + "integrity": "sha512-2FcpZ1h6AZ4JwCIlnpHCYrbRTGQTOj/RFXkuX/qw7K6cFmJGfWFMmr++xWtHZEvUddfbR9hqDo+v1mkqEKE/Kw==", "license": "MIT", "dependencies": { "tslib": "^2.3.0" @@ -514,14 +836,14 @@ "node": "^20.19.0 || ^22.12.0 || >=24.0.0" }, "peerDependencies": { - "@angular/core": "21.2.5", + "@angular/core": "21.2.6", "rxjs": "^6.5.3 || ^7.4.0" } }, "node_modules/@angular/compiler": { - "version": "21.2.5", - "resolved": "https://registry.npmjs.org/@angular/compiler/-/compiler-21.2.5.tgz", - "integrity": "sha512-QloEsknGqLvmr+ED7QShDt7SoMY9mipV+gVnwn4hBI5sbl+TOBfYWXIaJMnxseFwSqjXTSCVGckfylIlynNcFg==", + "version": "21.2.6", + "resolved": "https://registry.npmjs.org/@angular/compiler/-/compiler-21.2.6.tgz", + "integrity": "sha512-shGkb/aAIPbG8oSYkVJ0msGlRdDVcJBVaUVx2KenMltifQjfLn5N8DFMAzOR6haaA3XeugFExxKqmvySjrVq+A==", "license": "MIT", "dependencies": { "tslib": "^2.3.0" @@ -531,9 +853,9 @@ } }, "node_modules/@angular/compiler-cli": { - "version": "21.2.5", - "resolved": "https://registry.npmjs.org/@angular/compiler-cli/-/compiler-cli-21.2.5.tgz", - "integrity": "sha512-Ox3vz6KAM7i47ujR/3M3NCOeCRn6vrC9yV1SHZRhSrYg6CWWcOMveavEEwtNjYtn3hOzrktO4CnuVwtDbU8pLg==", + "version": "21.2.6", + "resolved": "https://registry.npmjs.org/@angular/compiler-cli/-/compiler-cli-21.2.6.tgz", + "integrity": "sha512-CiPmat4+D+hWXMTAY++09WeII/5D0r6iTjdLdaTq8tlo0uJcrOlazib4CpA94kJ2CRdzfhmC1H+ttwBI1xIlTg==", "dev": true, "license": "MIT", "dependencies": { @@ -554,7 +876,7 @@ "node": "^20.19.0 || ^22.12.0 || >=24.0.0" }, "peerDependencies": { - "@angular/compiler": "21.2.5", + "@angular/compiler": "21.2.6", "typescript": ">=5.9 <6.1" }, "peerDependenciesMeta": { @@ -564,9 +886,9 @@ } }, "node_modules/@angular/core": { - "version": "21.2.5", - "resolved": "https://registry.npmjs.org/@angular/core/-/core-21.2.5.tgz", - "integrity": "sha512-JgHU134Adb1wrpyGC9ozcv3hiRAgaFTvJFn1u9OU/AVXyxu4meMmVh2hp5QhAvPnv8XQdKWWIkAY+dbpPE6zKA==", + "version": "21.2.6", + "resolved": "https://registry.npmjs.org/@angular/core/-/core-21.2.6.tgz", + "integrity": "sha512-svgK5DhFlQlS+sMybXftn08rHHRiDGY/uIKT5LZUaKgyffnkPb8uClpMIW0NzANtU8qs8pwgDZFoJw85Ia3oqQ==", "license": "MIT", "dependencies": { "tslib": "^2.3.0" @@ -575,7 +897,7 @@ "node": "^20.19.0 || ^22.12.0 || >=24.0.0" }, "peerDependencies": { - "@angular/compiler": "21.2.5", + "@angular/compiler": "21.2.6", "rxjs": "^6.5.3 || ^7.4.0", "zone.js": "~0.15.0 || ~0.16.0" }, @@ -589,9 +911,9 @@ } }, "node_modules/@angular/forms": { - "version": "21.2.5", - "resolved": "https://registry.npmjs.org/@angular/forms/-/forms-21.2.5.tgz", - "integrity": "sha512-pqRuK+a1ZAFZbs8/dZoorFJah2IWaf/SH8axHUpaDJ7fyNrwNEcpczyObdxZ00lOgORpKAhWo/q0hlVS+In8cw==", + "version": "21.2.6", + "resolved": "https://registry.npmjs.org/@angular/forms/-/forms-21.2.6.tgz", + "integrity": "sha512-i8BoWxBAm0g2xOMcQ8wTdj07gqMPIFYIyefCOo0ezcGj5XhYjd+C2UrYnKsup0aMZqqEAO1l2aZbmfHx9xLheQ==", "license": "MIT", "dependencies": { "@standard-schema/spec": "^1.0.0", @@ -601,16 +923,16 @@ "node": "^20.19.0 || ^22.12.0 || >=24.0.0" }, "peerDependencies": { - "@angular/common": "21.2.5", - "@angular/core": "21.2.5", - "@angular/platform-browser": "21.2.5", + "@angular/common": "21.2.6", + "@angular/core": "21.2.6", + "@angular/platform-browser": "21.2.6", "rxjs": "^6.5.3 || ^7.4.0" } }, "node_modules/@angular/platform-browser": { - "version": "21.2.5", - "resolved": "https://registry.npmjs.org/@angular/platform-browser/-/platform-browser-21.2.5.tgz", - "integrity": "sha512-VuuYguxjgyI4XWuoXrKynmuA3FB991pXbkNhxHeCW0yX+7DGOnGLPF1oierd4/X+IvskmN8foBZLfjyg9u4Ffg==", + "version": "21.2.6", + "resolved": "https://registry.npmjs.org/@angular/platform-browser/-/platform-browser-21.2.6.tgz", + "integrity": "sha512-LW1vPXVHvy71LBahn+fSzPlWQl25kJIdcXq+ptG7HsMVgbPQ3/vvkKXAHYaRdppLGCFL+v+3dQGHYLNLiYL9qg==", "license": "MIT", "dependencies": { "tslib": "^2.3.0" @@ -619,9 +941,9 @@ "node": "^20.19.0 || ^22.12.0 || >=24.0.0" }, "peerDependencies": { - "@angular/animations": "21.2.5", - "@angular/common": "21.2.5", - "@angular/core": "21.2.5" + "@angular/animations": "21.2.6", + "@angular/common": "21.2.6", + "@angular/core": "21.2.6" }, "peerDependenciesMeta": { "@angular/animations": { @@ -630,9 +952,9 @@ } }, "node_modules/@angular/router": { - "version": "21.2.5", - "resolved": "https://registry.npmjs.org/@angular/router/-/router-21.2.5.tgz", - "integrity": "sha512-yQGhTVGvh8OMW3auj13+g+OCSQj7gyBQON/2X4LuCvIUG71NPV6Fqzfk9DKTKaXpqo0FThy8/LPJ0Lsy3CRejg==", + "version": "21.2.6", + "resolved": "https://registry.npmjs.org/@angular/router/-/router-21.2.6.tgz", + "integrity": "sha512-0ajhkKYeOqHQEEH88+Q0HrheR3helwTvdTqD/0gTaapCe+HOoC+SYwmzzsYP2zwAxBNQEg4JHOGKQ30X9/gwgw==", "license": "MIT", "dependencies": { "tslib": "^2.3.0" @@ -641,9 +963,9 @@ "node": "^20.19.0 || ^22.12.0 || >=24.0.0" }, "peerDependencies": { - "@angular/common": "21.2.5", - "@angular/core": "21.2.5", - "@angular/platform-browser": "21.2.5", + "@angular/common": "21.2.6", + "@angular/core": "21.2.6", + "@angular/platform-browser": "21.2.6", "rxjs": "^6.5.3 || ^7.4.0" } }, @@ -661,23 +983,26 @@ } }, "node_modules/@asamuzakjp/css-color": { - "version": "4.1.2", - "resolved": "https://registry.npmjs.org/@asamuzakjp/css-color/-/css-color-4.1.2.tgz", - "integrity": "sha512-NfBUvBaYgKIuq6E/RBLY1m0IohzNHAYyaJGuTK79Z23uNwmz2jl1mPsC5ZxCCxylinKhT1Amn5oNTlx1wN8cQg==", + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/@asamuzakjp/css-color/-/css-color-5.1.1.tgz", + "integrity": "sha512-iGWN8E45Ws0XWx3D44Q1t6vX2LqhCKcwfmwBYCDsFrYFS6m4q/Ks61L2veETaLv+ckDC6+dTETJoaAAb7VjLiw==", "dev": true, "license": "MIT", "dependencies": { - "@csstools/css-calc": "^3.0.0", - "@csstools/css-color-parser": "^4.0.1", + "@csstools/css-calc": "^3.1.1", + "@csstools/css-color-parser": "^4.0.2", "@csstools/css-parser-algorithms": "^4.0.0", "@csstools/css-tokenizer": "^4.0.0", - "lru-cache": "^11.2.5" + "lru-cache": "^11.2.7" + }, + "engines": { + "node": "^20.19.0 || ^22.12.0 || >=24.0.0" } }, "node_modules/@asamuzakjp/css-color/node_modules/lru-cache": { - "version": "11.2.6", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-11.2.6.tgz", - "integrity": "sha512-ESL2CrkS/2wTPfuend7Zhkzo2u0daGJ/A2VucJOgQ/C48S/zB8MMeMHSGKYpXhIjbPxfuezITkaBH1wqv00DDQ==", + "version": "11.2.7", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-11.2.7.tgz", + "integrity": "sha512-aY/R+aEsRelme17KGQa/1ZSIpLpNYYrhcrepKTZgE+W3WM16YMCaPwOHLHsmopZHELU0Ojin1lPVxKR0MihncA==", "dev": true, "license": "BlueOak-1.0.0", "engines": { @@ -685,23 +1010,26 @@ } }, "node_modules/@asamuzakjp/dom-selector": { - "version": "6.8.1", - "resolved": "https://registry.npmjs.org/@asamuzakjp/dom-selector/-/dom-selector-6.8.1.tgz", - "integrity": "sha512-MvRz1nCqW0fsy8Qz4dnLIvhOlMzqDVBabZx6lH+YywFDdjXhMY37SmpV1XFX3JzG5GWHn63j6HX6QPr3lZXHvQ==", + "version": "7.0.4", + "resolved": "https://registry.npmjs.org/@asamuzakjp/dom-selector/-/dom-selector-7.0.4.tgz", + "integrity": "sha512-jXR6x4AcT3eIrS2fSNAwJpwirOkGcd+E7F7CP3zjdTqz9B/2huHOL8YJZBgekKwLML+u7qB/6P1LXQuMScsx0w==", "dev": true, "license": "MIT", "dependencies": { "@asamuzakjp/nwsapi": "^2.3.9", "bidi-js": "^1.0.3", - "css-tree": "^3.1.0", + "css-tree": "^3.2.1", "is-potential-custom-element-name": "^1.0.1", - "lru-cache": "^11.2.6" + "lru-cache": "^11.2.7" + }, + "engines": { + "node": "^20.19.0 || ^22.12.0 || >=24.0.0" } }, "node_modules/@asamuzakjp/dom-selector/node_modules/lru-cache": { - "version": "11.2.6", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-11.2.6.tgz", - "integrity": "sha512-ESL2CrkS/2wTPfuend7Zhkzo2u0daGJ/A2VucJOgQ/C48S/zB8MMeMHSGKYpXhIjbPxfuezITkaBH1wqv00DDQ==", + "version": "11.2.7", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-11.2.7.tgz", + "integrity": "sha512-aY/R+aEsRelme17KGQa/1ZSIpLpNYYrhcrepKTZgE+W3WM16YMCaPwOHLHsmopZHELU0Ojin1lPVxKR0MihncA==", "dev": true, "license": "BlueOak-1.0.0", "engines": { @@ -1024,43 +1352,56 @@ "integrity": "sha512-jigsZK+sMF/cuiB7sERuo9V7N9jx+dhmHHnQyDSVdpZwVutaBu7WvNYqMDLSgFgfB30n452TP3vjDAvFC973mA==", "license": "MIT" }, - "node_modules/@chevrotain/cst-dts-gen": { - "version": "11.1.1", - "resolved": "https://registry.npmjs.org/@chevrotain/cst-dts-gen/-/cst-dts-gen-11.1.1.tgz", - "integrity": "sha512-fRHyv6/f542qQqiRGalrfJl/evD39mAvbJLCekPazhiextEatq1Jx1K/i9gSd5NNO0ds03ek0Cbo/4uVKmOBcw==", - "license": "Apache-2.0", + "node_modules/@bramus/specificity": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/@bramus/specificity/-/specificity-2.4.2.tgz", + "integrity": "sha512-ctxtJ/eA+t+6q2++vj5j7FYX3nRu311q1wfYH3xjlLOsczhlhxAg2FWNUXhpGvAw3BWo1xBcvOV6/YLc2r5FJw==", + "dev": true, + "license": "MIT", "dependencies": { - "@chevrotain/gast": "11.1.1", - "@chevrotain/types": "11.1.1", - "lodash-es": "4.17.23" + "css-tree": "^3.0.0" + }, + "bin": { + "specificity": "bin/cli.js" + } + }, + "node_modules/@chevrotain/cst-dts-gen": { + "version": "11.1.2", + "resolved": "https://registry.npmjs.org/@chevrotain/cst-dts-gen/-/cst-dts-gen-11.1.2.tgz", + "integrity": "sha512-XTsjvDVB5nDZBQB8o0o/0ozNelQtn2KrUVteIHSlPd2VAV2utEb6JzyCJaJ8tGxACR4RiBNWy5uYUHX2eji88Q==", + "license": "Apache-2.0", + "dependencies": { + "@chevrotain/gast": "11.1.2", + "@chevrotain/types": "11.1.2", + "lodash-es": "4.17.23" } }, "node_modules/@chevrotain/gast": { - "version": "11.1.1", - "resolved": "https://registry.npmjs.org/@chevrotain/gast/-/gast-11.1.1.tgz", - "integrity": "sha512-Ko/5vPEYy1vn5CbCjjvnSO4U7GgxyGm+dfUZZJIWTlQFkXkyym0jFYrWEU10hyCjrA7rQtiHtBr0EaZqvHFZvg==", + "version": "11.1.2", + "resolved": "https://registry.npmjs.org/@chevrotain/gast/-/gast-11.1.2.tgz", + "integrity": "sha512-Z9zfXR5jNZb1Hlsd/p+4XWeUFugrHirq36bKzPWDSIacV+GPSVXdk+ahVWZTwjhNwofAWg/sZg58fyucKSQx5g==", "license": "Apache-2.0", "dependencies": { - "@chevrotain/types": "11.1.1", + "@chevrotain/types": "11.1.2", "lodash-es": "4.17.23" } }, "node_modules/@chevrotain/regexp-to-ast": { - "version": "11.1.1", - "resolved": "https://registry.npmjs.org/@chevrotain/regexp-to-ast/-/regexp-to-ast-11.1.1.tgz", - "integrity": "sha512-ctRw1OKSXkOrR8VTvOxrQ5USEc4sNrfwXHa1NuTcR7wre4YbjPcKw+82C2uylg/TEwFRgwLmbhlln4qkmDyteg==", + "version": "11.1.2", + "resolved": "https://registry.npmjs.org/@chevrotain/regexp-to-ast/-/regexp-to-ast-11.1.2.tgz", + "integrity": "sha512-nMU3Uj8naWer7xpZTYJdxbAs6RIv/dxYzkYU8GSwgUtcAAlzjcPfX1w+RKRcYG8POlzMeayOQ/znfwxEGo5ulw==", "license": "Apache-2.0" }, "node_modules/@chevrotain/types": { - "version": "11.1.1", - "resolved": "https://registry.npmjs.org/@chevrotain/types/-/types-11.1.1.tgz", - "integrity": "sha512-wb2ToxG8LkgPYnKe9FH8oGn3TMCBdnwiuNC5l5y+CtlaVRbCytU0kbVsk6CGrqTL4ZN4ksJa0TXOYbxpbthtqw==", + "version": "11.1.2", + "resolved": "https://registry.npmjs.org/@chevrotain/types/-/types-11.1.2.tgz", + "integrity": "sha512-U+HFai5+zmJCkK86QsaJtoITlboZHBqrVketcO2ROv865xfCMSFpELQoz1GkX5GzME8pTa+3kbKrZHQtI0gdbw==", "license": "Apache-2.0" }, "node_modules/@chevrotain/utils": { - "version": "11.1.1", - "resolved": "https://registry.npmjs.org/@chevrotain/utils/-/utils-11.1.1.tgz", - "integrity": "sha512-71eTYMzYXYSFPrbg/ZwftSaSDld7UYlS8OQa3lNnn9jzNtpFbaReRRyghzqS7rI3CDaorqpPJJcXGHK+FE1TVQ==", + "version": "11.1.2", + "resolved": "https://registry.npmjs.org/@chevrotain/utils/-/utils-11.1.2.tgz", + "integrity": "sha512-4mudFAQ6H+MqBTfqLmU7G1ZwRzCLfJEooL/fsF6rCX5eePMbGhoy5n4g+G4vlh2muDcsCTJtL+uKbOzWxs5LHA==", "license": "Apache-2.0" }, "node_modules/@csstools/color-helpers": { @@ -1159,9 +1500,9 @@ } }, "node_modules/@csstools/css-syntax-patches-for-csstree": { - "version": "1.0.28", - "resolved": "https://registry.npmjs.org/@csstools/css-syntax-patches-for-csstree/-/css-syntax-patches-for-csstree-1.0.28.tgz", - "integrity": "sha512-1NRf1CUBjnr3K7hu8BLxjQrKCxEe8FP/xmPTenAxCRZWVLbmGotkFvG9mfNpjA6k7Bw1bw4BilZq9cu19RA5pg==", + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@csstools/css-syntax-patches-for-csstree/-/css-syntax-patches-for-csstree-1.1.2.tgz", + "integrity": "sha512-5GkLzz4prTIpoyeUiIu3iV6CSG3Plo7xRVOFPKI7FVEJ3mZ0A8SwK0XU3Gl7xAkiQ+mDyam+NNp875/C5y+jSA==", "dev": true, "funding": [ { @@ -1173,7 +1514,15 @@ "url": "https://opencollective.com/csstools" } ], - "license": "MIT-0" + "license": "MIT-0", + "peerDependencies": { + "css-tree": "^3.2.1" + }, + "peerDependenciesMeta": { + "css-tree": { + "optional": true + } + } }, "node_modules/@csstools/css-tokenizer": { "version": "4.0.0", @@ -1684,9 +2033,9 @@ } }, "node_modules/@exodus/bytes": { - "version": "1.14.1", - "resolved": "https://registry.npmjs.org/@exodus/bytes/-/bytes-1.14.1.tgz", - "integrity": "sha512-OhkBFWI6GcRMUroChZiopRiSp2iAMvEBK47NhJooDqz1RERO4QuZIZnjP63TXX8GAiLABkYmX+fuQsdJ1dd2QQ==", + "version": "1.15.0", + "resolved": "https://registry.npmjs.org/@exodus/bytes/-/bytes-1.15.0.tgz", + "integrity": "sha512-UY0nlA+feH81UGSHv92sLEPLCeZFjXOuHhrIo0HQydScuQc8s0A7kL/UdgwgDq8g8ilksmuoF35YVTNphV2aBQ==", "dev": true, "license": "MIT", "engines": { @@ -2293,9 +2642,9 @@ ] }, "node_modules/@mermaid-js/parser": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/@mermaid-js/parser/-/parser-1.0.0.tgz", - "integrity": "sha512-vvK0Hi/VWndxoh03Mmz6wa1KDriSPjS2XMZL/1l19HFwygiObEEoEwSDxOqyLzzAI6J2PU3261JjTMTO7x+BPw==", + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@mermaid-js/parser/-/parser-1.0.1.tgz", + "integrity": "sha512-opmV19kN1JsK0T6HhhokHpcVkqKpF+x2pPDKKM2ThHtZAB5F4PROopk0amuVYK5qMrIA4erzpNm8gmPNJgMDxQ==", "license": "MIT", "dependencies": { "langium": "^4.0.0" @@ -2773,9 +3122,9 @@ } }, "node_modules/@ng-icons/core": { - "version": "33.1.0", - "resolved": "https://registry.npmjs.org/@ng-icons/core/-/core-33.1.0.tgz", - "integrity": "sha512-kNkVt+saULAZp6f84PXmMRbNiiBVdqpM71ajiTAyTqOZCjKxXJe6gnpBfuOd0QvnkxEKSkFuswu4xFYbRPVXUQ==", + "version": "33.2.0", + "resolved": "https://registry.npmjs.org/@ng-icons/core/-/core-33.2.0.tgz", + "integrity": "sha512-BdAzCKZzLKuRPbZLBTmuBuCVI0Fk0WM+sPNzmhWv3PG+yXUKPpRt3O3dCIxqO5djb9S/x50OJkfOJH+IBxLUgg==", "license": "MIT", "dependencies": { "tslib": "^2.3.0" @@ -2789,9 +3138,9 @@ } }, "node_modules/@ng-icons/heroicons": { - "version": "33.1.0", - "resolved": "https://registry.npmjs.org/@ng-icons/heroicons/-/heroicons-33.1.0.tgz", - "integrity": "sha512-KjctPPaOoFC08p1KcTt+Htib0vQ9yFv17NK1CGk7If6sJgfSsromMgwV/enG0nISBlzubBaxX/7R9GGrR5iKNw==", + "version": "33.2.0", + "resolved": "https://registry.npmjs.org/@ng-icons/heroicons/-/heroicons-33.2.0.tgz", + "integrity": "sha512-Xde6SghRO8m46OxEWJ21UPH94o7wt6V3j2Y8pZ0igfDsMRMHBpQquQqVwcCvo/d1obSdvvq3YPvus4Z2gJ7TUg==", "license": "MIT", "dependencies": { "tslib": "^2.3.0" @@ -2962,57 +3311,1020 @@ "node": ">=20" } }, - "node_modules/@npmcli/promise-spawn/node_modules/which": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/which/-/which-6.0.1.tgz", - "integrity": "sha512-oGLe46MIrCRqX7ytPUf66EAYvdeMIZYn3WaocqqKZAxrBpkqHfL/qvTyJ/bTk5+AqHCjXmrv3CEWgy368zhRUg==", + "node_modules/@npmcli/promise-spawn/node_modules/which": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/which/-/which-6.0.1.tgz", + "integrity": "sha512-oGLe46MIrCRqX7ytPUf66EAYvdeMIZYn3WaocqqKZAxrBpkqHfL/qvTyJ/bTk5+AqHCjXmrv3CEWgy368zhRUg==", + "dev": true, + "license": "ISC", + "dependencies": { + "isexe": "^4.0.0" + }, + "bin": { + "node-which": "bin/which.js" + }, + "engines": { + "node": "^20.17.0 || >=22.9.0" + } + }, + "node_modules/@npmcli/redact": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@npmcli/redact/-/redact-4.0.0.tgz", + "integrity": "sha512-gOBg5YHMfZy+TfHArfVogwgfBeQnKbbGo3pSUyK/gSI0AVu+pEiDVcKlQb0D8Mg1LNRZILZ6XG8I5dJ4KuAd9Q==", + "dev": true, + "license": "ISC", + "engines": { + "node": "^20.17.0 || >=22.9.0" + } + }, + "node_modules/@npmcli/run-script": { + "version": "10.0.4", + "resolved": "https://registry.npmjs.org/@npmcli/run-script/-/run-script-10.0.4.tgz", + "integrity": "sha512-mGUWr1uMnf0le2TwfOZY4SFxZGXGfm4Jtay/nwAa2FLNAKXUoUwaGwBMNH36UHPtinWfTSJ3nqFQr0091CxVGg==", + "dev": true, + "license": "ISC", + "dependencies": { + "@npmcli/node-gyp": "^5.0.0", + "@npmcli/package-json": "^7.0.0", + "@npmcli/promise-spawn": "^9.0.0", + "node-gyp": "^12.1.0", + "proc-log": "^6.0.0" + }, + "engines": { + "node": "^20.17.0 || >=22.9.0" + } + }, + "node_modules/@oxc-parser/binding-android-arm-eabi": { + "version": "0.121.0", + "resolved": "https://registry.npmjs.org/@oxc-parser/binding-android-arm-eabi/-/binding-android-arm-eabi-0.121.0.tgz", + "integrity": "sha512-n07FQcySwOlzap424/PLMtOkbS7xOu8nsJduKL8P3COGHKgKoDYXwoAHCbChfgFpHnviehrLWIPX0lKGtbEk/A==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@oxc-parser/binding-android-arm64": { + "version": "0.121.0", + "resolved": "https://registry.npmjs.org/@oxc-parser/binding-android-arm64/-/binding-android-arm64-0.121.0.tgz", + "integrity": "sha512-/Dd1xIXboYAicw+twT2utxPD7bL8qh7d3ej0qvaYIMj3/EgIrGR+tSnjCUkiCT6g6uTC0neSS4JY8LxhdSU/sA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@oxc-parser/binding-darwin-arm64": { + "version": "0.121.0", + "resolved": "https://registry.npmjs.org/@oxc-parser/binding-darwin-arm64/-/binding-darwin-arm64-0.121.0.tgz", + "integrity": "sha512-A0jNEvv7QMtCO1yk205t3DWU9sWUjQ2KNF0hSVO5W9R9r/R1BIvzG01UQAfmtC0dQm7sCrs5puixurKSfr2bRQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@oxc-parser/binding-darwin-x64": { + "version": "0.121.0", + "resolved": "https://registry.npmjs.org/@oxc-parser/binding-darwin-x64/-/binding-darwin-x64-0.121.0.tgz", + "integrity": "sha512-SsHzipdxTKUs3I9EOAPmnIimEeJOemqRlRDOp9LIj+96wtxZejF51gNibmoGq8KoqbT1ssAI5po/E3J+vEtXGA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@oxc-parser/binding-freebsd-x64": { + "version": "0.121.0", + "resolved": "https://registry.npmjs.org/@oxc-parser/binding-freebsd-x64/-/binding-freebsd-x64-0.121.0.tgz", + "integrity": "sha512-v1APOTkCp+RWOIDAHRoaeW/UoaHF15a60E8eUL6kUQXh+i4K7PBwq2Wi7jm8p0ymID5/m/oC1w3W31Z/+r7HQw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@oxc-parser/binding-linux-arm-gnueabihf": { + "version": "0.121.0", + "resolved": "https://registry.npmjs.org/@oxc-parser/binding-linux-arm-gnueabihf/-/binding-linux-arm-gnueabihf-0.121.0.tgz", + "integrity": "sha512-PmqPQuqHZyFVWA4ycr0eu4VnTMmq9laOHZd+8R359w6kzuNZPvmmunmNJ8ybkm769A0nCoVp3TJ6dUz7B3FYIQ==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@oxc-parser/binding-linux-arm-musleabihf": { + "version": "0.121.0", + "resolved": "https://registry.npmjs.org/@oxc-parser/binding-linux-arm-musleabihf/-/binding-linux-arm-musleabihf-0.121.0.tgz", + "integrity": "sha512-vF24htj+MOH+Q7y9A8NuC6pUZu8t/C2Fr/kDOi2OcNf28oogr2xadBPXAbml802E8wRAVfbta6YLDQTearz+jw==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@oxc-parser/binding-linux-arm64-gnu": { + "version": "0.121.0", + "resolved": "https://registry.npmjs.org/@oxc-parser/binding-linux-arm64-gnu/-/binding-linux-arm64-gnu-0.121.0.tgz", + "integrity": "sha512-wjH8cIG2Lu/3d64iZpbYr73hREMgKAfu7fqpXjgM2S16y2zhTfDIp8EQjxO8vlDtKP5Rc7waZW72lh8nZtWrpA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@oxc-parser/binding-linux-arm64-musl": { + "version": "0.121.0", + "resolved": "https://registry.npmjs.org/@oxc-parser/binding-linux-arm64-musl/-/binding-linux-arm64-musl-0.121.0.tgz", + "integrity": "sha512-qT663J/W8yQFw3dtscbEi9LKJevr20V7uWs2MPGTnvNZ3rm8anhhE16gXGpxDOHeg9raySaSHKhd4IGa3YZvuw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@oxc-parser/binding-linux-ppc64-gnu": { + "version": "0.121.0", + "resolved": "https://registry.npmjs.org/@oxc-parser/binding-linux-ppc64-gnu/-/binding-linux-ppc64-gnu-0.121.0.tgz", + "integrity": "sha512-mYNe4NhVvDBbPkAP8JaVS8lC1dsoJZWH5WCjpw5E+sjhk1R08wt3NnXYUzum7tIiWPfgQxbCMcoxgeemFASbRw==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@oxc-parser/binding-linux-riscv64-gnu": { + "version": "0.121.0", + "resolved": "https://registry.npmjs.org/@oxc-parser/binding-linux-riscv64-gnu/-/binding-linux-riscv64-gnu-0.121.0.tgz", + "integrity": "sha512-+QiFoGxhAbaI/amqX567784cDyyuZIpinBrJNxUzb+/L2aBRX67mN6Jv40pqduHf15yYByI+K5gUEygCuv0z9w==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@oxc-parser/binding-linux-riscv64-musl": { + "version": "0.121.0", + "resolved": "https://registry.npmjs.org/@oxc-parser/binding-linux-riscv64-musl/-/binding-linux-riscv64-musl-0.121.0.tgz", + "integrity": "sha512-9ykEgyTa5JD/Uhv2sttbKnCfl2PieUfOjyxJC/oDL2UO0qtXOtjPLl7H8Kaj5G7p3hIvFgu3YWvAxvE0sqY+hQ==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@oxc-parser/binding-linux-s390x-gnu": { + "version": "0.121.0", + "resolved": "https://registry.npmjs.org/@oxc-parser/binding-linux-s390x-gnu/-/binding-linux-s390x-gnu-0.121.0.tgz", + "integrity": "sha512-DB1EW5VHZdc1lIRjOI3bW/wV6R6y0xlfvdVrqj6kKi7Ayu2U3UqUBdq9KviVkcUGd5Oq+dROqvUEEFRXGAM7EQ==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@oxc-parser/binding-linux-x64-gnu": { + "version": "0.121.0", + "resolved": "https://registry.npmjs.org/@oxc-parser/binding-linux-x64-gnu/-/binding-linux-x64-gnu-0.121.0.tgz", + "integrity": "sha512-s4lfobX9p4kPTclvMiH3gcQUd88VlnkMTF6n2MTMDAyX5FPNRhhRSFZK05Ykhf8Zy5NibV4PbGR6DnK7FGNN6A==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@oxc-parser/binding-linux-x64-musl": { + "version": "0.121.0", + "resolved": "https://registry.npmjs.org/@oxc-parser/binding-linux-x64-musl/-/binding-linux-x64-musl-0.121.0.tgz", + "integrity": "sha512-P9KlyTpuBuMi3NRGpJO8MicuGZfOoqZVRP1WjOecwx8yk4L/+mrCRNc5egSi0byhuReblBF2oVoDSMgV9Bj4Hw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@oxc-parser/binding-openharmony-arm64": { + "version": "0.121.0", + "resolved": "https://registry.npmjs.org/@oxc-parser/binding-openharmony-arm64/-/binding-openharmony-arm64-0.121.0.tgz", + "integrity": "sha512-R+4jrWOfF2OAPPhj3Eb3U5CaKNAH9/btMveMULIrcNW/hjfysFQlF8wE0GaVBr81dWz8JLgQlsxwctoL78JwXw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@oxc-parser/binding-wasm32-wasi": { + "version": "0.121.0", + "resolved": "https://registry.npmjs.org/@oxc-parser/binding-wasm32-wasi/-/binding-wasm32-wasi-0.121.0.tgz", + "integrity": "sha512-5TFISkPTymKvsmIlKasPVTPuWxzCcrT8pM+p77+mtQbIZDd1UC8zww4CJcRI46kolmgrEX6QpKO8AvWMVZ+ifw==", + "cpu": [ + "wasm32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "@napi-rs/wasm-runtime": "^1.1.1" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@oxc-parser/binding-win32-arm64-msvc": { + "version": "0.121.0", + "resolved": "https://registry.npmjs.org/@oxc-parser/binding-win32-arm64-msvc/-/binding-win32-arm64-msvc-0.121.0.tgz", + "integrity": "sha512-V0pxh4mql4XTt3aiEtRNUeBAUFOw5jzZNxPABLaOKAWrVzSr9+XUaB095lY7jqMf5t8vkfh8NManGB28zanYKw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@oxc-parser/binding-win32-ia32-msvc": { + "version": "0.121.0", + "resolved": "https://registry.npmjs.org/@oxc-parser/binding-win32-ia32-msvc/-/binding-win32-ia32-msvc-0.121.0.tgz", + "integrity": "sha512-4Ob1qvYMPnlF2N9rdmKdkQFdrq16QVcQwBsO8yiPZXof0fHKFF+LmQV501XFbi7lHyrKm8rlJRfQ/M8bZZPVLw==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@oxc-parser/binding-win32-x64-msvc": { + "version": "0.121.0", + "resolved": "https://registry.npmjs.org/@oxc-parser/binding-win32-x64-msvc/-/binding-win32-x64-msvc-0.121.0.tgz", + "integrity": "sha512-BOp1KCzdboB1tPqoCPXgntgFs0jjeSyOXHzgxVFR7B/qfr3F8r4YDacHkTOUNXtDgM8YwKnkf3rE5gwALYX7NA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@oxc-project/types": { + "version": "0.113.0", + "resolved": "https://registry.npmjs.org/@oxc-project/types/-/types-0.113.0.tgz", + "integrity": "sha512-Tp3XmgxwNQ9pEN9vxgJBAqdRamHibi76iowQ38O2I4PMpcvNRQNVsU2n1x1nv9yh0XoTrGFzf7cZSGxmixxrhA==", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/Boshen" + } + }, + "node_modules/@oxc-resolver/binding-android-arm-eabi": { + "version": "11.19.1", + "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-android-arm-eabi/-/binding-android-arm-eabi-11.19.1.tgz", + "integrity": "sha512-aUs47y+xyXHUKlbhqHUjBABjvycq6YSD7bpxSW7vplUmdzAlJ93yXY6ZR0c1o1x5A/QKbENCvs3+NlY8IpIVzg==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@oxc-resolver/binding-android-arm64": { + "version": "11.19.1", + "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-android-arm64/-/binding-android-arm64-11.19.1.tgz", + "integrity": "sha512-oolbkRX+m7Pq2LNjr/kKgYeC7bRDMVTWPgxBGMjSpZi/+UskVo4jsMU3MLheZV55jL6c3rNelPl4oD60ggYmqA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@oxc-resolver/binding-darwin-arm64": { + "version": "11.19.1", + "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-darwin-arm64/-/binding-darwin-arm64-11.19.1.tgz", + "integrity": "sha512-nUC6d2i3R5B12sUW4O646qD5cnMXf2oBGPLIIeaRfU9doJRORAbE2SGv4eW6rMqhD+G7nf2Y8TTJTLiiO3Q/dQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@oxc-resolver/binding-darwin-x64": { + "version": "11.19.1", + "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-darwin-x64/-/binding-darwin-x64-11.19.1.tgz", + "integrity": "sha512-cV50vE5+uAgNcFa3QY1JOeKDSkM/9ReIcc/9wn4TavhW/itkDGrXhw9jaKnkQnGbjJ198Yh5nbX/Gr2mr4Z5jQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@oxc-resolver/binding-freebsd-x64": { + "version": "11.19.1", + "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-freebsd-x64/-/binding-freebsd-x64-11.19.1.tgz", + "integrity": "sha512-xZOQiYGFxtk48PBKff+Zwoym7ScPAIVp4c14lfLxizO2LTTTJe5sx9vQNGrBymrf/vatSPNMD4FgsaaRigPkqw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/@oxc-resolver/binding-linux-arm-gnueabihf": { + "version": "11.19.1", + "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-linux-arm-gnueabihf/-/binding-linux-arm-gnueabihf-11.19.1.tgz", + "integrity": "sha512-lXZYWAC6kaGe/ky2su94e9jN9t6M0/6c+GrSlCqL//XO1cxi5lpAhnJYdyrKfm0ZEr/c7RNyAx3P7FSBcBd5+A==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@oxc-resolver/binding-linux-arm-musleabihf": { + "version": "11.19.1", + "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-linux-arm-musleabihf/-/binding-linux-arm-musleabihf-11.19.1.tgz", + "integrity": "sha512-veG1kKsuK5+t2IsO9q0DErYVSw2azvCVvWHnfTOS73WE0STdLLB7Q1bB9WR+yHPQM76ASkFyRbogWo1GR1+WbQ==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@oxc-resolver/binding-linux-arm64-gnu": { + "version": "11.19.1", + "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-linux-arm64-gnu/-/binding-linux-arm64-gnu-11.19.1.tgz", + "integrity": "sha512-heV2+jmXyYnUrpUXSPugqWDRpnsQcDm2AX4wzTuvgdlZfoNYO0O3W2AVpJYaDn9AG4JdM6Kxom8+foE7/BcSig==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@oxc-resolver/binding-linux-arm64-musl": { + "version": "11.19.1", + "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-linux-arm64-musl/-/binding-linux-arm64-musl-11.19.1.tgz", + "integrity": "sha512-jvo2Pjs1c9KPxMuMPIeQsgu0mOJF9rEb3y3TdpsrqwxRM+AN6/nDDwv45n5ZrUnQMsdBy5gIabioMKnQfWo9ew==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@oxc-resolver/binding-linux-ppc64-gnu": { + "version": "11.19.1", + "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-linux-ppc64-gnu/-/binding-linux-ppc64-gnu-11.19.1.tgz", + "integrity": "sha512-vLmdNxWCdN7Uo5suays6A/+ywBby2PWBBPXctWPg5V0+eVuzsJxgAn6MMB4mPlshskYbppjpN2Zg83ArHze9gQ==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@oxc-resolver/binding-linux-riscv64-gnu": { + "version": "11.19.1", + "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-linux-riscv64-gnu/-/binding-linux-riscv64-gnu-11.19.1.tgz", + "integrity": "sha512-/b+WgR+VTSBxzgOhDO7TlMXC1ufPIMR6Vj1zN+/x+MnyXGW7prTLzU9eW85Aj7Th7CCEG9ArCbTeqxCzFWdg2w==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@oxc-resolver/binding-linux-riscv64-musl": { + "version": "11.19.1", + "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-linux-riscv64-musl/-/binding-linux-riscv64-musl-11.19.1.tgz", + "integrity": "sha512-YlRdeWb9j42p29ROh+h4eg/OQ3dTJlpHSa+84pUM9+p6i3djtPz1q55yLJhgW9XfDch7FN1pQ/Vd6YP+xfRIuw==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@oxc-resolver/binding-linux-s390x-gnu": { + "version": "11.19.1", + "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-linux-s390x-gnu/-/binding-linux-s390x-gnu-11.19.1.tgz", + "integrity": "sha512-EDpafVOQWF8/MJynsjOGFThcqhRHy417sRyLfQmeiamJ8qVhSKAn2Dn2VVKUGCjVB9C46VGjhNo7nOPUi1x6uA==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@oxc-resolver/binding-linux-x64-gnu": { + "version": "11.19.1", + "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-linux-x64-gnu/-/binding-linux-x64-gnu-11.19.1.tgz", + "integrity": "sha512-NxjZe+rqWhr+RT8/Ik+5ptA3oz7tUw361Wa5RWQXKnfqwSSHdHyrw6IdcTfYuml9dM856AlKWZIUXDmA9kkiBQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@oxc-resolver/binding-linux-x64-musl": { + "version": "11.19.1", + "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-linux-x64-musl/-/binding-linux-x64-musl-11.19.1.tgz", + "integrity": "sha512-cM/hQwsO3ReJg5kR+SpI69DMfvNCp+A/eVR4b4YClE5bVZwz8rh2Nh05InhwI5HR/9cArbEkzMjcKgTHS6UaNw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@oxc-resolver/binding-openharmony-arm64": { + "version": "11.19.1", + "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-openharmony-arm64/-/binding-openharmony-arm64-11.19.1.tgz", + "integrity": "sha512-QF080IowFB0+9Rh6RcD19bdgh49BpQHUW5TajG1qvWHvmrQznTZZjYlgE2ltLXyKY+qs4F/v5xuX1XS7Is+3qA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ] + }, + "node_modules/@oxc-resolver/binding-wasm32-wasi": { + "version": "11.19.1", + "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-wasm32-wasi/-/binding-wasm32-wasi-11.19.1.tgz", + "integrity": "sha512-w8UCKhX826cP/ZLokXDS6+milN8y4X7zidsAttEdWlVoamTNf6lhBJldaWr3ukTDiye7s4HRcuPEPOXNC432Vg==", + "cpu": [ + "wasm32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "@napi-rs/wasm-runtime": "^1.1.1" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@oxc-resolver/binding-win32-arm64-msvc": { + "version": "11.19.1", + "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-win32-arm64-msvc/-/binding-win32-arm64-msvc-11.19.1.tgz", + "integrity": "sha512-nJ4AsUVZrVKwnU/QRdzPCCrO0TrabBqgJ8pJhXITdZGYOV28TIYystV1VFLbQ7DtAcaBHpocT5/ZJnF78YJPtQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@oxc-resolver/binding-win32-ia32-msvc": { + "version": "11.19.1", + "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-win32-ia32-msvc/-/binding-win32-ia32-msvc-11.19.1.tgz", + "integrity": "sha512-EW+ND5q2Tl+a3pH81l1QbfgbF3HmqgwLfDfVithRFheac8OTcnbXt/JxqD2GbDkb7xYEqy1zNaVFRr3oeG8npA==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@oxc-resolver/binding-win32-x64-msvc": { + "version": "11.19.1", + "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-win32-x64-msvc/-/binding-win32-x64-msvc-11.19.1.tgz", + "integrity": "sha512-6hIU3RQu45B+VNTY4Ru8ppFwjVS/S5qwYyGhBotmjxfEKk41I2DlGtRfGJndZ5+6lneE2pwloqunlOyZuX/XAw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@oxc-transform/binding-android-arm-eabi": { + "version": "0.121.0", + "resolved": "https://registry.npmjs.org/@oxc-transform/binding-android-arm-eabi/-/binding-android-arm-eabi-0.121.0.tgz", + "integrity": "sha512-NNYkyDjTID7oVW0LUZ04kDShtyY6hgsTakd2u3mz/hN765JviCuyBIi5qT9dDOmgX0t1y74nuS7FwiLgaCcZ4g==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@oxc-transform/binding-android-arm64": { + "version": "0.121.0", + "resolved": "https://registry.npmjs.org/@oxc-transform/binding-android-arm64/-/binding-android-arm64-0.121.0.tgz", + "integrity": "sha512-zO5az3E5JUmF/k7xOOL9TCipqaVn/d8QHK5T8/bcw6qTWAPVFJjQRK8+5MSmp2ItO2Dmxed5DdWMSxG2NNfA5w==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@oxc-transform/binding-darwin-arm64": { + "version": "0.121.0", + "resolved": "https://registry.npmjs.org/@oxc-transform/binding-darwin-arm64/-/binding-darwin-arm64-0.121.0.tgz", + "integrity": "sha512-3vcZdmL8OAdYzXfPDeXrO9KagTgUbXPSFXotoww9N0jVNbdCvSpKJHia1aqdltyevrCWF4KqJyOeeUfGcw7AJw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@oxc-transform/binding-darwin-x64": { + "version": "0.121.0", + "resolved": "https://registry.npmjs.org/@oxc-transform/binding-darwin-x64/-/binding-darwin-x64-0.121.0.tgz", + "integrity": "sha512-R63ZXF4Fuer3FEZYX9UmzIKAENSEYQZTglTkzWoyNPyuHDhSfyJIK+X+wgy2Wc1lTad1XquCUq5SDuRSd37fcQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@oxc-transform/binding-freebsd-x64": { + "version": "0.121.0", + "resolved": "https://registry.npmjs.org/@oxc-transform/binding-freebsd-x64/-/binding-freebsd-x64-0.121.0.tgz", + "integrity": "sha512-0krk8L6iOJ6fobs3f9XHo4RSgEas0yLq9/xGZMuwxFs+rI/rnpYPX+1LLSmreHqeZM77a7r+UF12WjwI1odVUA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@oxc-transform/binding-linux-arm-gnueabihf": { + "version": "0.121.0", + "resolved": "https://registry.npmjs.org/@oxc-transform/binding-linux-arm-gnueabihf/-/binding-linux-arm-gnueabihf-0.121.0.tgz", + "integrity": "sha512-cNkTaw77UaNiGOCIv2R1kHZ3OkTVlr/059agLCUaeQmZGl76Ad7DrDcDyhC0Iugw0jEdWZ9zeUS5VLmzblnTXQ==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@oxc-transform/binding-linux-arm-musleabihf": { + "version": "0.121.0", + "resolved": "https://registry.npmjs.org/@oxc-transform/binding-linux-arm-musleabihf/-/binding-linux-arm-musleabihf-0.121.0.tgz", + "integrity": "sha512-eDwTIN0UUCQePgFR41doxorzsxoMoUTbXo6bEbvdFH7P4ZoaUXgHYN10Qjd9K6k0x/bBnU6oC4YPSWYKvQDr9Q==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@oxc-transform/binding-linux-arm64-gnu": { + "version": "0.121.0", + "resolved": "https://registry.npmjs.org/@oxc-transform/binding-linux-arm64-gnu/-/binding-linux-arm64-gnu-0.121.0.tgz", + "integrity": "sha512-UthSp+L23xeV0lIVloiRDU1d3aOvq0KRif3s6vszeSGnWf69+EVcZcondqLuX9optUhKV0/L8xwe2wLr9WkaDA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@oxc-transform/binding-linux-arm64-musl": { + "version": "0.121.0", + "resolved": "https://registry.npmjs.org/@oxc-transform/binding-linux-arm64-musl/-/binding-linux-arm64-musl-0.121.0.tgz", + "integrity": "sha512-J5vKUF8Jml1m9Fl48fKp2/wPl8LhGdjJWZ3PrrT+S16SbW7yEKixq5upzO2arhrky5elRYMXWwfi60ex1tBi6g==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@oxc-transform/binding-linux-ppc64-gnu": { + "version": "0.121.0", + "resolved": "https://registry.npmjs.org/@oxc-transform/binding-linux-ppc64-gnu/-/binding-linux-ppc64-gnu-0.121.0.tgz", + "integrity": "sha512-ya+/TL/YH/VcfWeRs95pMIgEj1eQgKg3kR/9AkQgSi8i9jIDEXrgrcQ8cwRYSZ3THlT6cxe3KGJa6vwcHG6JEg==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@oxc-transform/binding-linux-riscv64-gnu": { + "version": "0.121.0", + "resolved": "https://registry.npmjs.org/@oxc-transform/binding-linux-riscv64-gnu/-/binding-linux-riscv64-gnu-0.121.0.tgz", + "integrity": "sha512-XhUBS/6bxL3maLMvkyY5jM23jFCORl+noYc7KkMydpb0Ot08XSu+8c2o7QpGVHWf85eTH/1Tx0aOTrcWek7EAw==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@oxc-transform/binding-linux-riscv64-musl": { + "version": "0.121.0", + "resolved": "https://registry.npmjs.org/@oxc-transform/binding-linux-riscv64-musl/-/binding-linux-riscv64-musl-0.121.0.tgz", + "integrity": "sha512-kAcZZrU2Wxopcpt38D1u5OeLUwV78EXyOu3VfFNkP/vrMiKB4Tbca8ZxBq+XTkpijuKE4DdCQaLZylsFj7L00w==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@oxc-transform/binding-linux-s390x-gnu": { + "version": "0.121.0", + "resolved": "https://registry.npmjs.org/@oxc-transform/binding-linux-s390x-gnu/-/binding-linux-s390x-gnu-0.121.0.tgz", + "integrity": "sha512-jHyHS+NwPAlUEuY6BzFBDoT4LfSBEW/Ne2FeMzdK8LXOvgHFrJiBf6x8FgekatrTGrDpy1hLiACNnPA81Hs2pQ==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@oxc-transform/binding-linux-x64-gnu": { + "version": "0.121.0", + "resolved": "https://registry.npmjs.org/@oxc-transform/binding-linux-x64-gnu/-/binding-linux-x64-gnu-0.121.0.tgz", + "integrity": "sha512-KedV2jkFxeMvUqfh6SgXjCnO5SBZ+SorTUxSBeql7zp59ONZgAcehWAqDX+YWsK8wEpt23Q8ydC/0d6ebJIAzQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@oxc-transform/binding-linux-x64-musl": { + "version": "0.121.0", + "resolved": "https://registry.npmjs.org/@oxc-transform/binding-linux-x64-musl/-/binding-linux-x64-musl-0.121.0.tgz", + "integrity": "sha512-jFAZwvgjsswiHET2xxxNvxhKCI74yVmewl0F00i3vzt9C088ZVaUvvWlqDS1GRvD4ORBmpJWOYkHdscpIJijEA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@oxc-transform/binding-openharmony-arm64": { + "version": "0.121.0", + "resolved": "https://registry.npmjs.org/@oxc-transform/binding-openharmony-arm64/-/binding-openharmony-arm64-0.121.0.tgz", + "integrity": "sha512-xn9nxaq31f19PUyGh1xKMOSs8MVPImeaESWNOHtAIznckE+qa5/oHtYALzF3z8uvy1EC/eZODWcHrsYOVNaWug==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@oxc-transform/binding-wasm32-wasi": { + "version": "0.121.0", + "resolved": "https://registry.npmjs.org/@oxc-transform/binding-wasm32-wasi/-/binding-wasm32-wasi-0.121.0.tgz", + "integrity": "sha512-7lj6FBMX8zLfTqIY4YHHTE/b6oyCzZaUwqi2n9KX4FkgjtBpfmq5KSUgi/I+YiE7JJHu1g8Bd3uWJq1lbehL8Q==", + "cpu": [ + "wasm32" + ], "dev": true, - "license": "ISC", + "license": "MIT", + "optional": true, "dependencies": { - "isexe": "^4.0.0" - }, - "bin": { - "node-which": "bin/which.js" + "@napi-rs/wasm-runtime": "^1.1.1" }, "engines": { - "node": "^20.17.0 || >=22.9.0" + "node": ">=14.0.0" } }, - "node_modules/@npmcli/redact": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/@npmcli/redact/-/redact-4.0.0.tgz", - "integrity": "sha512-gOBg5YHMfZy+TfHArfVogwgfBeQnKbbGo3pSUyK/gSI0AVu+pEiDVcKlQb0D8Mg1LNRZILZ6XG8I5dJ4KuAd9Q==", + "node_modules/@oxc-transform/binding-win32-arm64-msvc": { + "version": "0.121.0", + "resolved": "https://registry.npmjs.org/@oxc-transform/binding-win32-arm64-msvc/-/binding-win32-arm64-msvc-0.121.0.tgz", + "integrity": "sha512-+ve3UajNq2ldcCEEmpMVn7Ic3v/qCykPTSx3lZfe0iCW6tisIWvkYiXpf6B5dvwSY7SDyrdt9EyPMS75b41iPA==", + "cpu": [ + "arm64" + ], "dev": true, - "license": "ISC", + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], "engines": { - "node": "^20.17.0 || >=22.9.0" + "node": "^20.19.0 || >=22.12.0" } }, - "node_modules/@npmcli/run-script": { - "version": "10.0.4", - "resolved": "https://registry.npmjs.org/@npmcli/run-script/-/run-script-10.0.4.tgz", - "integrity": "sha512-mGUWr1uMnf0le2TwfOZY4SFxZGXGfm4Jtay/nwAa2FLNAKXUoUwaGwBMNH36UHPtinWfTSJ3nqFQr0091CxVGg==", + "node_modules/@oxc-transform/binding-win32-ia32-msvc": { + "version": "0.121.0", + "resolved": "https://registry.npmjs.org/@oxc-transform/binding-win32-ia32-msvc/-/binding-win32-ia32-msvc-0.121.0.tgz", + "integrity": "sha512-9ZUHa4bXWlPRLzbjYsU3VBSvqwSVHAknQlN+nUO1DVu6j958Ui9ux0I9pZHwxb07I26VMdDhd7AjJyz1ZtZlkg==", + "cpu": [ + "ia32" + ], "dev": true, - "license": "ISC", - "dependencies": { - "@npmcli/node-gyp": "^5.0.0", - "@npmcli/package-json": "^7.0.0", - "@npmcli/promise-spawn": "^9.0.0", - "node-gyp": "^12.1.0", - "proc-log": "^6.0.0" - }, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], "engines": { - "node": "^20.17.0 || >=22.9.0" + "node": "^20.19.0 || >=22.12.0" } }, - "node_modules/@oxc-project/types": { - "version": "0.113.0", - "resolved": "https://registry.npmjs.org/@oxc-project/types/-/types-0.113.0.tgz", - "integrity": "sha512-Tp3XmgxwNQ9pEN9vxgJBAqdRamHibi76iowQ38O2I4PMpcvNRQNVsU2n1x1nv9yh0XoTrGFzf7cZSGxmixxrhA==", + "node_modules/@oxc-transform/binding-win32-x64-msvc": { + "version": "0.121.0", + "resolved": "https://registry.npmjs.org/@oxc-transform/binding-win32-x64-msvc/-/binding-win32-x64-msvc-0.121.0.tgz", + "integrity": "sha512-vV/rzJsmJeeXI1q/xuy93PnoL/IYMwCCyYMX9MmIgMx2a4Lu3vIjUNBLJx1R5CqP/NnvAelsuz05sKlO017FmQ==", + "cpu": [ + "x64" + ], "dev": true, "license": "MIT", - "funding": { - "url": "https://github.com/sponsors/Boshen" + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" } }, "node_modules/@parcel/watcher": { @@ -3452,6 +4764,40 @@ "node": "^20.19.0 || >=22.12.0" } }, + "node_modules/@rolldown/binding-linux-ppc64-gnu": { + "version": "1.0.0-rc.12", + "resolved": "https://registry.npmjs.org/@rolldown/binding-linux-ppc64-gnu/-/binding-linux-ppc64-gnu-1.0.0-rc.12.tgz", + "integrity": "sha512-AP3E9BpcUYliZCxa3w5Kwj9OtEVDYK6sVoUzy4vTOJsjPOgdaJZKFmN4oOlX0Wp0RPV2ETfmIra9x1xuayFB7g==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@rolldown/binding-linux-s390x-gnu": { + "version": "1.0.0-rc.12", + "resolved": "https://registry.npmjs.org/@rolldown/binding-linux-s390x-gnu/-/binding-linux-s390x-gnu-1.0.0-rc.12.tgz", + "integrity": "sha512-nWwpvUSPkoFmZo0kQazZYOrT7J5DGOJ/+QHHzjvNlooDZED8oH82Yg67HvehPPLAg5fUff7TfWFHQS8IV1n3og==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, "node_modules/@rolldown/binding-linux-x64-gnu": { "version": "1.0.0-rc.4", "resolved": "https://registry.npmjs.org/@rolldown/binding-linux-x64-gnu/-/binding-linux-x64-gnu-1.0.0-rc.4.tgz", @@ -3912,13 +5258,13 @@ ] }, "node_modules/@schematics/angular": { - "version": "21.2.3", - "resolved": "https://registry.npmjs.org/@schematics/angular/-/angular-21.2.3.tgz", - "integrity": "sha512-rCEprgpNbJLl9Rm/t92eRYc1eIqD4BAJqB1OO8fzQolyDajCcOBpohjXkuLYSwK9RMyS6f+szNnYGOQawlrPYw==", + "version": "21.2.5", + "resolved": "https://registry.npmjs.org/@schematics/angular/-/angular-21.2.5.tgz", + "integrity": "sha512-orOiXcG86t34ejqbkm7ZHEkGfwTU/ySYFgY7BOQdaYFCoNQXxtU87fZoHckJ2xYpVitoKTvbf1bxDDphXb3ycw==", "license": "MIT", "dependencies": { - "@angular-devkit/core": "21.2.3", - "@angular-devkit/schematics": "21.2.3", + "@angular-devkit/core": "21.2.5", + "@angular-devkit/schematics": "21.2.5", "jsonc-parser": "3.3.1" }, "engines": { @@ -4014,49 +5360,49 @@ "license": "MIT" }, "node_modules/@tailwindcss/node": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/@tailwindcss/node/-/node-4.2.1.tgz", - "integrity": "sha512-jlx6sLk4EOwO6hHe1oCGm1Q4AN/s0rSrTTPBGPM0/RQ6Uylwq17FuU8IeJJKEjtc6K6O07zsvP+gDO6MMWo7pg==", + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@tailwindcss/node/-/node-4.2.2.tgz", + "integrity": "sha512-pXS+wJ2gZpVXqFaUEjojq7jzMpTGf8rU6ipJz5ovJV6PUGmlJ+jvIwGrzdHdQ80Sg+wmQxUFuoW1UAAwHNEdFA==", "dev": true, "license": "MIT", "dependencies": { "@jridgewell/remapping": "^2.3.5", "enhanced-resolve": "^5.19.0", "jiti": "^2.6.1", - "lightningcss": "1.31.1", + "lightningcss": "1.32.0", "magic-string": "^0.30.21", "source-map-js": "^1.2.1", - "tailwindcss": "4.2.1" + "tailwindcss": "4.2.2" } }, "node_modules/@tailwindcss/oxide": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide/-/oxide-4.2.1.tgz", - "integrity": "sha512-yv9jeEFWnjKCI6/T3Oq50yQEOqmpmpfzG1hcZsAOaXFQPfzWprWrlHSdGPEF3WQTi8zu8ohC9Mh9J470nT5pUw==", + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide/-/oxide-4.2.2.tgz", + "integrity": "sha512-qEUA07+E5kehxYp9BVMpq9E8vnJuBHfJEC0vPC5e7iL/hw7HR61aDKoVoKzrG+QKp56vhNZe4qwkRmMC0zDLvg==", "dev": true, "license": "MIT", "engines": { "node": ">= 20" }, "optionalDependencies": { - "@tailwindcss/oxide-android-arm64": "4.2.1", - "@tailwindcss/oxide-darwin-arm64": "4.2.1", - "@tailwindcss/oxide-darwin-x64": "4.2.1", - "@tailwindcss/oxide-freebsd-x64": "4.2.1", - "@tailwindcss/oxide-linux-arm-gnueabihf": "4.2.1", - "@tailwindcss/oxide-linux-arm64-gnu": "4.2.1", - "@tailwindcss/oxide-linux-arm64-musl": "4.2.1", - "@tailwindcss/oxide-linux-x64-gnu": "4.2.1", - "@tailwindcss/oxide-linux-x64-musl": "4.2.1", - "@tailwindcss/oxide-wasm32-wasi": "4.2.1", - "@tailwindcss/oxide-win32-arm64-msvc": "4.2.1", - "@tailwindcss/oxide-win32-x64-msvc": "4.2.1" + "@tailwindcss/oxide-android-arm64": "4.2.2", + "@tailwindcss/oxide-darwin-arm64": "4.2.2", + "@tailwindcss/oxide-darwin-x64": "4.2.2", + "@tailwindcss/oxide-freebsd-x64": "4.2.2", + "@tailwindcss/oxide-linux-arm-gnueabihf": "4.2.2", + "@tailwindcss/oxide-linux-arm64-gnu": "4.2.2", + "@tailwindcss/oxide-linux-arm64-musl": "4.2.2", + "@tailwindcss/oxide-linux-x64-gnu": "4.2.2", + "@tailwindcss/oxide-linux-x64-musl": "4.2.2", + "@tailwindcss/oxide-wasm32-wasi": "4.2.2", + "@tailwindcss/oxide-win32-arm64-msvc": "4.2.2", + "@tailwindcss/oxide-win32-x64-msvc": "4.2.2" } }, "node_modules/@tailwindcss/oxide-android-arm64": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-android-arm64/-/oxide-android-arm64-4.2.1.tgz", - "integrity": "sha512-eZ7G1Zm5EC8OOKaesIKuw77jw++QJ2lL9N+dDpdQiAB/c/B2wDh0QPFHbkBVrXnwNugvrbJFk1gK2SsVjwWReg==", + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-android-arm64/-/oxide-android-arm64-4.2.2.tgz", + "integrity": "sha512-dXGR1n+P3B6748jZO/SvHZq7qBOqqzQ+yFrXpoOWWALWndF9MoSKAT3Q0fYgAzYzGhxNYOoysRvYlpixRBBoDg==", "cpu": [ "arm64" ], @@ -4071,9 +5417,9 @@ } }, "node_modules/@tailwindcss/oxide-darwin-arm64": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-darwin-arm64/-/oxide-darwin-arm64-4.2.1.tgz", - "integrity": "sha512-q/LHkOstoJ7pI1J0q6djesLzRvQSIfEto148ppAd+BVQK0JYjQIFSK3JgYZJa+Yzi0DDa52ZsQx2rqytBnf8Hw==", + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-darwin-arm64/-/oxide-darwin-arm64-4.2.2.tgz", + "integrity": "sha512-iq9Qjr6knfMpZHj55/37ouZeykwbDqF21gPFtfnhCCKGDcPI/21FKC9XdMO/XyBM7qKORx6UIhGgg6jLl7BZlg==", "cpu": [ "arm64" ], @@ -4088,9 +5434,9 @@ } }, "node_modules/@tailwindcss/oxide-darwin-x64": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-darwin-x64/-/oxide-darwin-x64-4.2.1.tgz", - "integrity": "sha512-/f/ozlaXGY6QLbpvd/kFTro2l18f7dHKpB+ieXz+Cijl4Mt9AI2rTrpq7V+t04nK+j9XBQHnSMdeQRhbGyt6fw==", + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-darwin-x64/-/oxide-darwin-x64-4.2.2.tgz", + "integrity": "sha512-BlR+2c3nzc8f2G639LpL89YY4bdcIdUmiOOkv2GQv4/4M0vJlpXEa0JXNHhCHU7VWOKWT/CjqHdTP8aUuDJkuw==", "cpu": [ "x64" ], @@ -4105,9 +5451,9 @@ } }, "node_modules/@tailwindcss/oxide-freebsd-x64": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-freebsd-x64/-/oxide-freebsd-x64-4.2.1.tgz", - "integrity": "sha512-5e/AkgYJT/cpbkys/OU2Ei2jdETCLlifwm7ogMC7/hksI2fC3iiq6OcXwjibcIjPung0kRtR3TxEITkqgn0TcA==", + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-freebsd-x64/-/oxide-freebsd-x64-4.2.2.tgz", + "integrity": "sha512-YUqUgrGMSu2CDO82hzlQ5qSb5xmx3RUrke/QgnoEx7KvmRJHQuZHZmZTLSuuHwFf0DJPybFMXMYf+WJdxHy/nQ==", "cpu": [ "x64" ], @@ -4122,9 +5468,9 @@ } }, "node_modules/@tailwindcss/oxide-linux-arm-gnueabihf": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-arm-gnueabihf/-/oxide-linux-arm-gnueabihf-4.2.1.tgz", - "integrity": "sha512-Uny1EcVTTmerCKt/1ZuKTkb0x8ZaiuYucg2/kImO5A5Y/kBz41/+j0gxUZl+hTF3xkWpDmHX+TaWhOtba2Fyuw==", + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-arm-gnueabihf/-/oxide-linux-arm-gnueabihf-4.2.2.tgz", + "integrity": "sha512-FPdhvsW6g06T9BWT0qTwiVZYE2WIFo2dY5aCSpjG/S/u1tby+wXoslXS0kl3/KXnULlLr1E3NPRRw0g7t2kgaQ==", "cpu": [ "arm" ], @@ -4139,9 +5485,9 @@ } }, "node_modules/@tailwindcss/oxide-linux-arm64-gnu": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-arm64-gnu/-/oxide-linux-arm64-gnu-4.2.1.tgz", - "integrity": "sha512-CTrwomI+c7n6aSSQlsPL0roRiNMDQ/YzMD9EjcR+H4f0I1SQ8QqIuPnsVp7QgMkC1Qi8rtkekLkOFjo7OlEFRQ==", + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-arm64-gnu/-/oxide-linux-arm64-gnu-4.2.2.tgz", + "integrity": "sha512-4og1V+ftEPXGttOO7eCmW7VICmzzJWgMx+QXAJRAhjrSjumCwWqMfkDrNu1LXEQzNAwz28NCUpucgQPrR4S2yw==", "cpu": [ "arm64" ], @@ -4156,9 +5502,9 @@ } }, "node_modules/@tailwindcss/oxide-linux-arm64-musl": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-arm64-musl/-/oxide-linux-arm64-musl-4.2.1.tgz", - "integrity": "sha512-WZA0CHRL/SP1TRbA5mp9htsppSEkWuQ4KsSUumYQnyl8ZdT39ntwqmz4IUHGN6p4XdSlYfJwM4rRzZLShHsGAQ==", + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-arm64-musl/-/oxide-linux-arm64-musl-4.2.2.tgz", + "integrity": "sha512-oCfG/mS+/+XRlwNjnsNLVwnMWYH7tn/kYPsNPh+JSOMlnt93mYNCKHYzylRhI51X+TbR+ufNhhKKzm6QkqX8ag==", "cpu": [ "arm64" ], @@ -4173,9 +5519,9 @@ } }, "node_modules/@tailwindcss/oxide-linux-x64-gnu": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-x64-gnu/-/oxide-linux-x64-gnu-4.2.1.tgz", - "integrity": "sha512-qMFzxI2YlBOLW5PhblzuSWlWfwLHaneBE0xHzLrBgNtqN6mWfs+qYbhryGSXQjFYB1Dzf5w+LN5qbUTPhW7Y5g==", + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-x64-gnu/-/oxide-linux-x64-gnu-4.2.2.tgz", + "integrity": "sha512-rTAGAkDgqbXHNp/xW0iugLVmX62wOp2PoE39BTCGKjv3Iocf6AFbRP/wZT/kuCxC9QBh9Pu8XPkv/zCZB2mcMg==", "cpu": [ "x64" ], @@ -4190,9 +5536,9 @@ } }, "node_modules/@tailwindcss/oxide-linux-x64-musl": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-x64-musl/-/oxide-linux-x64-musl-4.2.1.tgz", - "integrity": "sha512-5r1X2FKnCMUPlXTWRYpHdPYUY6a1Ar/t7P24OuiEdEOmms5lyqjDRvVY1yy9Rmioh+AunQ0rWiOTPE8F9A3v5g==", + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-x64-musl/-/oxide-linux-x64-musl-4.2.2.tgz", + "integrity": "sha512-XW3t3qwbIwiSyRCggeO2zxe3KWaEbM0/kW9e8+0XpBgyKU4ATYzcVSMKteZJ1iukJ3HgHBjbg9P5YPRCVUxlnQ==", "cpu": [ "x64" ], @@ -4207,9 +5553,9 @@ } }, "node_modules/@tailwindcss/oxide-wasm32-wasi": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-wasm32-wasi/-/oxide-wasm32-wasi-4.2.1.tgz", - "integrity": "sha512-MGFB5cVPvshR85MTJkEvqDUnuNoysrsRxd6vnk1Lf2tbiqNlXpHYZqkqOQalydienEWOHHFyyuTSYRsLfxFJ2Q==", + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-wasm32-wasi/-/oxide-wasm32-wasi-4.2.2.tgz", + "integrity": "sha512-eKSztKsmEsn1O5lJ4ZAfyn41NfG7vzCg496YiGtMDV86jz1q/irhms5O0VrY6ZwTUkFy/EKG3RfWgxSI3VbZ8Q==", "bundleDependencies": [ "@napi-rs/wasm-runtime", "@emnapi/core", @@ -4301,9 +5647,9 @@ "optional": true }, "node_modules/@tailwindcss/oxide-win32-arm64-msvc": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-win32-arm64-msvc/-/oxide-win32-arm64-msvc-4.2.1.tgz", - "integrity": "sha512-YlUEHRHBGnCMh4Nj4GnqQyBtsshUPdiNroZj8VPkvTZSoHsilRCwXcVKnG9kyi0ZFAS/3u+qKHBdDc81SADTRA==", + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-win32-arm64-msvc/-/oxide-win32-arm64-msvc-4.2.2.tgz", + "integrity": "sha512-qPmaQM4iKu5mxpsrWZMOZRgZv1tOZpUm+zdhhQP0VhJfyGGO3aUKdbh3gDZc/dPLQwW4eSqWGrrcWNBZWUWaXQ==", "cpu": [ "arm64" ], @@ -4318,9 +5664,9 @@ } }, "node_modules/@tailwindcss/oxide-win32-x64-msvc": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-win32-x64-msvc/-/oxide-win32-x64-msvc-4.2.1.tgz", - "integrity": "sha512-rbO34G5sMWWyrN/idLeVxAZgAKWrn5LiR3/I90Q9MkA67s6T1oB0xtTe+0heoBvHSpbU9Mk7i6uwJnpo4u21XQ==", + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-win32-x64-msvc/-/oxide-win32-x64-msvc-4.2.2.tgz", + "integrity": "sha512-1T/37VvI7WyH66b+vqHj/cLwnCxt7Qt3WFu5Q8hk65aOvlwAhs7rAp1VkulBJw/N4tMirXjVnylTR72uI0HGcA==", "cpu": [ "x64" ], @@ -4335,17 +5681,17 @@ } }, "node_modules/@tailwindcss/postcss": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/@tailwindcss/postcss/-/postcss-4.2.1.tgz", - "integrity": "sha512-OEwGIBnXnj7zJeonOh6ZG9woofIjGrd2BORfvE5p9USYKDCZoQmfqLcfNiRWoJlRWLdNPn2IgVZuWAOM4iTYMw==", + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@tailwindcss/postcss/-/postcss-4.2.2.tgz", + "integrity": "sha512-n4goKQbW8RVXIbNKRB/45LzyUqN451deQK0nzIeauVEqjlI49slUlgKYJM2QyUzap/PcpnS7kzSUmPb1sCRvYQ==", "dev": true, "license": "MIT", "dependencies": { "@alloc/quick-lru": "^5.2.0", - "@tailwindcss/node": "4.2.1", - "@tailwindcss/oxide": "4.2.1", + "@tailwindcss/node": "4.2.2", + "@tailwindcss/oxide": "4.2.2", "postcss": "^8.5.6", - "tailwindcss": "4.2.1" + "tailwindcss": "4.2.2" } }, "node_modules/@tufjs/canonical-json": { @@ -4674,6 +6020,16 @@ "license": "MIT", "optional": true }, + "node_modules/@upsetjs/venn.js": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@upsetjs/venn.js/-/venn.js-2.0.0.tgz", + "integrity": "sha512-WbBhLrooyePuQ1VZxrJjtLvTc4NVfpOyKx0sKqioq9bX1C1m7Jgykkn8gLrtwumBioXIqam8DLxp88Adbue6Hw==", + "license": "MIT", + "optionalDependencies": { + "d3-selection": "^3.0.0", + "d3-transition": "^3.0.1" + } + }, "node_modules/@vitejs/plugin-basic-ssl": { "version": "2.1.4", "resolved": "https://registry.npmjs.org/@vitejs/plugin-basic-ssl/-/plugin-basic-ssl-2.1.4.tgz", @@ -4688,29 +6044,29 @@ } }, "node_modules/@vitest/coverage-v8": { - "version": "4.0.18", - "resolved": "https://registry.npmjs.org/@vitest/coverage-v8/-/coverage-v8-4.0.18.tgz", - "integrity": "sha512-7i+N2i0+ME+2JFZhfuz7Tg/FqKtilHjGyGvoHYQ6iLV0zahbsJ9sljC9OcFcPDbhYKCet+sG8SsVqlyGvPflZg==", + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/@vitest/coverage-v8/-/coverage-v8-4.1.2.tgz", + "integrity": "sha512-sPK//PHO+kAkScb8XITeB1bf7fsk85Km7+rt4eeuRR3VS1/crD47cmV5wicisJmjNdfeokTZwjMk4Mj2d58Mgg==", "dev": true, "license": "MIT", "dependencies": { "@bcoe/v8-coverage": "^1.0.2", - "@vitest/utils": "4.0.18", - "ast-v8-to-istanbul": "^0.3.10", + "@vitest/utils": "4.1.2", + "ast-v8-to-istanbul": "^1.0.0", "istanbul-lib-coverage": "^3.2.2", "istanbul-lib-report": "^3.0.1", "istanbul-reports": "^3.2.0", - "magicast": "^0.5.1", + "magicast": "^0.5.2", "obug": "^2.1.1", - "std-env": "^3.10.0", - "tinyrainbow": "^3.0.3" + "std-env": "^4.0.0-rc.1", + "tinyrainbow": "^3.1.0" }, "funding": { "url": "https://opencollective.com/vitest" }, "peerDependencies": { - "@vitest/browser": "4.0.18", - "vitest": "4.0.18" + "@vitest/browser": "4.1.2", + "vitest": "4.1.2" }, "peerDependenciesMeta": { "@vitest/browser": { @@ -4719,31 +6075,31 @@ } }, "node_modules/@vitest/expect": { - "version": "4.0.18", - "resolved": "https://registry.npmjs.org/@vitest/expect/-/expect-4.0.18.tgz", - "integrity": "sha512-8sCWUyckXXYvx4opfzVY03EOiYVxyNrHS5QxX3DAIi5dpJAAkyJezHCP77VMX4HKA2LDT/Jpfo8i2r5BE3GnQQ==", + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/@vitest/expect/-/expect-4.1.2.tgz", + "integrity": "sha512-gbu+7B0YgUJ2nkdsRJrFFW6X7NTP44WlhiclHniUhxADQJH5Szt9mZ9hWnJPJ8YwOK5zUOSSlSvyzRf0u1DSBQ==", "dev": true, "license": "MIT", "dependencies": { - "@standard-schema/spec": "^1.0.0", + "@standard-schema/spec": "^1.1.0", "@types/chai": "^5.2.2", - "@vitest/spy": "4.0.18", - "@vitest/utils": "4.0.18", - "chai": "^6.2.1", - "tinyrainbow": "^3.0.3" + "@vitest/spy": "4.1.2", + "@vitest/utils": "4.1.2", + "chai": "^6.2.2", + "tinyrainbow": "^3.1.0" }, "funding": { "url": "https://opencollective.com/vitest" } }, "node_modules/@vitest/mocker": { - "version": "4.0.18", - "resolved": "https://registry.npmjs.org/@vitest/mocker/-/mocker-4.0.18.tgz", - "integrity": "sha512-HhVd0MDnzzsgevnOWCBj5Otnzobjy5wLBe4EdeeFGv8luMsGcYqDuFRMcttKWZA5vVO8RFjexVovXvAM4JoJDQ==", + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/@vitest/mocker/-/mocker-4.1.2.tgz", + "integrity": "sha512-Ize4iQtEALHDttPRCmN+FKqOl2vxTiNUhzobQFFt/BM1lRUTG7zRCLOykG/6Vo4E4hnUdfVLo5/eqKPukcWW7Q==", "dev": true, "license": "MIT", "dependencies": { - "@vitest/spy": "4.0.18", + "@vitest/spy": "4.1.2", "estree-walker": "^3.0.3", "magic-string": "^0.30.21" }, @@ -4752,7 +6108,7 @@ }, "peerDependencies": { "msw": "^2.4.9", - "vite": "^6.0.0 || ^7.0.0-0" + "vite": "^6.0.0 || ^7.0.0 || ^8.0.0" }, "peerDependenciesMeta": { "msw": { @@ -4764,26 +6120,26 @@ } }, "node_modules/@vitest/pretty-format": { - "version": "4.0.18", - "resolved": "https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-4.0.18.tgz", - "integrity": "sha512-P24GK3GulZWC5tz87ux0m8OADrQIUVDPIjjj65vBXYG17ZeU3qD7r+MNZ1RNv4l8CGU2vtTRqixrOi9fYk/yKw==", + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-4.1.2.tgz", + "integrity": "sha512-dwQga8aejqeuB+TvXCMzSQemvV9hNEtDDpgUKDzOmNQayl2OG241PSWeJwKRH3CiC+sESrmoFd49rfnq7T4RnA==", "dev": true, "license": "MIT", "dependencies": { - "tinyrainbow": "^3.0.3" + "tinyrainbow": "^3.1.0" }, "funding": { "url": "https://opencollective.com/vitest" } }, "node_modules/@vitest/runner": { - "version": "4.0.18", - "resolved": "https://registry.npmjs.org/@vitest/runner/-/runner-4.0.18.tgz", - "integrity": "sha512-rpk9y12PGa22Jg6g5M3UVVnTS7+zycIGk9ZNGN+m6tZHKQb7jrP7/77WfZy13Y/EUDd52NDsLRQhYKtv7XfPQw==", + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/@vitest/runner/-/runner-4.1.2.tgz", + "integrity": "sha512-Gr+FQan34CdiYAwpGJmQG8PgkyFVmARK8/xSijia3eTFgVfpcpztWLuP6FttGNfPLJhaZVP/euvujeNYar36OQ==", "dev": true, "license": "MIT", "dependencies": { - "@vitest/utils": "4.0.18", + "@vitest/utils": "4.1.2", "pathe": "^2.0.3" }, "funding": { @@ -4791,13 +6147,14 @@ } }, "node_modules/@vitest/snapshot": { - "version": "4.0.18", - "resolved": "https://registry.npmjs.org/@vitest/snapshot/-/snapshot-4.0.18.tgz", - "integrity": "sha512-PCiV0rcl7jKQjbgYqjtakly6T1uwv/5BQ9SwBLekVg/EaYeQFPiXcgrC2Y7vDMA8dM1SUEAEV82kgSQIlXNMvA==", + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/@vitest/snapshot/-/snapshot-4.1.2.tgz", + "integrity": "sha512-g7yfUmxYS4mNxk31qbOYsSt2F4m1E02LFqO53Xpzg3zKMhLAPZAjjfyl9e6z7HrW6LvUdTwAQR3HHfLjpko16A==", "dev": true, "license": "MIT", "dependencies": { - "@vitest/pretty-format": "4.0.18", + "@vitest/pretty-format": "4.1.2", + "@vitest/utils": "4.1.2", "magic-string": "^0.30.21", "pathe": "^2.0.3" }, @@ -4806,9 +6163,9 @@ } }, "node_modules/@vitest/spy": { - "version": "4.0.18", - "resolved": "https://registry.npmjs.org/@vitest/spy/-/spy-4.0.18.tgz", - "integrity": "sha512-cbQt3PTSD7P2OARdVW3qWER5EGq7PHlvE+QfzSC0lbwO+xnt7+XH06ZzFjFRgzUX//JmpxrCu92VdwvEPlWSNw==", + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/@vitest/spy/-/spy-4.1.2.tgz", + "integrity": "sha512-DU4fBnbVCJGNBwVA6xSToNXrkZNSiw59H8tcuUspVMsBDBST4nfvsPsEHDHGtWRRnqBERBQu7TrTKskmjqTXKA==", "dev": true, "license": "MIT", "funding": { @@ -4816,19 +6173,27 @@ } }, "node_modules/@vitest/utils": { - "version": "4.0.18", - "resolved": "https://registry.npmjs.org/@vitest/utils/-/utils-4.0.18.tgz", - "integrity": "sha512-msMRKLMVLWygpK3u2Hybgi4MNjcYJvwTb0Ru09+fOyCXIgT5raYP041DRRdiJiI3k/2U6SEbAETB3YtBrUkCFA==", + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/@vitest/utils/-/utils-4.1.2.tgz", + "integrity": "sha512-xw2/TiX82lQHA06cgbqRKFb5lCAy3axQ4H4SoUFhUsg+wztiet+co86IAMDtF6Vm1hc7J6j09oh/rgDn+JdKIQ==", "dev": true, "license": "MIT", "dependencies": { - "@vitest/pretty-format": "4.0.18", - "tinyrainbow": "^3.0.3" + "@vitest/pretty-format": "4.1.2", + "convert-source-map": "^2.0.0", + "tinyrainbow": "^3.1.0" }, "funding": { "url": "https://opencollective.com/vitest" } }, + "node_modules/@vitest/utils/node_modules/convert-source-map": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz", + "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==", + "dev": true, + "license": "MIT" + }, "node_modules/@yarnpkg/lockfile": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/@yarnpkg/lockfile/-/lockfile-1.1.0.tgz", @@ -4993,9 +6358,9 @@ } }, "node_modules/ast-v8-to-istanbul": { - "version": "0.3.11", - "resolved": "https://registry.npmjs.org/ast-v8-to-istanbul/-/ast-v8-to-istanbul-0.3.11.tgz", - "integrity": "sha512-Qya9fkoofMjCBNVdWINMjB5KZvkYfaO9/anwkWnjxibpWUxo5iHl2sOdP7/uAqaRuUYuoo8rDwnbaaKVFxoUvw==", + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/ast-v8-to-istanbul/-/ast-v8-to-istanbul-1.0.0.tgz", + "integrity": "sha512-1fSfIwuDICFA4LKkCzRPO7F0hzFf0B7+Xqrl27ynQaa+Rh0e1Es0v6kWHPott3lU10AyAr7oKHa65OppjLn3Rg==", "dev": true, "license": "MIT", "dependencies": { @@ -5287,16 +6652,16 @@ } }, "node_modules/chevrotain": { - "version": "11.1.1", - "resolved": "https://registry.npmjs.org/chevrotain/-/chevrotain-11.1.1.tgz", - "integrity": "sha512-f0yv5CPKaFxfsPTBzX7vGuim4oIC1/gcS7LUGdBSwl2dU6+FON6LVUksdOo1qJjoUvXNn45urgh8C+0a24pACQ==", + "version": "11.1.2", + "resolved": "https://registry.npmjs.org/chevrotain/-/chevrotain-11.1.2.tgz", + "integrity": "sha512-opLQzEVriiH1uUQ4Kctsd49bRoFDXGGSC4GUqj7pGyxM3RehRhvTlZJc1FL/Flew2p5uwxa1tUDWKzI4wNM8pg==", "license": "Apache-2.0", "dependencies": { - "@chevrotain/cst-dts-gen": "11.1.1", - "@chevrotain/gast": "11.1.1", - "@chevrotain/regexp-to-ast": "11.1.1", - "@chevrotain/types": "11.1.1", - "@chevrotain/utils": "11.1.1", + "@chevrotain/cst-dts-gen": "11.1.2", + "@chevrotain/gast": "11.1.2", + "@chevrotain/regexp-to-ast": "11.1.2", + "@chevrotain/types": "11.1.2", + "@chevrotain/utils": "11.1.2", "lodash-es": "4.17.23" } }, @@ -5608,14 +6973,14 @@ } }, "node_modules/css-tree": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/css-tree/-/css-tree-3.1.0.tgz", - "integrity": "sha512-0eW44TGN5SQXU1mWSkKwFstI/22X2bG1nYzZTYMAWjylYURhse752YgbE4Cx46AC+bAvI+/dYTPRk1LqSUnu6w==", + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/css-tree/-/css-tree-3.2.1.tgz", + "integrity": "sha512-X7sjQzceUhu1u7Y/ylrRZFU2FS6LRiFVp6rKLPg23y3x3c3DOKAwuXGDp+PAGjh6CSnCjYeAul8pcT8bAl+lSA==", "dev": true, "license": "MIT", "dependencies": { - "mdn-data": "2.12.2", - "source-map-js": "^1.0.1" + "mdn-data": "2.27.1", + "source-map-js": "^1.2.1" }, "engines": { "node": "^10 || ^12.20.0 || ^14.13.0 || >=15.0.0" @@ -5634,32 +6999,6 @@ "url": "https://github.com/sponsors/fb55" } }, - "node_modules/cssstyle": { - "version": "5.3.7", - "resolved": "https://registry.npmjs.org/cssstyle/-/cssstyle-5.3.7.tgz", - "integrity": "sha512-7D2EPVltRrsTkhpQmksIu+LxeWAIEk6wRDMJ1qljlv+CKHJM+cJLlfhWIzNA44eAsHXSNe3+vO6DW1yCYx8SuQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@asamuzakjp/css-color": "^4.1.1", - "@csstools/css-syntax-patches-for-csstree": "^1.0.21", - "css-tree": "^3.1.0", - "lru-cache": "^11.2.4" - }, - "engines": { - "node": ">=20" - } - }, - "node_modules/cssstyle/node_modules/lru-cache": { - "version": "11.2.6", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-11.2.6.tgz", - "integrity": "sha512-ESL2CrkS/2wTPfuend7Zhkzo2u0daGJ/A2VucJOgQ/C48S/zB8MMeMHSGKYpXhIjbPxfuezITkaBH1wqv00DDQ==", - "dev": true, - "license": "BlueOak-1.0.0", - "engines": { - "node": "20 || >=22" - } - }, "node_modules/cytoscape": { "version": "3.33.1", "resolved": "https://registry.npmjs.org/cytoscape/-/cytoscape-3.33.1.tgz", @@ -6171,9 +7510,9 @@ } }, "node_modules/dagre-d3-es": { - "version": "7.0.13", - "resolved": "https://registry.npmjs.org/dagre-d3-es/-/dagre-d3-es-7.0.13.tgz", - "integrity": "sha512-efEhnxpSuwpYOKRm/L5KbqoZmNNukHa/Flty4Wp62JRvgH2ojwVgPgdYyr4twpieZnyRDdIH7PY2mopX26+j2Q==", + "version": "7.0.14", + "resolved": "https://registry.npmjs.org/dagre-d3-es/-/dagre-d3-es-7.0.14.tgz", + "integrity": "sha512-P4rFMVq9ESWqmOgK+dlXvOtLwYg0i7u0HBGJER0LZDJT2VHIPAMZ/riPxqJceWMStH5+E61QxFra9kIS3AqdMg==", "license": "MIT", "dependencies": { "d3": "^7.9.0", @@ -6181,27 +7520,17 @@ } }, "node_modules/data-urls": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/data-urls/-/data-urls-6.0.1.tgz", - "integrity": "sha512-euIQENZg6x8mj3fO6o9+fOW8MimUI4PpD/fZBhJfeioZVy9TUpM4UY7KjQNVZFlqwJ0UdzRDzkycB997HEq1BQ==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/data-urls/-/data-urls-7.0.0.tgz", + "integrity": "sha512-23XHcCF+coGYevirZceTVD7NdJOqVn+49IHyxgszm+JIiHLoB2TkmPtsYkNWT1pvRSGkc35L6NHs0yHkN2SumA==", "dev": true, "license": "MIT", "dependencies": { "whatwg-mimetype": "^5.0.0", - "whatwg-url": "^15.1.0" + "whatwg-url": "^16.0.0" }, "engines": { - "node": ">=20" - } - }, - "node_modules/data-urls/node_modules/whatwg-mimetype": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/whatwg-mimetype/-/whatwg-mimetype-5.0.0.tgz", - "integrity": "sha512-sXcNcHOC51uPGF0P/D4NVtrkjSU2fNsm9iog4ZvZJsL3rjoDAzXZhkm2MWt1y+PUdggKAYVoMAIYcs78wJ51Cw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=20" + "node": "^20.19.0 || ^22.12.0 || >=24.0.0" } }, "node_modules/dayjs": { @@ -6236,9 +7565,9 @@ "license": "MIT" }, "node_modules/delaunator": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/delaunator/-/delaunator-5.0.1.tgz", - "integrity": "sha512-8nvh+XBe96aCESrGOqMp/84b13H9cdKbG5P2ejQCh4d4sK9RL4371qou9drQjMhvnPmhWl5hnmqbEE0fXr9Xnw==", + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/delaunator/-/delaunator-5.1.0.tgz", + "integrity": "sha512-AGrQ4QSgssa1NGmWmLPqN5NY2KajF5MqxetNEO+o0n3ZwZZeTmt7bBnvzHWrmkZFxGgr4HdyFgelzgi06otLuQ==", "license": "ISC", "dependencies": { "robust-predicates": "^3.0.2" @@ -6393,9 +7722,9 @@ } }, "node_modules/enhanced-resolve": { - "version": "5.19.0", - "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.19.0.tgz", - "integrity": "sha512-phv3E1Xl4tQOShqSte26C7Fl84EwUdZsyOuSSk9qtAGyyQs2s3jJzComh+Abf4g187lUUAvH+H26omrqia2aGg==", + "version": "5.20.1", + "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.20.1.tgz", + "integrity": "sha512-Qohcme7V1inbAfvjItgw0EaxVX5q2rdVEZHRBrEQdRZTssLDGsL8Lwrznl8oQ/6kuTJONLaDcGjkNP247XEhcA==", "dev": true, "license": "MIT", "dependencies": { @@ -6470,9 +7799,9 @@ } }, "node_modules/es-module-lexer": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-1.7.0.tgz", - "integrity": "sha512-jEQoCwk8hyb2AZziIOLhDqpm5+2ww5uIE6lkO/6jcOCusfk6LhMHpXXfBLXTZ7Ydyt0j4VoUQv6uGNYbdW+kBA==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-2.0.0.tgz", + "integrity": "sha512-5POEcUuZybH7IdmGsD8wlf0AI55wMecM9rVBTI/qEAy2c1kTOm3DjFYjrBdI2K3BaJjJYfYFeRtM0t9ssnRuxw==", "dev": true, "license": "MIT" }, @@ -6489,6 +7818,16 @@ "node": ">= 0.4" } }, + "node_modules/es-toolkit": { + "version": "1.45.1", + "resolved": "https://registry.npmjs.org/es-toolkit/-/es-toolkit-1.45.1.tgz", + "integrity": "sha512-/jhoOj/Fx+A+IIyDNOvO3TItGmlMKhtX8ISAHKE90c4b/k1tqaqEZ+uUqfpU8DMnW5cgNJv606zS55jGvza0Xw==", + "license": "MIT", + "workspaces": [ + "docs", + "benchmarks" + ] + }, "node_modules/esbuild": { "version": "0.27.3", "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.27.3.tgz", @@ -6660,13 +7999,13 @@ } }, "node_modules/express-rate-limit": { - "version": "8.2.1", - "resolved": "https://registry.npmjs.org/express-rate-limit/-/express-rate-limit-8.2.1.tgz", - "integrity": "sha512-PCZEIEIxqwhzw4KF0n7QF4QqruVTcF73O5kFKUnGOyjbCCgizBBiFaYpd/fnBLUMPw/BWw9OsiN7GgrNYr7j6g==", + "version": "8.3.1", + "resolved": "https://registry.npmjs.org/express-rate-limit/-/express-rate-limit-8.3.1.tgz", + "integrity": "sha512-D1dKN+cmyPWuvB+G2SREQDzPY1agpBIcTa9sJxOPMCNeH3gwzhqJRDWCXW3gg0y//+LQ/8j52JbMROWyrKdMdw==", "dev": true, "license": "MIT", "dependencies": { - "ip-address": "10.0.1" + "ip-address": "10.1.0" }, "engines": { "node": ">= 16" @@ -6679,9 +8018,9 @@ } }, "node_modules/fast-check": { - "version": "3.23.2", - "resolved": "https://registry.npmjs.org/fast-check/-/fast-check-3.23.2.tgz", - "integrity": "sha512-h5+1OzzfCC3Ef7VbtKdcv7zsstUQwUDlYpUTvjeUsJAssPgLn7QzbboPtL5ro04Mq0rPOsMzl7q5hIbRs2wD1A==", + "version": "4.6.0", + "resolved": "https://registry.npmjs.org/fast-check/-/fast-check-4.6.0.tgz", + "integrity": "sha512-h7H6Dm0Fy+H4ciQYFxFjXnXkzR2kr9Fb22c0UBpHnm59K2zpr2t13aPTHlltFiNT6zuxp6HMPAVVvgur4BLdpA==", "dev": true, "funding": [ { @@ -6695,10 +8034,10 @@ ], "license": "MIT", "dependencies": { - "pure-rand": "^6.1.0" + "pure-rand": "^8.0.0" }, "engines": { - "node": ">=8.0.0" + "node": ">=12.17.0" } }, "node_modules/fast-deep-equal": { @@ -7195,9 +8534,9 @@ } }, "node_modules/ip-address": { - "version": "10.0.1", - "resolved": "https://registry.npmjs.org/ip-address/-/ip-address-10.0.1.tgz", - "integrity": "sha512-NWv9YLW4PoW2B7xtzaS3NCot75m6nK7Icdv0o3lfMceJVRfSoQwqD4wEH5rLwoKJwUiZ/rfpiVBhnaF0FK4HoA==", + "version": "10.1.0", + "resolved": "https://registry.npmjs.org/ip-address/-/ip-address-10.1.0.tgz", + "integrity": "sha512-XXADHxXmvT9+CRxhXg56LJovE+bmWnEWB78LB83VZTprKTmaC5QfruXocxzTZ2Kl0DNwKuBdlIhjL8LeY8Sf8Q==", "dev": true, "license": "MIT", "engines": { @@ -7384,35 +8723,36 @@ "license": "MIT" }, "node_modules/jsdom": { - "version": "27.4.0", - "resolved": "https://registry.npmjs.org/jsdom/-/jsdom-27.4.0.tgz", - "integrity": "sha512-mjzqwWRD9Y1J1KUi7W97Gja1bwOOM5Ug0EZ6UDK3xS7j7mndrkwozHtSblfomlzyB4NepioNt+B2sOSzczVgtQ==", + "version": "29.0.1", + "resolved": "https://registry.npmjs.org/jsdom/-/jsdom-29.0.1.tgz", + "integrity": "sha512-z6JOK5gRO7aMybVq/y/MlIpKh8JIi68FBKMUtKkK2KH/wMSRlCxQ682d08LB9fYXplyY/UXG8P4XXTScmdjApg==", "dev": true, "license": "MIT", "dependencies": { - "@acemir/cssom": "^0.9.28", - "@asamuzakjp/dom-selector": "^6.7.6", - "@exodus/bytes": "^1.6.0", - "cssstyle": "^5.3.4", - "data-urls": "^6.0.0", + "@asamuzakjp/css-color": "^5.0.1", + "@asamuzakjp/dom-selector": "^7.0.3", + "@bramus/specificity": "^2.4.2", + "@csstools/css-syntax-patches-for-csstree": "^1.1.1", + "@exodus/bytes": "^1.15.0", + "css-tree": "^3.2.1", + "data-urls": "^7.0.0", "decimal.js": "^10.6.0", "html-encoding-sniffer": "^6.0.0", - "http-proxy-agent": "^7.0.2", - "https-proxy-agent": "^7.0.6", "is-potential-custom-element-name": "^1.0.1", + "lru-cache": "^11.2.7", "parse5": "^8.0.0", "saxes": "^6.0.0", "symbol-tree": "^3.2.4", - "tough-cookie": "^6.0.0", + "tough-cookie": "^6.0.1", + "undici": "^7.24.5", "w3c-xmlserializer": "^5.0.0", - "webidl-conversions": "^8.0.0", - "whatwg-mimetype": "^4.0.0", - "whatwg-url": "^15.1.0", - "ws": "^8.18.3", + "webidl-conversions": "^8.0.1", + "whatwg-mimetype": "^5.0.0", + "whatwg-url": "^16.0.1", "xml-name-validator": "^5.0.0" }, "engines": { - "node": "^20.19.0 || ^22.12.0 || >=24.0.0" + "node": "^20.19.0 || ^22.13.0 || >=24.0.0" }, "peerDependencies": { "canvas": "^3.0.0" @@ -7423,6 +8763,16 @@ } } }, + "node_modules/jsdom/node_modules/lru-cache": { + "version": "11.2.7", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-11.2.7.tgz", + "integrity": "sha512-aY/R+aEsRelme17KGQa/1ZSIpLpNYYrhcrepKTZgE+W3WM16YMCaPwOHLHsmopZHELU0Ojin1lPVxKR0MihncA==", + "dev": true, + "license": "BlueOak-1.0.0", + "engines": { + "node": "20 || >=22" + } + }, "node_modules/jsesc": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-3.1.0.tgz", @@ -7489,9 +8839,9 @@ "license": "MIT" }, "node_modules/katex": { - "version": "0.16.33", - "resolved": "https://registry.npmjs.org/katex/-/katex-0.16.33.tgz", - "integrity": "sha512-q3N5u+1sY9Bu7T4nlXoiRBXWfwSefNGoKeOwekV+gw0cAXQlz2Ww6BLcmBxVDeXBMUDQv6fK5bcNaJLxob3ZQA==", + "version": "0.16.44", + "resolved": "https://registry.npmjs.org/katex/-/katex-0.16.44.tgz", + "integrity": "sha512-EkxoDTk8ufHqHlf9QxGwcxeLkWRR3iOuYfRpfORgYfqc8s13bgb+YtRY59NK5ZpRaCwq1kqA6a5lpX8C/eLphQ==", "funding": [ "https://opencollective.com/katex", "https://github.com/sponsors/katex" @@ -7533,9 +8883,9 @@ "license": "MIT" }, "node_modules/lightningcss": { - "version": "1.31.1", - "resolved": "https://registry.npmjs.org/lightningcss/-/lightningcss-1.31.1.tgz", - "integrity": "sha512-l51N2r93WmGUye3WuFoN5k10zyvrVs0qfKBhyC5ogUQ6Ew6JUSswh78mbSO+IU3nTWsyOArqPCcShdQSadghBQ==", + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss/-/lightningcss-1.32.0.tgz", + "integrity": "sha512-NXYBzinNrblfraPGyrbPoD19C1h9lfI/1mzgWYvXUTe414Gz/X1FD2XBZSZM7rRTrMA8JL3OtAaGifrIKhQ5yQ==", "dev": true, "license": "MPL-2.0", "dependencies": { @@ -7549,23 +8899,23 @@ "url": "https://opencollective.com/parcel" }, "optionalDependencies": { - "lightningcss-android-arm64": "1.31.1", - "lightningcss-darwin-arm64": "1.31.1", - "lightningcss-darwin-x64": "1.31.1", - "lightningcss-freebsd-x64": "1.31.1", - "lightningcss-linux-arm-gnueabihf": "1.31.1", - "lightningcss-linux-arm64-gnu": "1.31.1", - "lightningcss-linux-arm64-musl": "1.31.1", - "lightningcss-linux-x64-gnu": "1.31.1", - "lightningcss-linux-x64-musl": "1.31.1", - "lightningcss-win32-arm64-msvc": "1.31.1", - "lightningcss-win32-x64-msvc": "1.31.1" + "lightningcss-android-arm64": "1.32.0", + "lightningcss-darwin-arm64": "1.32.0", + "lightningcss-darwin-x64": "1.32.0", + "lightningcss-freebsd-x64": "1.32.0", + "lightningcss-linux-arm-gnueabihf": "1.32.0", + "lightningcss-linux-arm64-gnu": "1.32.0", + "lightningcss-linux-arm64-musl": "1.32.0", + "lightningcss-linux-x64-gnu": "1.32.0", + "lightningcss-linux-x64-musl": "1.32.0", + "lightningcss-win32-arm64-msvc": "1.32.0", + "lightningcss-win32-x64-msvc": "1.32.0" } }, "node_modules/lightningcss-android-arm64": { - "version": "1.31.1", - "resolved": "https://registry.npmjs.org/lightningcss-android-arm64/-/lightningcss-android-arm64-1.31.1.tgz", - "integrity": "sha512-HXJF3x8w9nQ4jbXRiNppBCqeZPIAfUo8zE/kOEGbW5NZvGc/K7nMxbhIr+YlFlHW5mpbg/YFPdbnCh1wAXCKFg==", + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-android-arm64/-/lightningcss-android-arm64-1.32.0.tgz", + "integrity": "sha512-YK7/ClTt4kAK0vo6w3X+Pnm0D2cf2vPHbhOXdoNti1Ga0al1P4TBZhwjATvjNwLEBCnKvjJc2jQgHXH0NEwlAg==", "cpu": [ "arm64" ], @@ -7584,9 +8934,9 @@ } }, "node_modules/lightningcss-darwin-arm64": { - "version": "1.31.1", - "resolved": "https://registry.npmjs.org/lightningcss-darwin-arm64/-/lightningcss-darwin-arm64-1.31.1.tgz", - "integrity": "sha512-02uTEqf3vIfNMq3h/z2cJfcOXnQ0GRwQrkmPafhueLb2h7mqEidiCzkE4gBMEH65abHRiQvhdcQ+aP0D0g67sg==", + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-darwin-arm64/-/lightningcss-darwin-arm64-1.32.0.tgz", + "integrity": "sha512-RzeG9Ju5bag2Bv1/lwlVJvBE3q6TtXskdZLLCyfg5pt+HLz9BqlICO7LZM7VHNTTn/5PRhHFBSjk5lc4cmscPQ==", "cpu": [ "arm64" ], @@ -7605,9 +8955,9 @@ } }, "node_modules/lightningcss-darwin-x64": { - "version": "1.31.1", - "resolved": "https://registry.npmjs.org/lightningcss-darwin-x64/-/lightningcss-darwin-x64-1.31.1.tgz", - "integrity": "sha512-1ObhyoCY+tGxtsz1lSx5NXCj3nirk0Y0kB/g8B8DT+sSx4G9djitg9ejFnjb3gJNWo7qXH4DIy2SUHvpoFwfTA==", + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-darwin-x64/-/lightningcss-darwin-x64-1.32.0.tgz", + "integrity": "sha512-U+QsBp2m/s2wqpUYT/6wnlagdZbtZdndSmut/NJqlCcMLTWp5muCrID+K5UJ6jqD2BFshejCYXniPDbNh73V8w==", "cpu": [ "x64" ], @@ -7626,9 +8976,9 @@ } }, "node_modules/lightningcss-freebsd-x64": { - "version": "1.31.1", - "resolved": "https://registry.npmjs.org/lightningcss-freebsd-x64/-/lightningcss-freebsd-x64-1.31.1.tgz", - "integrity": "sha512-1RINmQKAItO6ISxYgPwszQE1BrsVU5aB45ho6O42mu96UiZBxEXsuQ7cJW4zs4CEodPUioj/QrXW1r9pLUM74A==", + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-freebsd-x64/-/lightningcss-freebsd-x64-1.32.0.tgz", + "integrity": "sha512-JCTigedEksZk3tHTTthnMdVfGf61Fky8Ji2E4YjUTEQX14xiy/lTzXnu1vwiZe3bYe0q+SpsSH/CTeDXK6WHig==", "cpu": [ "x64" ], @@ -7647,9 +8997,9 @@ } }, "node_modules/lightningcss-linux-arm-gnueabihf": { - "version": "1.31.1", - "resolved": "https://registry.npmjs.org/lightningcss-linux-arm-gnueabihf/-/lightningcss-linux-arm-gnueabihf-1.31.1.tgz", - "integrity": "sha512-OOCm2//MZJ87CdDK62rZIu+aw9gBv4azMJuA8/KB74wmfS3lnC4yoPHm0uXZ/dvNNHmnZnB8XLAZzObeG0nS1g==", + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-linux-arm-gnueabihf/-/lightningcss-linux-arm-gnueabihf-1.32.0.tgz", + "integrity": "sha512-x6rnnpRa2GL0zQOkt6rts3YDPzduLpWvwAF6EMhXFVZXD4tPrBkEFqzGowzCsIWsPjqSK+tyNEODUBXeeVHSkw==", "cpu": [ "arm" ], @@ -7668,9 +9018,9 @@ } }, "node_modules/lightningcss-linux-arm64-gnu": { - "version": "1.31.1", - "resolved": "https://registry.npmjs.org/lightningcss-linux-arm64-gnu/-/lightningcss-linux-arm64-gnu-1.31.1.tgz", - "integrity": "sha512-WKyLWztD71rTnou4xAD5kQT+982wvca7E6QoLpoawZ1gP9JM0GJj4Tp5jMUh9B3AitHbRZ2/H3W5xQmdEOUlLg==", + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-linux-arm64-gnu/-/lightningcss-linux-arm64-gnu-1.32.0.tgz", + "integrity": "sha512-0nnMyoyOLRJXfbMOilaSRcLH3Jw5z9HDNGfT/gwCPgaDjnx0i8w7vBzFLFR1f6CMLKF8gVbebmkUN3fa/kQJpQ==", "cpu": [ "arm64" ], @@ -7689,9 +9039,9 @@ } }, "node_modules/lightningcss-linux-arm64-musl": { - "version": "1.31.1", - "resolved": "https://registry.npmjs.org/lightningcss-linux-arm64-musl/-/lightningcss-linux-arm64-musl-1.31.1.tgz", - "integrity": "sha512-mVZ7Pg2zIbe3XlNbZJdjs86YViQFoJSpc41CbVmKBPiGmC4YrfeOyz65ms2qpAobVd7WQsbW4PdsSJEMymyIMg==", + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-linux-arm64-musl/-/lightningcss-linux-arm64-musl-1.32.0.tgz", + "integrity": "sha512-UpQkoenr4UJEzgVIYpI80lDFvRmPVg6oqboNHfoH4CQIfNA+HOrZ7Mo7KZP02dC6LjghPQJeBsvXhJod/wnIBg==", "cpu": [ "arm64" ], @@ -7710,9 +9060,9 @@ } }, "node_modules/lightningcss-linux-x64-gnu": { - "version": "1.31.1", - "resolved": "https://registry.npmjs.org/lightningcss-linux-x64-gnu/-/lightningcss-linux-x64-gnu-1.31.1.tgz", - "integrity": "sha512-xGlFWRMl+0KvUhgySdIaReQdB4FNudfUTARn7q0hh/V67PVGCs3ADFjw+6++kG1RNd0zdGRlEKa+T13/tQjPMA==", + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-linux-x64-gnu/-/lightningcss-linux-x64-gnu-1.32.0.tgz", + "integrity": "sha512-V7Qr52IhZmdKPVr+Vtw8o+WLsQJYCTd8loIfpDaMRWGUZfBOYEJeyJIkqGIDMZPwPx24pUMfwSxxI8phr/MbOA==", "cpu": [ "x64" ], @@ -7731,9 +9081,9 @@ } }, "node_modules/lightningcss-linux-x64-musl": { - "version": "1.31.1", - "resolved": "https://registry.npmjs.org/lightningcss-linux-x64-musl/-/lightningcss-linux-x64-musl-1.31.1.tgz", - "integrity": "sha512-eowF8PrKHw9LpoZii5tdZwnBcYDxRw2rRCyvAXLi34iyeYfqCQNA9rmUM0ce62NlPhCvof1+9ivRaTY6pSKDaA==", + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-linux-x64-musl/-/lightningcss-linux-x64-musl-1.32.0.tgz", + "integrity": "sha512-bYcLp+Vb0awsiXg/80uCRezCYHNg1/l3mt0gzHnWV9XP1W5sKa5/TCdGWaR/zBM2PeF/HbsQv/j2URNOiVuxWg==", "cpu": [ "x64" ], @@ -7752,9 +9102,9 @@ } }, "node_modules/lightningcss-win32-arm64-msvc": { - "version": "1.31.1", - "resolved": "https://registry.npmjs.org/lightningcss-win32-arm64-msvc/-/lightningcss-win32-arm64-msvc-1.31.1.tgz", - "integrity": "sha512-aJReEbSEQzx1uBlQizAOBSjcmr9dCdL3XuC/6HLXAxmtErsj2ICo5yYggg1qOODQMtnjNQv2UHb9NpOuFtYe4w==", + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-win32-arm64-msvc/-/lightningcss-win32-arm64-msvc-1.32.0.tgz", + "integrity": "sha512-8SbC8BR40pS6baCM8sbtYDSwEVQd4JlFTOlaD3gWGHfThTcABnNDBda6eTZeqbofalIJhFx0qKzgHJmcPTnGdw==", "cpu": [ "arm64" ], @@ -7773,9 +9123,9 @@ } }, "node_modules/lightningcss-win32-x64-msvc": { - "version": "1.31.1", - "resolved": "https://registry.npmjs.org/lightningcss-win32-x64-msvc/-/lightningcss-win32-x64-msvc-1.31.1.tgz", - "integrity": "sha512-I9aiFrbd7oYHwlnQDqr1Roz+fTz61oDDJX7n9tYF9FJymH1cIN1DtKw3iYt6b8WZgEjoNwVSncwF4wx/ZedMhw==", + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-win32-x64-msvc/-/lightningcss-win32-x64-msvc-1.32.0.tgz", + "integrity": "sha512-Amq9B/SoZYdDi1kFrojnoqPLxYhQ4Wo5XiL8EVJrVsB8ARoC1PWW6VGtT0WKCemjy8aC+louJnjS7U18x3b06Q==", "cpu": [ "x64" ], @@ -8026,9 +9376,9 @@ } }, "node_modules/marked": { - "version": "17.0.3", - "resolved": "https://registry.npmjs.org/marked/-/marked-17.0.3.tgz", - "integrity": "sha512-jt1v2ObpyOKR8p4XaUJVk3YWRJ5n+i4+rjQopxvV32rSndTJXvIzuUdWWIy/1pFQMkQmvTXawzDNqOH/CUmx6A==", + "version": "17.0.5", + "resolved": "https://registry.npmjs.org/marked/-/marked-17.0.5.tgz", + "integrity": "sha512-6hLvc0/JEbRjRgzI6wnT2P1XuM1/RrrDEX0kPt0N7jGm1133g6X7DlxFasUIx+72aKAr904GTxhSLDrd5DIlZg==", "license": "MIT", "bin": { "marked": "bin/marked.js" @@ -8048,9 +9398,9 @@ } }, "node_modules/mdn-data": { - "version": "2.12.2", - "resolved": "https://registry.npmjs.org/mdn-data/-/mdn-data-2.12.2.tgz", - "integrity": "sha512-IEn+pegP1aManZuckezWCO+XZQDplx1366JoVhTpMpBB1sPey/SbveZQUosKiKiGYjg1wH4pMlNgXbCiYgihQA==", + "version": "2.27.1", + "resolved": "https://registry.npmjs.org/mdn-data/-/mdn-data-2.27.1.tgz", + "integrity": "sha512-9Yubnt3e8A0OKwxYSXyhLymGW4sCufcLG6VdiDdUGVkPhpqLxlvP5vl1983gQjJl3tqbrM731mjaZaP68AgosQ==", "dev": true, "license": "CC0-1.0" }, @@ -8078,27 +9428,28 @@ } }, "node_modules/mermaid": { - "version": "11.12.3", - "resolved": "https://registry.npmjs.org/mermaid/-/mermaid-11.12.3.tgz", - "integrity": "sha512-wN5ZSgJQIC+CHJut9xaKWsknLxaFBwCPwPkGTSUYrTiHORWvpT8RxGk849HPnpUAQ+/9BPRqYb80jTpearrHzQ==", + "version": "11.13.0", + "resolved": "https://registry.npmjs.org/mermaid/-/mermaid-11.13.0.tgz", + "integrity": "sha512-fEnci+Immw6lKMFI8sqzjlATTyjLkRa6axrEgLV2yHTfv8r+h1wjFbV6xeRtd4rUV1cS4EpR9rwp3Rci7TRWDw==", "license": "MIT", "dependencies": { "@braintree/sanitize-url": "^7.1.1", - "@iconify/utils": "^3.0.1", - "@mermaid-js/parser": "^1.0.0", + "@iconify/utils": "^3.0.2", + "@mermaid-js/parser": "^1.0.1", "@types/d3": "^7.4.3", - "cytoscape": "^3.29.3", + "@upsetjs/venn.js": "^2.0.0", + "cytoscape": "^3.33.1", "cytoscape-cose-bilkent": "^4.1.0", "cytoscape-fcose": "^2.2.0", "d3": "^7.9.0", "d3-sankey": "^0.12.3", - "dagre-d3-es": "7.0.13", - "dayjs": "^1.11.18", - "dompurify": "^3.2.5", - "katex": "^0.16.22", + "dagre-d3-es": "7.0.14", + "dayjs": "^1.11.19", + "dompurify": "^3.3.1", + "katex": "^0.16.25", "khroma": "^2.1.0", "lodash-es": "^4.17.23", - "marked": "^16.2.1", + "marked": "^16.3.0", "roughjs": "^4.6.6", "stylis": "^4.3.6", "ts-dedent": "^2.2.0", @@ -8421,19 +9772,19 @@ } }, "node_modules/ng2-charts": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/ng2-charts/-/ng2-charts-8.0.0.tgz", - "integrity": "sha512-nofsNHI2Zt+EAwT+BJBVg0kgOhNo9ukO4CxULlaIi7VwZSr7I1km38kWSoU41Oq6os6qqIh5srnL+CcV+RFPFA==", + "version": "10.0.0", + "resolved": "https://registry.npmjs.org/ng2-charts/-/ng2-charts-10.0.0.tgz", + "integrity": "sha512-mdL75XJrk/0s0YO2ySPQpAHPja85ECDEGNWFlcElJiy/bYliTNGEpeCtctAqZuozTff/E2CwGjyfPFM1ScP2og==", "license": "MIT", "dependencies": { - "lodash-es": "^4.17.15", + "es-toolkit": "^1.39.7", "tslib": "^2.3.0" }, "peerDependencies": { - "@angular/cdk": ">=19.0.0", - "@angular/common": ">=19.0.0", - "@angular/core": ">=19.0.0", - "@angular/platform-browser": ">=19.0.0", + "@angular/cdk": ">=21.0.0", + "@angular/common": ">=21.0.0", + "@angular/core": ">=21.0.0", + "@angular/platform-browser": ">=21.0.0", "chart.js": "^3.4.0 || ^4.0.0", "rxjs": "^6.5.3 || ^7.4.0" } @@ -8777,6 +10128,121 @@ "license": "MIT", "optional": true }, + "node_modules/oxc-parser": { + "version": "0.121.0", + "resolved": "https://registry.npmjs.org/oxc-parser/-/oxc-parser-0.121.0.tgz", + "integrity": "sha512-ek9o58+SCv6AV7nchiAcUJy1DNE2CC5WRdBcO0mF+W4oRjNQfPO7b3pLjTHSFECpHkKGOZSQxx3hk8viIL5YCg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@oxc-project/types": "^0.121.0" + }, + "engines": { + "node": "^20.19.0 || >=22.12.0" + }, + "funding": { + "url": "https://github.com/sponsors/Boshen" + }, + "optionalDependencies": { + "@oxc-parser/binding-android-arm-eabi": "0.121.0", + "@oxc-parser/binding-android-arm64": "0.121.0", + "@oxc-parser/binding-darwin-arm64": "0.121.0", + "@oxc-parser/binding-darwin-x64": "0.121.0", + "@oxc-parser/binding-freebsd-x64": "0.121.0", + "@oxc-parser/binding-linux-arm-gnueabihf": "0.121.0", + "@oxc-parser/binding-linux-arm-musleabihf": "0.121.0", + "@oxc-parser/binding-linux-arm64-gnu": "0.121.0", + "@oxc-parser/binding-linux-arm64-musl": "0.121.0", + "@oxc-parser/binding-linux-ppc64-gnu": "0.121.0", + "@oxc-parser/binding-linux-riscv64-gnu": "0.121.0", + "@oxc-parser/binding-linux-riscv64-musl": "0.121.0", + "@oxc-parser/binding-linux-s390x-gnu": "0.121.0", + "@oxc-parser/binding-linux-x64-gnu": "0.121.0", + "@oxc-parser/binding-linux-x64-musl": "0.121.0", + "@oxc-parser/binding-openharmony-arm64": "0.121.0", + "@oxc-parser/binding-wasm32-wasi": "0.121.0", + "@oxc-parser/binding-win32-arm64-msvc": "0.121.0", + "@oxc-parser/binding-win32-ia32-msvc": "0.121.0", + "@oxc-parser/binding-win32-x64-msvc": "0.121.0" + } + }, + "node_modules/oxc-parser/node_modules/@oxc-project/types": { + "version": "0.121.0", + "resolved": "https://registry.npmjs.org/@oxc-project/types/-/types-0.121.0.tgz", + "integrity": "sha512-CGtOARQb9tyv7ECgdAlFxi0Fv7lmzvmlm2rpD/RdijOO9rfk/JvB1CjT8EnoD+tjna/IYgKKw3IV7objRb+aYw==", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/Boshen" + } + }, + "node_modules/oxc-resolver": { + "version": "11.19.1", + "resolved": "https://registry.npmjs.org/oxc-resolver/-/oxc-resolver-11.19.1.tgz", + "integrity": "sha512-qE/CIg/spwrTBFt5aKmwe3ifeDdLfA2NESN30E42X/lII5ClF8V7Wt6WIJhcGZjp0/Q+nQ+9vgxGk//xZNX2hg==", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/Boshen" + }, + "optionalDependencies": { + "@oxc-resolver/binding-android-arm-eabi": "11.19.1", + "@oxc-resolver/binding-android-arm64": "11.19.1", + "@oxc-resolver/binding-darwin-arm64": "11.19.1", + "@oxc-resolver/binding-darwin-x64": "11.19.1", + "@oxc-resolver/binding-freebsd-x64": "11.19.1", + "@oxc-resolver/binding-linux-arm-gnueabihf": "11.19.1", + "@oxc-resolver/binding-linux-arm-musleabihf": "11.19.1", + "@oxc-resolver/binding-linux-arm64-gnu": "11.19.1", + "@oxc-resolver/binding-linux-arm64-musl": "11.19.1", + "@oxc-resolver/binding-linux-ppc64-gnu": "11.19.1", + "@oxc-resolver/binding-linux-riscv64-gnu": "11.19.1", + "@oxc-resolver/binding-linux-riscv64-musl": "11.19.1", + "@oxc-resolver/binding-linux-s390x-gnu": "11.19.1", + "@oxc-resolver/binding-linux-x64-gnu": "11.19.1", + "@oxc-resolver/binding-linux-x64-musl": "11.19.1", + "@oxc-resolver/binding-openharmony-arm64": "11.19.1", + "@oxc-resolver/binding-wasm32-wasi": "11.19.1", + "@oxc-resolver/binding-win32-arm64-msvc": "11.19.1", + "@oxc-resolver/binding-win32-ia32-msvc": "11.19.1", + "@oxc-resolver/binding-win32-x64-msvc": "11.19.1" + } + }, + "node_modules/oxc-transform": { + "version": "0.121.0", + "resolved": "https://registry.npmjs.org/oxc-transform/-/oxc-transform-0.121.0.tgz", + "integrity": "sha512-Kf243wJU/vWF/ThV+ZyfLMQIrViVFRSyYO7UPKpZMMPGGMzxxcHgsNGWy0Uy+pcXD78+jdUnxVTR9rYT73Qw3A==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^20.19.0 || >=22.12.0" + }, + "funding": { + "url": "https://github.com/sponsors/Boshen" + }, + "optionalDependencies": { + "@oxc-transform/binding-android-arm-eabi": "0.121.0", + "@oxc-transform/binding-android-arm64": "0.121.0", + "@oxc-transform/binding-darwin-arm64": "0.121.0", + "@oxc-transform/binding-darwin-x64": "0.121.0", + "@oxc-transform/binding-freebsd-x64": "0.121.0", + "@oxc-transform/binding-linux-arm-gnueabihf": "0.121.0", + "@oxc-transform/binding-linux-arm-musleabihf": "0.121.0", + "@oxc-transform/binding-linux-arm64-gnu": "0.121.0", + "@oxc-transform/binding-linux-arm64-musl": "0.121.0", + "@oxc-transform/binding-linux-ppc64-gnu": "0.121.0", + "@oxc-transform/binding-linux-riscv64-gnu": "0.121.0", + "@oxc-transform/binding-linux-riscv64-musl": "0.121.0", + "@oxc-transform/binding-linux-s390x-gnu": "0.121.0", + "@oxc-transform/binding-linux-x64-gnu": "0.121.0", + "@oxc-transform/binding-linux-x64-musl": "0.121.0", + "@oxc-transform/binding-openharmony-arm64": "0.121.0", + "@oxc-transform/binding-wasm32-wasi": "0.121.0", + "@oxc-transform/binding-win32-arm64-msvc": "0.121.0", + "@oxc-transform/binding-win32-ia32-msvc": "0.121.0", + "@oxc-transform/binding-win32-x64-msvc": "0.121.0" + } + }, "node_modules/p-map": { "version": "7.0.4", "resolved": "https://registry.npmjs.org/p-map/-/p-map-7.0.4.tgz", @@ -8971,9 +10437,9 @@ "license": "ISC" }, "node_modules/picomatch": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", - "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.4.tgz", + "integrity": "sha512-QP88BAKvMam/3NxH6vj2o21R6MjxZUAd6nlwAS/pnGvN9IVLocLHxGYIzFhg6fUQ+5th6P4dv4eW9jX3DSIj7A==", "license": "MIT", "engines": { "node": ">=12" @@ -9033,9 +10499,9 @@ } }, "node_modules/postcss": { - "version": "8.5.6", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.6.tgz", - "integrity": "sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==", + "version": "8.5.8", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.8.tgz", + "integrity": "sha512-OW/rX8O/jXnm82Ey1k44pObPtdblfiuWnrd8X7GJ7emImCOstunGbXUpp7HdBrFQX6rJzn3sPT397Wp5aCwCHg==", "dev": true, "funding": [ { @@ -9153,9 +10619,9 @@ } }, "node_modules/pure-rand": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/pure-rand/-/pure-rand-6.1.0.tgz", - "integrity": "sha512-bVWawvoZoBYpp6yIoQtQXHZjmz35RSVHnUOTefl8Vcjr8snTPY1wnpSPMWekcFwbxI6gtmT7rSYPFvz71ldiOA==", + "version": "8.4.0", + "resolved": "https://registry.npmjs.org/pure-rand/-/pure-rand-8.4.0.tgz", + "integrity": "sha512-IoM8YF/jY0hiugFo/wOWqfmarlE6J0wc6fDK1PhftMk7MGhVZl88sZimmqBBFomLOCSmcCCpsfj7wXASCpvK9A==", "dev": true, "funding": [ { @@ -9275,9 +10741,9 @@ "license": "MIT" }, "node_modules/robust-predicates": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/robust-predicates/-/robust-predicates-3.0.2.tgz", - "integrity": "sha512-IXgzBWvWQwE6PrDI05OvmXUIruQTcoMDzRsOd5CDvHCVLcLHMTSYvOK5Cm46kWqlV3yAbuSpBZdJ5oP5OUoStg==", + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/robust-predicates/-/robust-predicates-3.0.3.tgz", + "integrity": "sha512-NS3levdsRIUOmiJ8FZWCP7LG3QpJyrs/TE0Zpf1yvZu8cAJJ6QMW92H1c7kWpdIHo8RvmLxN/o2JXTKHp74lUA==", "license": "Unlicense" }, "node_modules/rolldown": { @@ -9835,9 +11301,9 @@ } }, "node_modules/std-env": { - "version": "3.10.0", - "resolved": "https://registry.npmjs.org/std-env/-/std-env-3.10.0.tgz", - "integrity": "sha512-5GS12FdOZNliM5mAOxFRg7Ir0pWz8MdpYm6AY6VPkGpbA7ZzmbzNcBJQ0GPvvyWgcY7QAhCgf9Uy89I03faLkg==", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/std-env/-/std-env-4.0.0.tgz", + "integrity": "sha512-zUMPtQ/HBY3/50VbpkupYHbRroTRZJPRLvreamgErJVys0ceuzMkD44J/QjqhHjOzK42GQ3QZIeFG1OYfOtKqQ==", "dev": true, "license": "MIT" }, @@ -9911,16 +11377,16 @@ "license": "MIT" }, "node_modules/tailwindcss": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-4.2.1.tgz", - "integrity": "sha512-/tBrSQ36vCleJkAOsy9kbNTgaxvGbyOamC30PRePTQe/o1MFwEKHQk4Cn7BNGaPtjp+PuUrByJehM1hgxfq4sw==", + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-4.2.2.tgz", + "integrity": "sha512-KWBIxs1Xb6NoLdMVqhbhgwZf2PGBpPEiwOqgI4pFIYbNTfBXiKYyWoTsXgBQ9WFg/OlhnvHaY+AEpW7wSmFo2Q==", "dev": true, "license": "MIT" }, "node_modules/tapable": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/tapable/-/tapable-2.3.0.tgz", - "integrity": "sha512-g9ljZiwki/LfxmQADO3dEY1CbpmXT5Hm2fJ+QaGKwSXUylMybePR7/67YW7jOrrvjEgL1Fmz5kzyAjWVWLlucg==", + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/tapable/-/tapable-2.3.2.tgz", + "integrity": "sha512-1MOpMXuhGzGL5TTCZFItxCc0AARf1EZFQkGqMm7ERKj8+Hgr5oLvJOVFcC+lRmR8hCe2S3jC4T5D7Vg/d7/fhA==", "dev": true, "license": "MIT", "engines": { @@ -9999,9 +11465,9 @@ } }, "node_modules/tinyrainbow": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/tinyrainbow/-/tinyrainbow-3.0.3.tgz", - "integrity": "sha512-PSkbLUoxOFRzJYjjxHJt9xro7D+iilgMX/C9lawzVuYiIdcihh9DXmVibBe8lmcFrRi/VzlPjBxbN7rH24q8/Q==", + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/tinyrainbow/-/tinyrainbow-3.1.0.tgz", + "integrity": "sha512-Bf+ILmBgretUrdJxzXM0SgXLZ3XfiaUuOj/IKQHuTXip+05Xn+uyEYdVg0kYDipTBcLrCVyUzAPz7QmArb0mmw==", "dev": true, "license": "MIT", "engines": { @@ -10009,22 +11475,22 @@ } }, "node_modules/tldts": { - "version": "7.0.23", - "resolved": "https://registry.npmjs.org/tldts/-/tldts-7.0.23.tgz", - "integrity": "sha512-ASdhgQIBSay0R/eXggAkQ53G4nTJqTXqC2kbaBbdDwM7SkjyZyO0OaaN1/FH7U/yCeqOHDwFO5j8+Os/IS1dXw==", + "version": "7.0.27", + "resolved": "https://registry.npmjs.org/tldts/-/tldts-7.0.27.tgz", + "integrity": "sha512-I4FZcVFcqCRuT0ph6dCDpPuO4Xgzvh+spkcTr1gK7peIvxWauoloVO0vuy1FQnijT63ss6AsHB6+OIM4aXHbPg==", "dev": true, "license": "MIT", "dependencies": { - "tldts-core": "^7.0.23" + "tldts-core": "^7.0.27" }, "bin": { "tldts": "bin/cli.js" } }, "node_modules/tldts-core": { - "version": "7.0.23", - "resolved": "https://registry.npmjs.org/tldts-core/-/tldts-core-7.0.23.tgz", - "integrity": "sha512-0g9vrtDQLrNIiCj22HSe9d4mLVG3g5ph5DZ8zCKBr4OtrspmNB6ss7hVyzArAeE88ceZocIEGkyW1Ime7fxPtQ==", + "version": "7.0.27", + "resolved": "https://registry.npmjs.org/tldts-core/-/tldts-core-7.0.27.tgz", + "integrity": "sha512-YQ7uPjgWUibIK6DW5lrKujGwUKhLevU4hcGbP5O6TcIUb+oTjJYJVWPS4nZsIHrEEEG6myk/oqAJUEQmpZrHsg==", "dev": true, "license": "MIT" }, @@ -10039,9 +11505,9 @@ } }, "node_modules/tough-cookie": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-6.0.0.tgz", - "integrity": "sha512-kXuRi1mtaKMrsLUxz3sQYvVl37B0Ns6MzfrtV5DvJceE9bPyspOqk9xxv7XbZWcfLWbFmm997vl83qUWVJA64w==", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-6.0.1.tgz", + "integrity": "sha512-LktZQb3IeoUWB9lqR5EWTHgW/VTITCXg4D21M+lvybRVdylLrRMnqaIONLVb5mav8vM19m44HIcGq4qASeu2Qw==", "dev": true, "license": "BSD-3-Clause", "dependencies": { @@ -10289,31 +11755,31 @@ } }, "node_modules/vitest": { - "version": "4.0.18", - "resolved": "https://registry.npmjs.org/vitest/-/vitest-4.0.18.tgz", - "integrity": "sha512-hOQuK7h0FGKgBAas7v0mSAsnvrIgAvWmRFjmzpJ7SwFHH3g1k2u37JtYwOwmEKhK6ZO3v9ggDBBm0La1LCK4uQ==", + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/vitest/-/vitest-4.1.2.tgz", + "integrity": "sha512-xjR1dMTVHlFLh98JE3i/f/WePqJsah4A0FK9cc8Ehp9Udk0AZk6ccpIZhh1qJ/yxVWRZ+Q54ocnD8TXmkhspGg==", "dev": true, "license": "MIT", "dependencies": { - "@vitest/expect": "4.0.18", - "@vitest/mocker": "4.0.18", - "@vitest/pretty-format": "4.0.18", - "@vitest/runner": "4.0.18", - "@vitest/snapshot": "4.0.18", - "@vitest/spy": "4.0.18", - "@vitest/utils": "4.0.18", - "es-module-lexer": "^1.7.0", - "expect-type": "^1.2.2", + "@vitest/expect": "4.1.2", + "@vitest/mocker": "4.1.2", + "@vitest/pretty-format": "4.1.2", + "@vitest/runner": "4.1.2", + "@vitest/snapshot": "4.1.2", + "@vitest/spy": "4.1.2", + "@vitest/utils": "4.1.2", + "es-module-lexer": "^2.0.0", + "expect-type": "^1.3.0", "magic-string": "^0.30.21", "obug": "^2.1.1", "pathe": "^2.0.3", "picomatch": "^4.0.3", - "std-env": "^3.10.0", + "std-env": "^4.0.0-rc.1", "tinybench": "^2.9.0", "tinyexec": "^1.0.2", "tinyglobby": "^0.2.15", - "tinyrainbow": "^3.0.3", - "vite": "^6.0.0 || ^7.0.0", + "tinyrainbow": "^3.1.0", + "vite": "^6.0.0 || ^7.0.0 || ^8.0.0", "why-is-node-running": "^2.3.0" }, "bin": { @@ -10329,12 +11795,13 @@ "@edge-runtime/vm": "*", "@opentelemetry/api": "^1.9.0", "@types/node": "^20.0.0 || ^22.0.0 || >=24.0.0", - "@vitest/browser-playwright": "4.0.18", - "@vitest/browser-preview": "4.0.18", - "@vitest/browser-webdriverio": "4.0.18", - "@vitest/ui": "4.0.18", + "@vitest/browser-playwright": "4.1.2", + "@vitest/browser-preview": "4.1.2", + "@vitest/browser-webdriverio": "4.1.2", + "@vitest/ui": "4.1.2", "happy-dom": "*", - "jsdom": "*" + "jsdom": "*", + "vite": "^6.0.0 || ^7.0.0 || ^8.0.0" }, "peerDependenciesMeta": { "@edge-runtime/vm": { @@ -10363,6 +11830,9 @@ }, "jsdom": { "optional": true + }, + "vite": { + "optional": false } } }, @@ -10461,27 +11931,28 @@ } }, "node_modules/whatwg-mimetype": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/whatwg-mimetype/-/whatwg-mimetype-4.0.0.tgz", - "integrity": "sha512-QaKxh0eNIi2mE9p2vEdzfagOKHCcj1pJ56EEHGQOVxp8r9/iszLUUV7v89x9O1p/T+NlTM5W7jW6+cz4Fq1YVg==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/whatwg-mimetype/-/whatwg-mimetype-5.0.0.tgz", + "integrity": "sha512-sXcNcHOC51uPGF0P/D4NVtrkjSU2fNsm9iog4ZvZJsL3rjoDAzXZhkm2MWt1y+PUdggKAYVoMAIYcs78wJ51Cw==", "dev": true, "license": "MIT", "engines": { - "node": ">=18" + "node": ">=20" } }, "node_modules/whatwg-url": { - "version": "15.1.0", - "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-15.1.0.tgz", - "integrity": "sha512-2ytDk0kiEj/yu90JOAp44PVPUkO9+jVhyf+SybKlRHSDlvOOZhdPIrr7xTH64l4WixO2cP+wQIcgujkGBPPz6g==", + "version": "16.0.1", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-16.0.1.tgz", + "integrity": "sha512-1to4zXBxmXHV3IiSSEInrreIlu02vUOvrhxJJH5vcxYTBDAx51cqZiKdyTxlecdKNSjj8EcxGBxNf6Vg+945gw==", "dev": true, "license": "MIT", "dependencies": { + "@exodus/bytes": "^1.11.0", "tr46": "^6.0.0", - "webidl-conversions": "^8.0.0" + "webidl-conversions": "^8.0.1" }, "engines": { - "node": ">=20" + "node": "^20.19.0 || ^22.12.0 || >=24.0.0" } }, "node_modules/which": { @@ -10610,28 +12081,6 @@ "dev": true, "license": "ISC" }, - "node_modules/ws": { - "version": "8.19.0", - "resolved": "https://registry.npmjs.org/ws/-/ws-8.19.0.tgz", - "integrity": "sha512-blAT2mjOEIi0ZzruJfIhb3nps74PRWTCz1IjglWEEpQl5XS/UNama6u2/rjFkDDouqr4L67ry+1aGIALViWjDg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=10.0.0" - }, - "peerDependencies": { - "bufferutil": "^4.0.1", - "utf-8-validate": ">=5.0.2" - }, - "peerDependenciesMeta": { - "bufferutil": { - "optional": true - }, - "utf-8-validate": { - "optional": true - } - } - }, "node_modules/xml-name-validator": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/xml-name-validator/-/xml-name-validator-5.0.0.tgz", diff --git a/frontend/ai.client/package.json b/frontend/ai.client/package.json index 544b0604..5b6183b8 100644 --- a/frontend/ai.client/package.json +++ b/frontend/ai.client/package.json @@ -1,6 +1,6 @@ { "name": "ai.client", - "version": "1.0.0-beta.19", + "version": "1.0.0-beta.20", "scripts": { "ng": "ng", "start": "ng serve", @@ -24,22 +24,22 @@ "private": true, "packageManager": "npm@11.2.0", "dependencies": { - "@angular/cdk": "21.2.3", - "@angular/common": "21.2.5", - "@angular/compiler": "21.2.5", - "@angular/core": "21.2.5", - "@angular/forms": "21.2.5", - "@angular/platform-browser": "21.2.5", - "@angular/router": "21.2.5", + "@angular/cdk": "21.2.4", + "@angular/common": "21.2.6", + "@angular/compiler": "21.2.6", + "@angular/core": "21.2.6", + "@angular/forms": "21.2.6", + "@angular/platform-browser": "21.2.6", + "@angular/router": "21.2.6", "@ctrl/ngx-emoji-mart": "9.3.0", "@microsoft/fetch-event-source": "2.0.1", - "@ng-icons/core": "33.1.0", - "@ng-icons/heroicons": "33.1.0", + "@ng-icons/core": "33.2.0", + "@ng-icons/heroicons": "33.2.0", "chart.js": "4.5.1", - "katex": "0.16.33", - "marked": "17.0.3", - "mermaid": "11.12.3", - "ng2-charts": "8.0.0", + "katex": "0.16.44", + "marked": "17.0.5", + "mermaid": "11.13.0", + "ng2-charts": "10.0.0", "ngx-markdown": "21.1.0", "prismjs": "1.30.0", "rxjs": "7.8.2", @@ -47,19 +47,22 @@ "uuid": "13.0.0" }, "devDependencies": { - "@angular/build": "21.2.3", - "@angular/cli": "21.2.3", - "@angular/compiler-cli": "21.2.5", - "@tailwindcss/postcss": "4.2.1", - "@vitest/coverage-v8": "4.0.18", - "fast-check": "3.23.2", - "jsdom": "27.4.0", - "postcss": "8.5.6", - "tailwindcss": "4.2.1", + "@analogjs/vite-plugin-angular": "3.0.0-alpha.18", + "@analogjs/vitest-angular": "3.0.0-alpha.18", + "@angular/build": "21.2.5", + "@angular/cli": "21.2.5", + "@angular/compiler-cli": "21.2.6", + "@tailwindcss/postcss": "4.2.2", + "@vitest/coverage-v8": "4.1.2", + "fast-check": "4.6.0", + "jsdom": "29.0.1", + "postcss": "8.5.8", + "tailwindcss": "4.2.2", "typescript": "5.9.3", - "vitest": "4.0.18" + "vitest": "4.1.2" }, "overrides": { - "undici": ">=7.24.0" + "undici": ">=7.24.0", + "picomatch": ">=4.0.4" } } diff --git a/frontend/ai.client/src/app/admin/admin.page.ts b/frontend/ai.client/src/app/admin/admin.page.ts index ec7fcd58..dba672ef 100644 --- a/frontend/ai.client/src/app/admin/admin.page.ts +++ b/frontend/ai.client/src/app/admin/admin.page.ts @@ -121,6 +121,12 @@ export class AdminPage { icon: 'heroAcademicCap', route: '/admin/fine-tuning', }, + { + title: 'Fine-Tuning Costs', + description: 'View per-user GPU compute costs, hours used, and job counts for fine-tuning. Drill into monthly breakdowns.', + icon: 'heroChartBar', + route: '/admin/fine-tuning/costs', + }, { title: 'Quota Tiers', description: 'Create and manage quota tiers with cost limits and soft limit configurations. Define monthly/daily limits and warning thresholds.', diff --git a/frontend/ai.client/src/app/admin/costs/admin-costs.page.ts b/frontend/ai.client/src/app/admin/costs/admin-costs.page.ts index 858649a9..106ebd48 100644 --- a/frontend/ai.client/src/app/admin/costs/admin-costs.page.ts +++ b/frontend/ai.client/src/app/admin/costs/admin-costs.page.ts @@ -16,7 +16,6 @@ import { AdminCostStateService } from './services'; import { PeriodSelectorComponent } from './components/period-selector.component'; import { SystemSummaryCardComponent, - SummaryCardIcon, } from './components/system-summary-card.component'; import { TopUsersTableComponent } from './components/top-users-table.component'; import { CostTrendsChartComponent } from './components/cost-trends-chart.component'; diff --git a/frontend/ai.client/src/app/admin/fine-tuning-access/models/fine-tuning-access.models.ts b/frontend/ai.client/src/app/admin/fine-tuning-access/models/fine-tuning-access.models.ts index 7d90bba6..2301bd51 100644 --- a/frontend/ai.client/src/app/admin/fine-tuning-access/models/fine-tuning-access.models.ts +++ b/frontend/ai.client/src/app/admin/fine-tuning-access/models/fine-tuning-access.models.ts @@ -13,3 +13,23 @@ export interface AccessListResponse { grants: FineTuningGrant[]; total_count: number; } + +/** Per-user cost breakdown for a billing period. */ +export interface UserCostBreakdown { + email: string; + total_cost_usd: number; + total_gpu_hours: number; + training_job_count: number; + inference_job_count: number; +} + +/** Aggregated cost dashboard response. */ +export interface FineTuningCostDashboard { + period: string; + total_cost_usd: number; + total_gpu_hours: number; + active_user_count: number; + training_job_count: number; + inference_job_count: number; + users: UserCostBreakdown[]; +} diff --git a/frontend/ai.client/src/app/admin/fine-tuning-access/services/fine-tuning-admin-http.service.ts b/frontend/ai.client/src/app/admin/fine-tuning-access/services/fine-tuning-admin-http.service.ts index 2684c2fd..3210dd43 100644 --- a/frontend/ai.client/src/app/admin/fine-tuning-access/services/fine-tuning-admin-http.service.ts +++ b/frontend/ai.client/src/app/admin/fine-tuning-access/services/fine-tuning-admin-http.service.ts @@ -1,8 +1,12 @@ import { Injectable, inject, computed } from '@angular/core'; -import { HttpClient } from '@angular/common/http'; +import { HttpClient, HttpParams } from '@angular/common/http'; import { Observable } from 'rxjs'; import { ConfigService } from '../../../services/config.service'; -import { AccessListResponse, FineTuningGrant } from '../models/fine-tuning-access.models'; +import { + AccessListResponse, + FineTuningGrant, + FineTuningCostDashboard, +} from '../models/fine-tuning-access.models'; /** * HTTP service for admin fine-tuning access management API. @@ -43,4 +47,13 @@ export class FineTuningAdminHttpService { `${this.baseUrl()}/access/${encodeURIComponent(email)}`, ); } + + /** Get aggregated cost dashboard for a billing period. */ + getCostDashboard(month?: string): Observable { + let params = new HttpParams(); + if (month) { + params = params.set('month', month); + } + return this.http.get(`${this.baseUrl()}/costs`, { params }); + } } diff --git a/frontend/ai.client/src/app/admin/fine-tuning-access/services/fine-tuning-admin-state.service.ts b/frontend/ai.client/src/app/admin/fine-tuning-access/services/fine-tuning-admin-state.service.ts index d0de6e49..003c2adc 100644 --- a/frontend/ai.client/src/app/admin/fine-tuning-access/services/fine-tuning-admin-state.service.ts +++ b/frontend/ai.client/src/app/admin/fine-tuning-access/services/fine-tuning-admin-state.service.ts @@ -1,7 +1,7 @@ import { Injectable, inject, signal, computed } from '@angular/core'; import { firstValueFrom } from 'rxjs'; import { FineTuningAdminHttpService } from './fine-tuning-admin-http.service'; -import { FineTuningGrant } from '../models/fine-tuning-access.models'; +import { FineTuningGrant, FineTuningCostDashboard } from '../models/fine-tuning-access.models'; /** * State service for admin fine-tuning access management. @@ -98,4 +98,42 @@ export class FineTuningAdminStateService { clearError(): void { this.error.set(null); } + + // ========== Cost Dashboard ========== + + /** Cost dashboard data for the selected period. */ + readonly costDashboard = signal(null); + + /** Whether cost data is loading. */ + readonly costLoading = signal(false); + + /** Selected billing period (YYYY-MM). */ + readonly costPeriod = signal(this.getCurrentPeriod()); + + /** Fetch cost dashboard for a given period. */ + async loadCostDashboard(month?: string): Promise { + const period = month ?? this.costPeriod(); + this.costLoading.set(true); + this.error.set(null); + try { + const data = await firstValueFrom(this.http.getCostDashboard(period)); + this.costDashboard.set(data); + } catch (err: unknown) { + const message = err instanceof Error ? err.message : 'Failed to load cost data'; + this.error.set(message); + } finally { + this.costLoading.set(false); + } + } + + /** Update the selected cost period and reload. */ + setCostPeriod(period: string): void { + this.costPeriod.set(period); + this.loadCostDashboard(period); + } + + private getCurrentPeriod(): string { + const now = new Date(); + return `${now.getFullYear()}-${String(now.getMonth() + 1).padStart(2, '0')}`; + } } diff --git a/frontend/ai.client/src/app/admin/fine-tuning-costs/fine-tuning-costs.page.html b/frontend/ai.client/src/app/admin/fine-tuning-costs/fine-tuning-costs.page.html new file mode 100644 index 00000000..dc634d59 --- /dev/null +++ b/frontend/ai.client/src/app/admin/fine-tuning-costs/fine-tuning-costs.page.html @@ -0,0 +1,281 @@ +
+
+ + + + Back to Admin + + + +
+
+

+ Fine-Tuning Costs +

+

+ Monitor GPU compute costs and usage across all fine-tuning users. +

+
+ + +
+ + + +
+
+ + @if (loading()) { + +
+
+
+ Loading cost data +
+

+ Loading cost data... +

+
+
+ } @else if (error()) { + + + } @else if (dashboard(); as data) { + +
+ +
+
+
+

Total Cost

+

+ {{ formatCurrency(data.total_cost_usd) }} +

+
+
+ +
+
+
+ + +
+
+
+

GPU Hours

+

+ {{ formatHours(data.total_gpu_hours) }}h +

+
+
+ +
+
+
+ + +
+
+
+

Active Users

+

+ {{ data.active_user_count }} +

+
+
+ +
+
+
+ + +
+
+
+

Avg Cost / User

+

+ {{ formatCurrency(avgCostPerUser()) }} +

+
+
+ +
+
+
+
+ + +
+
+
Training Jobs
+
{{ data.training_job_count }}
+
+
+
Inference Jobs
+
{{ data.inference_job_count }}
+
+
+ + +
+

Cost by User

+ + @if (data.users.length === 0) { +
+

No billable jobs found for this period.

+
+ } @else { +
+ + + + + + + + + + + + + @for (user of sortedUsers(); track user.email) { + + + + + + + + + } + +
+ + + + + + + + + + + + Share + +
+ {{ user.email }} + + {{ formatCurrency(user.total_cost_usd) }} + + {{ formatHours(user.total_gpu_hours) }}h + + {{ user.training_job_count }} + + {{ user.inference_job_count }} + +
+
+
+
+ + {{ costBarWidth(user) }}% + +
+
+
+ } +
+ } +
+
diff --git a/frontend/ai.client/src/app/admin/fine-tuning-costs/fine-tuning-costs.page.ts b/frontend/ai.client/src/app/admin/fine-tuning-costs/fine-tuning-costs.page.ts new file mode 100644 index 00000000..24ecacfc --- /dev/null +++ b/frontend/ai.client/src/app/admin/fine-tuning-costs/fine-tuning-costs.page.ts @@ -0,0 +1,134 @@ +import { + ChangeDetectionStrategy, + Component, + computed, + inject, + OnInit, + signal, +} from '@angular/core'; +import { RouterLink } from '@angular/router'; +import { FormsModule } from '@angular/forms'; +import { NgIcon, provideIcons } from '@ng-icons/core'; +import { + heroArrowLeft, + heroCurrencyDollar, + heroClock, + heroUsers, + heroCpuChip, + heroChevronDown, + heroChevronUp, + heroCalendar, +} from '@ng-icons/heroicons/outline'; +import { FineTuningAdminStateService } from '../fine-tuning-access/services/fine-tuning-admin-state.service'; +import { UserCostBreakdown } from '../fine-tuning-access/models/fine-tuning-access.models'; + +type SortField = 'email' | 'total_cost_usd' | 'total_gpu_hours' | 'training_job_count' | 'inference_job_count'; + +@Component({ + selector: 'app-fine-tuning-costs-page', + imports: [RouterLink, FormsModule, NgIcon], + providers: [ + provideIcons({ + heroArrowLeft, + heroCurrencyDollar, + heroClock, + heroUsers, + heroCpuChip, + heroChevronDown, + heroChevronUp, + heroCalendar, + }), + ], + changeDetection: ChangeDetectionStrategy.OnPush, + templateUrl: './fine-tuning-costs.page.html', + host: { class: 'block' }, +}) +export class FineTuningCostsPage implements OnInit { + readonly state = inject(FineTuningAdminStateService); + + readonly sortField = signal('total_cost_usd'); + readonly sortAsc = signal(false); + + readonly dashboard = this.state.costDashboard; + readonly loading = this.state.costLoading; + readonly error = this.state.error; + readonly selectedPeriod = this.state.costPeriod; + + readonly periodOptions = computed(() => { + const options: { value: string; label: string }[] = []; + const now = new Date(); + for (let i = 0; i < 12; i++) { + const date = new Date(now.getFullYear(), now.getMonth() - i, 1); + const value = `${date.getFullYear()}-${String(date.getMonth() + 1).padStart(2, '0')}`; + const label = date.toLocaleDateString('en-US', { month: 'long', year: 'numeric' }); + options.push({ value, label }); + } + return options; + }); + + readonly sortedUsers = computed(() => { + const data = this.dashboard(); + if (!data) return []; + const users = [...data.users]; + const field = this.sortField(); + const asc = this.sortAsc(); + users.sort((a, b) => { + const aVal = a[field]; + const bVal = b[field]; + if (typeof aVal === 'string' && typeof bVal === 'string') { + return asc ? aVal.localeCompare(bVal) : bVal.localeCompare(aVal); + } + return asc ? (aVal as number) - (bVal as number) : (bVal as number) - (aVal as number); + }); + return users; + }); + + readonly avgCostPerUser = computed(() => { + const data = this.dashboard(); + if (!data || data.active_user_count === 0) return 0; + return data.total_cost_usd / data.active_user_count; + }); + + ngOnInit(): void { + this.state.loadCostDashboard(); + } + + onPeriodChange(period: string): void { + this.state.setCostPeriod(period); + } + + toggleSort(field: SortField): void { + if (this.sortField() === field) { + this.sortAsc.update(v => !v); + } else { + this.sortField.set(field); + this.sortAsc.set(field === 'email'); + } + } + + isSortedBy(field: SortField): boolean { + return this.sortField() === field; + } + + formatCurrency(value: number): string { + return new Intl.NumberFormat('en-US', { + style: 'currency', + currency: 'USD', + minimumFractionDigits: 2, + maximumFractionDigits: 2, + }).format(value); + } + + formatHours(value: number): string { + return new Intl.NumberFormat('en-US', { + minimumFractionDigits: 1, + maximumFractionDigits: 1, + }).format(value); + } + + costBarWidth(user: UserCostBreakdown): number { + const data = this.dashboard(); + if (!data || data.total_cost_usd === 0) return 0; + return Math.max(2, Math.round((user.total_cost_usd / data.total_cost_usd) * 100)); + } +} diff --git a/frontend/ai.client/src/app/admin/manage-models/services/managed-models.service.ts b/frontend/ai.client/src/app/admin/manage-models/services/managed-models.service.ts index 89826642..943e27e0 100644 --- a/frontend/ai.client/src/app/admin/manage-models/services/managed-models.service.ts +++ b/frontend/ai.client/src/app/admin/manage-models/services/managed-models.service.ts @@ -1,4 +1,4 @@ -import { Injectable, inject, signal, computed, resource } from '@angular/core'; +import { Injectable, inject, computed, resource } from '@angular/core'; import { HttpClient } from '@angular/common/http'; import { firstValueFrom } from 'rxjs'; import { ConfigService } from '../../../services/config.service'; diff --git a/frontend/ai.client/src/app/admin/quota-tiers/pages/tier-list/tier-list.component.ts b/frontend/ai.client/src/app/admin/quota-tiers/pages/tier-list/tier-list.component.ts index 9ea7f72b..105ebb98 100644 --- a/frontend/ai.client/src/app/admin/quota-tiers/pages/tier-list/tier-list.component.ts +++ b/frontend/ai.client/src/app/admin/quota-tiers/pages/tier-list/tier-list.component.ts @@ -3,7 +3,6 @@ import { RouterLink } from '@angular/router'; import { FormsModule } from '@angular/forms'; import { DatePipe } from '@angular/common'; import { QuotaStateService } from '../../services/quota-state.service'; -import { QuotaTier } from '../../models/quota.models'; @Component({ selector: 'app-tier-list', diff --git a/frontend/ai.client/src/app/admin/quota-tiers/services/quota-state.service.ts b/frontend/ai.client/src/app/admin/quota-tiers/services/quota-state.service.ts index 0540617d..03a0f0be 100644 --- a/frontend/ai.client/src/app/admin/quota-tiers/services/quota-state.service.ts +++ b/frontend/ai.client/src/app/admin/quota-tiers/services/quota-state.service.ts @@ -1,5 +1,4 @@ import { Injectable, inject, signal, computed } from '@angular/core'; -import { toSignal } from '@angular/core/rxjs-interop'; import { QuotaHttpService } from './quota-http.service'; import { QuotaTier, diff --git a/frontend/ai.client/src/app/admin/tools/pages/tool-form.page.ts b/frontend/ai.client/src/app/admin/tools/pages/tool-form.page.ts index c710122d..7df23c46 100644 --- a/frontend/ai.client/src/app/admin/tools/pages/tool-form.page.ts +++ b/frontend/ai.client/src/app/admin/tools/pages/tool-form.page.ts @@ -21,8 +21,6 @@ import { import { AdminToolService } from '../services/admin-tool.service'; import { OAuthProvidersService } from '../../oauth-providers/services/oauth-providers.service'; import { - AdminTool, - ToolFormData, TOOL_CATEGORIES, TOOL_PROTOCOLS, TOOL_STATUSES, diff --git a/frontend/ai.client/src/app/admin/tools/services/admin-tool.service.ts b/frontend/ai.client/src/app/admin/tools/services/admin-tool.service.ts index 6fc2f173..e271faeb 100644 --- a/frontend/ai.client/src/app/admin/tools/services/admin-tool.service.ts +++ b/frontend/ai.client/src/app/admin/tools/services/admin-tool.service.ts @@ -10,7 +10,6 @@ import { ToolUpdateRequest, ToolRolesResponse, ToolRoleAssignment, - SetToolRolesRequest, SyncResult, } from '../models/admin-tool.model'; diff --git a/frontend/ai.client/src/app/app.config.spec.ts b/frontend/ai.client/src/app/app.config.spec.ts index 4e3caab5..d154d429 100644 --- a/frontend/ai.client/src/app/app.config.spec.ts +++ b/frontend/ai.client/src/app/app.config.spec.ts @@ -78,11 +78,11 @@ describe('APP_INITIALIZER Integration - App Bootstrap with Valid Config', () => expect(result).toBeInstanceOf(Promise); }); - it.skip('should return a Promise that resolves when config is loaded', async () => { - // Create a mock ConfigService with a delayed response + it('should return a Promise that resolves when config is loaded', async () => { + // Create a mock ConfigService with an async response const mockConfigService = { loadConfig: vi.fn().mockImplementation(() => - new Promise(resolve => setTimeout(resolve, 10)) + Promise.resolve() ) } as any; diff --git a/frontend/ai.client/src/app/app.routes.ts b/frontend/ai.client/src/app/app.routes.ts index 46aebf60..3d47b361 100644 --- a/frontend/ai.client/src/app/app.routes.ts +++ b/frontend/ai.client/src/app/app.routes.ts @@ -1,5 +1,4 @@ import { Routes } from '@angular/router'; -import { ConversationPage } from './session/session.page'; import { authGuard } from './auth/auth.guard'; import { adminGuard } from './auth/admin.guard'; @@ -192,6 +191,11 @@ export const routes: Routes = [ loadComponent: () => import('./admin/fine-tuning-access/fine-tuning-access.page').then(m => m.FineTuningAccessPage), canActivate: [adminGuard], }, + { + path: 'admin/fine-tuning/costs', + loadComponent: () => import('./admin/fine-tuning-costs/fine-tuning-costs.page').then(m => m.FineTuningCostsPage), + canActivate: [adminGuard], + }, { path: 'fine-tuning', loadComponent: () => import('./fine-tuning/pages/dashboard/fine-tuning-dashboard.page').then(m => m.FineTuningDashboardPage), diff --git a/frontend/ai.client/src/app/assistants/assistant-form/assistant-form.page.ts b/frontend/ai.client/src/app/assistants/assistant-form/assistant-form.page.ts index 08c989aa..1299c9bc 100644 --- a/frontend/ai.client/src/app/assistants/assistant-form/assistant-form.page.ts +++ b/frontend/ai.client/src/app/assistants/assistant-form/assistant-form.page.ts @@ -375,9 +375,12 @@ export class AssistantFormPage implements OnInit, OnDestroy { status: 'uploading', }); + let documentId: string | undefined; + try { // Step 1: Request presigned URL const uploadUrlResponse = await this.documentService.requestUploadUrl(assistantId, file); + documentId = uploadUrlResponse.documentId; // Step 2: Upload to S3 with progress tracking await this.documentService.uploadToS3(uploadUrlResponse.uploadUrl, file, (progress) => { @@ -418,6 +421,17 @@ export class AssistantFormPage implements OnInit, OnDestroy { status: 'error', error: errorMessage, }); + + // Report the failure to the backend so the DynamoDB record is marked + // as 'failed' instead of stuck in 'uploading'. This prevents infinite + // polling on page refresh. + if (documentId) { + const details = + error instanceof DocumentUploadError + ? JSON.stringify(error.details) + : undefined; + this.documentService.reportUploadFailure(assistantId, documentId, errorMessage, details); + } } } @@ -539,8 +553,6 @@ export class AssistantFormPage implements OnInit, OnDestroy { return `${baseClasses} bg-green-100 text-green-800 dark:bg-green-900/30 dark:text-green-300`; case 'DRAFT': return `${baseClasses} bg-amber-100 text-amber-800 dark:bg-amber-900/30 dark:text-amber-300`; - case 'ARCHIVED': - return `${baseClasses} bg-gray-100 text-gray-800 dark:bg-gray-700 dark:text-gray-300`; default: return `${baseClasses} bg-gray-100 text-gray-800 dark:bg-gray-700 dark:text-gray-300`; } diff --git a/frontend/ai.client/src/app/assistants/assistants.page.ts b/frontend/ai.client/src/app/assistants/assistants.page.ts index 6dfd6e63..d697d329 100644 --- a/frontend/ai.client/src/app/assistants/assistants.page.ts +++ b/frontend/ai.client/src/app/assistants/assistants.page.ts @@ -1,4 +1,4 @@ -import { Component, ChangeDetectionStrategy, inject, OnInit, signal, computed } from '@angular/core'; +import { Component, ChangeDetectionStrategy, inject, OnInit, computed } from '@angular/core'; import { Router } from '@angular/router'; import { Dialog } from '@angular/cdk/dialog'; import { firstValueFrom } from 'rxjs'; @@ -45,8 +45,8 @@ export class AssistantsPage implements OnInit { async loadAssistants(): Promise { try { - // Load COMPLETE assistants (not drafts or archived) and do NOT include public assistants - await this.assistantService.loadAssistants(true, false, false); + // Load COMPLETE assistants (not drafts) and do NOT include public assistants + await this.assistantService.loadAssistants(true, false); } catch (error) { console.error('Error loading assistants:', error); } diff --git a/frontend/ai.client/src/app/assistants/components/assistant-list.component.ts b/frontend/ai.client/src/app/assistants/components/assistant-list.component.ts index 2d208f6d..07f42728 100644 --- a/frontend/ai.client/src/app/assistants/components/assistant-list.component.ts +++ b/frontend/ai.client/src/app/assistants/components/assistant-list.component.ts @@ -197,8 +197,6 @@ export class AssistantListComponent { return `${baseClasses} bg-emerald-100 text-emerald-700 dark:bg-emerald-900/30 dark:text-emerald-400`; case 'DRAFT': return `${baseClasses} bg-amber-100 text-amber-700 dark:bg-amber-900/30 dark:text-amber-400`; - case 'ARCHIVED': - return `${baseClasses} bg-gray-100 text-gray-600 dark:bg-gray-700 dark:text-gray-400`; default: return `${baseClasses} bg-gray-100 text-gray-600 dark:bg-gray-700 dark:text-gray-400`; } diff --git a/frontend/ai.client/src/app/assistants/models/assistant.model.ts b/frontend/ai.client/src/app/assistants/models/assistant.model.ts index bed6cd54..d80f8689 100644 --- a/frontend/ai.client/src/app/assistants/models/assistant.model.ts +++ b/frontend/ai.client/src/app/assistants/models/assistant.model.ts @@ -13,7 +13,7 @@ export interface Assistant { usageCount: number; createdAt: string; updatedAt: string; - status: 'DRAFT' | 'COMPLETE' | 'ARCHIVED'; + status: 'DRAFT' | 'COMPLETE'; imageUrl?: string; // Share metadata (only present for shared assistants) @@ -45,7 +45,7 @@ export interface UpdateAssistantRequest { tags?: string[]; starters?: string[]; emoji?: string; - status?: 'DRAFT' | 'COMPLETE' | 'ARCHIVED'; + status?: 'DRAFT' | 'COMPLETE'; } export interface AssistantsListResponse { diff --git a/frontend/ai.client/src/app/assistants/services/assistant-api.service.ts b/frontend/ai.client/src/app/assistants/services/assistant-api.service.ts index d7fedf86..38bea6b5 100644 --- a/frontend/ai.client/src/app/assistants/services/assistant-api.service.ts +++ b/frontend/ai.client/src/app/assistants/services/assistant-api.service.ts @@ -36,7 +36,6 @@ export class AssistantApiService { getAssistants(params?: { limit?: number; nextToken?: string; - includeArchived?: boolean; includeDrafts?: boolean; includePublic?: boolean; }): Observable { @@ -47,9 +46,6 @@ export class AssistantApiService { if (params?.nextToken) { httpParams = httpParams.set('next_token', params.nextToken); } - if (params?.includeArchived !== undefined) { - httpParams = httpParams.set('include_archived', params.includeArchived.toString()); - } if (params?.includeDrafts !== undefined) { httpParams = httpParams.set('include_drafts', params.includeDrafts.toString()); } @@ -68,10 +64,6 @@ export class AssistantApiService { return this.http.put(`${this.baseUrl()}/${id}`, request); } - archiveAssistant(id: string): Observable { - return this.http.post(`${this.baseUrl()}/${id}/archive`, {}); - } - deleteAssistant(id: string): Observable { return this.http.delete(`${this.baseUrl()}/${id}`); } diff --git a/frontend/ai.client/src/app/assistants/services/assistant.service.spec.ts b/frontend/ai.client/src/app/assistants/services/assistant.service.spec.ts index 529c8093..56660562 100644 --- a/frontend/ai.client/src/app/assistants/services/assistant.service.spec.ts +++ b/frontend/ai.client/src/app/assistants/services/assistant.service.spec.ts @@ -18,7 +18,6 @@ describe('AssistantService', () => { getAssistants: vi.fn(), createAssistant: vi.fn(), updateAssistant: vi.fn(), - archiveAssistant: vi.fn(), deleteAssistant: vi.fn(), getAssistant: vi.fn(), shareAssistant: vi.fn(), @@ -61,11 +60,10 @@ describe('AssistantService', () => { const mockResponse = { assistants: [{ assistantId: '1', name: 'Assistant 1' }] }; mockApiService.getAssistants.mockReturnValue(of(mockResponse)); - await service.loadAssistants(true, false, true); + await service.loadAssistants(true, true); expect(mockApiService.getAssistants).toHaveBeenCalledWith({ includeDrafts: true, - includeArchived: false, includePublic: true }); expect(service.assistants$()).toEqual(mockResponse.assistants); diff --git a/frontend/ai.client/src/app/assistants/services/assistant.service.ts b/frontend/ai.client/src/app/assistants/services/assistant.service.ts index bc43afb1..58de320a 100644 --- a/frontend/ai.client/src/app/assistants/services/assistant.service.ts +++ b/frontend/ai.client/src/app/assistants/services/assistant.service.ts @@ -5,8 +5,6 @@ import { CreateAssistantDraftRequest, CreateAssistantRequest, UpdateAssistantRequest, - ShareAssistantRequest, - UnshareAssistantRequest, AssistantSharesResponse } from '../models/assistant.model'; import { AssistantApiService } from './assistant-api.service'; @@ -44,14 +42,13 @@ export class AssistantService { } } - async loadAssistants(includeDrafts = false, includeArchived = false, includePublic = false): Promise { + async loadAssistants(includeDrafts = false, includePublic = false): Promise { this.loading.set(true); this.error.set(null); try { const response = await firstValueFrom(this.apiService.getAssistants({ includeDrafts, - includeArchived, includePublic })); @@ -114,26 +111,6 @@ export class AssistantService { } } - async archiveAssistant(id: string): Promise { - this.loading.set(true); - this.error.set(null); - - try { - await firstValueFrom(this.apiService.archiveAssistant(id)); - - // Remove from local list (archived assistants are hidden by default) - this.assistants.update(current => - current.filter(a => a.assistantId !== id) - ); - } catch (err) { - const errorMessage = err instanceof Error ? err.message : 'Failed to archive assistant'; - this.error.set(errorMessage); - throw err; - } finally { - this.loading.set(false); - } - } - async deleteAssistant(id: string): Promise { this.loading.set(true); this.error.set(null); diff --git a/frontend/ai.client/src/app/assistants/services/document.service.ts b/frontend/ai.client/src/app/assistants/services/document.service.ts index 76a8beeb..54323b13 100644 --- a/frontend/ai.client/src/app/assistants/services/document.service.ts +++ b/frontend/ai.client/src/app/assistants/services/document.service.ts @@ -259,6 +259,36 @@ export class DocumentService { } } + /** + * Report that a client-side S3 upload failed. + * Marks the document as 'failed' in the backend so polling stops + * and the error is visible on page refresh. + * + * @param assistantId - The assistant identifier + * @param documentId - The document identifier + * @param error - User-friendly error message + * @param details - Optional technical error details + */ + async reportUploadFailure( + assistantId: string, + documentId: string, + error: string, + details?: string, + ): Promise { + try { + await firstValueFrom( + this.http.post( + `${this.baseUrl()}/${assistantId}/documents/${documentId}/upload-failed`, + { error, details }, + ), + ); + } catch (err) { + // Best-effort — don't throw if this fails, the stale document + // timeout will eventually catch it + console.warn('Failed to report upload failure to backend:', err); + } + } + /** * Poll document status until it reaches a terminal state (complete or failed). * Uses exponential backoff with a maximum interval. diff --git a/frontend/ai.client/src/app/auth/auth-pbt.spec.ts b/frontend/ai.client/src/app/auth/auth-pbt.spec.ts index 9076b46e..809b7f8a 100644 --- a/frontend/ai.client/src/app/auth/auth-pbt.spec.ts +++ b/frontend/ai.client/src/app/auth/auth-pbt.spec.ts @@ -4,10 +4,11 @@ import * as fc from 'fast-check'; // Shared fast-check arbitraries for auth RBAC property-based tests // Feature: auth-rbac-tests -const arbRoleName = fc.stringOf( - fc.constantFrom(...'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_-'.split('')), - { minLength: 1, maxLength: 30 } -); +const arbRoleName = fc.string({ + unit: fc.constantFrom(...'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_-'.split('')), + minLength: 1, + maxLength: 30, +}); const arbRoleList = fc.array(arbRoleName, { maxLength: 10 }); diff --git a/frontend/ai.client/src/app/components/error-toast/error-toast.component.ts b/frontend/ai.client/src/app/components/error-toast/error-toast.component.ts index 46954d4d..8d57ca86 100644 --- a/frontend/ai.client/src/app/components/error-toast/error-toast.component.ts +++ b/frontend/ai.client/src/app/components/error-toast/error-toast.component.ts @@ -1,5 +1,5 @@ import { Component, ChangeDetectionStrategy, inject, computed } from '@angular/core'; -import { ErrorService, ErrorMessage } from '../../services/error/error.service'; +import { ErrorService } from '../../services/error/error.service'; /** * Error toast component that displays error messages from ErrorService diff --git a/frontend/ai.client/src/app/components/sidenav/components/session-list/session-list.ts b/frontend/ai.client/src/app/components/sidenav/components/session-list/session-list.ts index eccc2434..f104758e 100644 --- a/frontend/ai.client/src/app/components/sidenav/components/session-list/session-list.ts +++ b/frontend/ai.client/src/app/components/sidenav/components/session-list/session-list.ts @@ -312,7 +312,7 @@ export class SessionList { const dialogRef = this.dialog.open(ConfirmationDialogComponent, { data: { title: 'Delete Conversation', - message: 'Are you sure you want to delete this conversation? This action cannot be undone.', + message: 'Are you sure you want to delete this conversation? This action cannot be undone. Any shared links to this conversation will stop working.', confirmText: 'Delete', cancelText: 'Cancel', destructive: true diff --git a/frontend/ai.client/src/app/components/toast/toast.component.ts b/frontend/ai.client/src/app/components/toast/toast.component.ts index adfa1bbb..1fe38c91 100644 --- a/frontend/ai.client/src/app/components/toast/toast.component.ts +++ b/frontend/ai.client/src/app/components/toast/toast.component.ts @@ -7,7 +7,7 @@ import { heroInformationCircle, heroXMark } from '@ng-icons/heroicons/outline'; -import { ToastService, ToastMessage, ToastType } from '../../services/toast/toast.service'; +import { ToastService, ToastType } from '../../services/toast/toast.service'; /** * Toast notification component that displays messages from ToastService. diff --git a/frontend/ai.client/src/app/components/tooltip/tooltip.directive.ts b/frontend/ai.client/src/app/components/tooltip/tooltip.directive.ts index 5ed02e90..59b22345 100644 --- a/frontend/ai.client/src/app/components/tooltip/tooltip.directive.ts +++ b/frontend/ai.client/src/app/components/tooltip/tooltip.directive.ts @@ -6,7 +6,6 @@ import { OnDestroy, TemplateRef, ViewContainerRef, - effect, signal, DestroyRef, } from '@angular/core'; @@ -14,11 +13,10 @@ import { Overlay, OverlayRef, ConnectedPosition, - ScrollStrategy, } from '@angular/cdk/overlay'; import { ComponentPortal } from '@angular/cdk/portal'; import { takeUntilDestroyed } from '@angular/core/rxjs-interop'; -import { merge, fromEvent } from 'rxjs'; +import { fromEvent } from 'rxjs'; import { filter } from 'rxjs/operators'; import { TooltipComponent } from './tooltip.component'; diff --git a/frontend/ai.client/src/app/fine-tuning/pages/dashboard/fine-tuning-dashboard.page.html b/frontend/ai.client/src/app/fine-tuning/pages/dashboard/fine-tuning-dashboard.page.html index bd12534a..81873db3 100644 --- a/frontend/ai.client/src/app/fine-tuning/pages/dashboard/fine-tuning-dashboard.page.html +++ b/frontend/ai.client/src/app/fine-tuning/pages/dashboard/fine-tuning-dashboard.page.html @@ -36,6 +36,23 @@

Fine-Tuning

} + +
+ + + What is fine-tuning and how can it help my research? + + +
+

+ If you have a need for a language model to perform automatic coding or classification for your research, and the input is text, then this is a potentially useful option for you. Fine-tuning takes an existing small language model and helps it become better at coding/classification on the data that you provide. +

+

+ To get started, upload a .csv file with two columns: text (your input data) and label (the code/classification you want the model to predict). You can control the train/test split, and once fine-tuning is complete, download the model for your own use. We also provide the ability to run inference on any future data you collect. +

+
+
+ @if (state.error(); as errorMsg) { @@ -80,9 +86,22 @@ export class UserMessageComponent implements AfterViewInit { expanded = signal(false); isOverflowing = signal(false); + private localSettings = inject(LocalSettingsService); + readonly maxHeightPx = MAX_HEIGHT_PX; + /** Original user message before prompt modification — skipped when debug output is enabled */ + displayText = computed((): string | null => { + if (this.localSettings.showDebugOutput()) return null; + const metadata = this.message().metadata; + if (metadata && typeof metadata['displayText'] === 'string') { + return metadata['displayText']; + } + return null; + }); + hasTextContent = computed(() => { + if (this.displayText()) return true; return this.message().content.some( (block: ContentBlock) => block.type === 'text' && block.text ); diff --git a/frontend/ai.client/src/app/session/components/message-list/message-list.component.ts b/frontend/ai.client/src/app/session/components/message-list/message-list.component.ts index f31dab9f..8ca7f641 100644 --- a/frontend/ai.client/src/app/session/components/message-list/message-list.component.ts +++ b/frontend/ai.client/src/app/session/components/message-list/message-list.component.ts @@ -41,7 +41,7 @@ export class MessageListComponent implements OnDestroy { if (this.isBrowser) { // Only recalculate when message count changes, not on every message update effect(() => { - const messageCount = this.messages().length; + this.messages().length; this.calculateSpacerHeight(); }); diff --git a/frontend/ai.client/src/app/session/services/chat/chat-http.service.ts b/frontend/ai.client/src/app/session/services/chat/chat-http.service.ts index a19f8628..16a8a519 100644 --- a/frontend/ai.client/src/app/session/services/chat/chat-http.service.ts +++ b/frontend/ai.client/src/app/session/services/chat/chat-http.service.ts @@ -72,14 +72,12 @@ export class ChatHttpService { } else if (response.status === 403) { // Handle forbidden (e.g., usage limit exceeded) let errorMessage = 'Access forbidden'; - let errorDetail: string | undefined; try { const errorData = await response.json(); if (errorData.error) { // Structured error from backend errorMessage = errorData.error.message || errorMessage; - errorDetail = errorData.error.detail; } else if (errorData.message) { errorMessage = errorData.message; } @@ -91,14 +89,12 @@ export class ChatHttpService { } else if (response.status >= 400 && response.status < 500 && response.status !== 429) { // Client-side errors are usually non-retriable let errorMessage = `Request failed with status ${response.status}`; - let errorDetail: string | undefined; try { const errorData = await response.json(); if (errorData.error) { // Structured error from backend errorMessage = errorData.error.message || errorMessage; - errorDetail = errorData.error.detail; } else if (errorData.message) { errorMessage = errorData.message; } diff --git a/frontend/ai.client/src/app/session/services/models/message.model.ts b/frontend/ai.client/src/app/session/services/models/message.model.ts index 2d74cfe1..c468f9c0 100644 --- a/frontend/ai.client/src/app/session/services/models/message.model.ts +++ b/frontend/ai.client/src/app/session/services/models/message.model.ts @@ -109,7 +109,7 @@ export interface Message { content: ContentBlock[]; /** ISO timestamp when the message was created */ created_at?: string; - /** Optional metadata associated with the message */ + /** Optional metadata associated with the message (may include displayText with the original user input before prompt modification) */ metadata?: Record | null; /** RAG citations from knowledge base retrieval (assistant messages only) */ citations?: Citation[]; diff --git a/frontend/ai.client/src/app/session/services/session/session.service.ts b/frontend/ai.client/src/app/session/services/session/session.service.ts index e74e9f1b..96dd45e1 100644 --- a/frontend/ai.client/src/app/session/services/session/session.service.ts +++ b/frontend/ai.client/src/app/session/services/session/session.service.ts @@ -219,7 +219,7 @@ export class SessionService { const apiResponse = this.sessionsResource.value(); const localCache = this.localSessionsCache(); - if (!apiResponse || apiResponse === null) { + if (!apiResponse) { // Resource hasn't loaded yet or is disabled, return cached sessions only return { sessions: localCache, diff --git a/frontend/ai.client/src/app/settings/connections/services/connections.service.ts b/frontend/ai.client/src/app/settings/connections/services/connections.service.ts index b74912de..b356ba11 100644 --- a/frontend/ai.client/src/app/settings/connections/services/connections.service.ts +++ b/frontend/ai.client/src/app/settings/connections/services/connections.service.ts @@ -8,7 +8,6 @@ import { OAuthConnectionListResponse, OAuthProvider, OAuthProviderListResponse, - OAuthConnectResponse, } from '../models'; /** diff --git a/frontend/ai.client/src/app/settings/oauth-callback/oauth-callback.page.ts b/frontend/ai.client/src/app/settings/oauth-callback/oauth-callback.page.ts index 907080f8..3ab5489a 100644 --- a/frontend/ai.client/src/app/settings/oauth-callback/oauth-callback.page.ts +++ b/frontend/ai.client/src/app/settings/oauth-callback/oauth-callback.page.ts @@ -5,7 +5,6 @@ import { OnInit, OnDestroy, signal, - computed, } from '@angular/core'; import { Router, ActivatedRoute } from '@angular/router'; import { NgIcon, provideIcons } from '@ng-icons/core'; diff --git a/frontend/ai.client/src/app/settings/pages/chat-preferences/chat-preferences-settings.page.ts b/frontend/ai.client/src/app/settings/pages/chat-preferences/chat-preferences-settings.page.ts index ecfe1cda..11c24f19 100644 --- a/frontend/ai.client/src/app/settings/pages/chat-preferences/chat-preferences-settings.page.ts +++ b/frontend/ai.client/src/app/settings/pages/chat-preferences/chat-preferences-settings.page.ts @@ -7,7 +7,7 @@ import { } from '@angular/core'; import { RouterLink } from '@angular/router'; import { NgIcon, provideIcons } from '@ng-icons/core'; -import { heroSparkles, heroChatBubbleLeftRight, heroChevronRight } from '@ng-icons/heroicons/outline'; +import { heroSparkles, heroChatBubbleLeftRight, heroChevronRight, heroBugAnt } from '@ng-icons/heroicons/outline'; import { ModelService } from '../../../session/services/model/model.service'; import { UserSettingsService } from '../../../services/user-settings.service'; import { LocalSettingsService } from '../../../services/local-settings.service'; @@ -17,7 +17,7 @@ import { LocalSettingsService } from '../../../services/local-settings.service'; changeDetection: ChangeDetectionStrategy.OnPush, imports: [NgIcon, RouterLink], providers: [ - provideIcons({ heroSparkles, heroChatBubbleLeftRight, heroChevronRight }), + provideIcons({ heroSparkles, heroChatBubbleLeftRight, heroChevronRight, heroBugAnt }), ], host: { class: 'block' }, template: ` @@ -99,6 +99,38 @@ import { LocalSettingsService } from '../../../services/local-settings.service'; + +
+
+
+
+ +
+
+ +

+ Show the full prompt sent to the model instead of the original message. +

+
+
+ + +
+ + +
+
+
+
', standalone: true, }) class MockMessageListComponent { - @Input() messages: any[] = []; - @Input() embeddedMode = false; + messages = input.required(); + embeddedMode = input(false); +} + +// Dynamically create the component under test with the mock dependency +@Component({ + selector: 'app-shared-view-test', + template: '
', + standalone: true, +}) +class TestSharedViewPage { + // We'll test the actual SharedViewPage logic by importing it dynamically } describe('SharedViewPage', () => { - let component: SharedViewPage; - let fixture: ComponentFixture; let mockShareService: any; let mockSessionService: any; let mockUserService: any; @@ -49,7 +55,7 @@ describe('SharedViewPage', () => { ], }; - function createComponent(shareId: string | null = 'share-001') { + function setupMocks(shareId: string | null = 'share-001') { mockShareService = { getSharedConversation: vi.fn(), exportSharedConversation: vi.fn(), @@ -67,9 +73,7 @@ describe('SharedViewPage', () => { navigate: vi.fn(), }; - TestBed.resetTestingModule(); - TestBed.configureTestingModule({ - imports: [SharedViewPage], + return { providers: [ { provide: ShareService, useValue: mockShareService }, { provide: SessionService, useValue: mockSessionService }, @@ -86,135 +90,151 @@ describe('SharedViewPage', () => { }, }, ], - }); + }; + } - // Swap the real MessageListComponent for the mock to avoid MarkdownService + // Import SharedViewPage dynamically and override its imports + async function createComponent(shareId: string | null = 'share-001') { + const { providers } = setupMocks(shareId); + + // Dynamically import to avoid template resolution at module load time + const { SharedViewPage } = await import('./shared-view.page'); + + TestBed.resetTestingModule(); + + // Override the component to use mock MessageListComponent TestBed.overrideComponent(SharedViewPage, { - remove: { imports: [MessageListComponent] }, - add: { imports: [MockMessageListComponent] }, + set: { + imports: [MockMessageListComponent], + template: '
', + }, }); - fixture = TestBed.createComponent(SharedViewPage); - component = fixture.componentInstance; + await TestBed.configureTestingModule({ + imports: [SharedViewPage], + providers, + }).compileComponents(); + + const fixture = TestBed.createComponent(SharedViewPage); + const component = fixture.componentInstance; + + return { fixture, component }; } // ----------------------------------------------------------------------- - // Basic rendering + // Basic component creation and lifecycle // ----------------------------------------------------------------------- - it('should create the component', () => { - createComponent(); + it('should create the component', async () => { + const { component } = await createComponent(); expect(component).toBeTruthy(); }); - it('should display conversation title on success', async () => { - createComponent(); - mockShareService.getSharedConversation.mockResolvedValue(mockConversation); - - await component.ngOnInit(); - fixture.detectChanges(); - - const el = fixture.nativeElement as HTMLElement; - expect(el.textContent).toContain('Test Shared Conversation'); + it('should initialize with loading state', async () => { + const { component } = await createComponent(); + expect((component as any).isLoading()).toBe(true); + expect((component as any).conversation()).toBeNull(); + expect((component as any).errorStatus()).toBeNull(); }); - it('should display read-only snapshot banner', async () => { - createComponent(); + // ----------------------------------------------------------------------- + // ngOnInit - successful load + // ----------------------------------------------------------------------- + + it('should load conversation on init', async () => { + const { component } = await createComponent(); mockShareService.getSharedConversation.mockResolvedValue(mockConversation); await component.ngOnInit(); - fixture.detectChanges(); - const el = fixture.nativeElement as HTMLElement; - expect(el.textContent).toContain('Shared read-only snapshot'); + expect(mockShareService.getSharedConversation).toHaveBeenCalledWith('share-001'); + expect((component as any).conversation()).toEqual(mockConversation); + expect((component as any).messages().length).toBe(2); + expect((component as any).isLoading()).toBe(false); }); - it('should not display a message input field', async () => { - createComponent(); + it('should set conversation title from response', async () => { + const { component } = await createComponent(); mockShareService.getSharedConversation.mockResolvedValue(mockConversation); await component.ngOnInit(); - fixture.detectChanges(); - const el = fixture.nativeElement as HTMLElement; - const textarea = el.querySelector('textarea'); - const messageInput = el.querySelector('app-message-input'); - expect(textarea).toBeNull(); - expect(messageInput).toBeNull(); + expect((component as any).conversation()!.title).toBe('Test Shared Conversation'); }); // ----------------------------------------------------------------------- - // Error states + // ngOnInit - error states // ----------------------------------------------------------------------- - it('should display access denied for 403 error', async () => { - createComponent(); + it('should set 403 error status on access denied', async () => { + const { component } = await createComponent(); mockShareService.getSharedConversation.mockRejectedValue({ status: 403 }); await component.ngOnInit(); - fixture.detectChanges(); - const el = fixture.nativeElement as HTMLElement; - expect(el.textContent).toContain('Access denied'); + expect((component as any).errorStatus()).toBe(403); + expect((component as any).isLoading()).toBe(false); }); - it('should display not found for 404 error', async () => { - createComponent(); + it('should set 404 error status on not found', async () => { + const { component } = await createComponent(); mockShareService.getSharedConversation.mockRejectedValue({ status: 404 }); await component.ngOnInit(); - fixture.detectChanges(); - const el = fixture.nativeElement as HTMLElement; - expect(el.textContent).toContain('Conversation not found'); + expect((component as any).errorStatus()).toBe(404); + expect((component as any).isLoading()).toBe(false); }); - it('should display not found when shareId is missing from route', async () => { - createComponent(null); + it('should set 404 error when shareId is missing from route', async () => { + const { component } = await createComponent(null); await component.ngOnInit(); - fixture.detectChanges(); - const el = fixture.nativeElement as HTMLElement; - expect(el.textContent).toContain('Conversation not found'); + expect((component as any).errorStatus()).toBe(404); + expect((component as any).isLoading()).toBe(false); + expect(mockShareService.getSharedConversation).not.toHaveBeenCalled(); }); - it('should display generic error for 500', async () => { - createComponent(); + it('should set 500 error status on server error', async () => { + const { component } = await createComponent(); mockShareService.getSharedConversation.mockRejectedValue({ status: 500 }); await component.ngOnInit(); - fixture.detectChanges(); - const el = fixture.nativeElement as HTMLElement; - expect(el.textContent).toContain('Something went wrong'); + expect((component as any).errorStatus()).toBe(500); + expect((component as any).isLoading()).toBe(false); + }); + + it('should default to 500 error when status is not provided', async () => { + const { component } = await createComponent(); + mockShareService.getSharedConversation.mockRejectedValue(new Error('Network error')); + + await component.ngOnInit(); + + expect((component as any).errorStatus()).toBe(500); }); // ----------------------------------------------------------------------- - // Export to new conversation + // Export functionality // ----------------------------------------------------------------------- - it('should display export button when conversation is loaded', async () => { - createComponent(); + it('should call exportSharedConversation on export', async () => { + const { component } = await createComponent(); mockShareService.getSharedConversation.mockResolvedValue(mockConversation); + mockShareService.exportSharedConversation.mockResolvedValue({ + sessionId: 'new-sess-001', + title: 'Test Shared Conversation (shared)', + }); await component.ngOnInit(); - fixture.detectChanges(); - - const el = fixture.nativeElement as HTMLElement; - expect(el.textContent).toContain('Export to new conversation'); - }); - - it('should not display export button when loading', () => { - createComponent(); - fixture.detectChanges(); + await (component as any).onExport(); - const el = fixture.nativeElement as HTMLElement; - expect(el.textContent).not.toContain('Export to new conversation'); + expect(mockShareService.exportSharedConversation).toHaveBeenCalledWith('share-001'); }); - it('should call exportSharedConversation and navigate on export', async () => { - createComponent(); + it('should add session to cache after export', async () => { + const { component } = await createComponent(); mockShareService.getSharedConversation.mockResolvedValue(mockConversation); mockShareService.exportSharedConversation.mockResolvedValue({ sessionId: 'new-sess-001', @@ -222,32 +242,90 @@ describe('SharedViewPage', () => { }); await component.ngOnInit(); - fixture.detectChanges(); - await (component as any).onExport(); - fixture.detectChanges(); - expect(mockShareService.exportSharedConversation).toHaveBeenCalledWith('share-001'); expect(mockSessionService.addSessionToCache).toHaveBeenCalledWith( 'new-sess-001', 'user-002', 'Test Shared Conversation (shared)', ); + }); + + it('should navigate to new session after export', async () => { + const { component } = await createComponent(); + mockShareService.getSharedConversation.mockResolvedValue(mockConversation); + mockShareService.exportSharedConversation.mockResolvedValue({ + sessionId: 'new-sess-001', + title: 'Test Shared Conversation (shared)', + }); + + await component.ngOnInit(); + await (component as any).onExport(); + expect(mockRouter.navigate).toHaveBeenCalledWith(['/s', 'new-sess-001']); }); + it('should set isExporting during export', async () => { + const { component } = await createComponent(); + mockShareService.getSharedConversation.mockResolvedValue(mockConversation); + + let resolveExport: (value: any) => void; + mockShareService.exportSharedConversation.mockReturnValue( + new Promise((resolve) => { + resolveExport = resolve; + }), + ); + + await component.ngOnInit(); + const exportPromise = (component as any).onExport(); + + expect((component as any).isExporting()).toBe(true); + + resolveExport!({ sessionId: 'new-sess-001', title: 'Test' }); + await exportPromise; + + expect((component as any).isExporting()).toBe(false); + }); + it('should handle export failure gracefully', async () => { - createComponent(); + const { component } = await createComponent(); mockShareService.getSharedConversation.mockResolvedValue(mockConversation); mockShareService.exportSharedConversation.mockRejectedValue(new Error('Export failed')); await component.ngOnInit(); - fixture.detectChanges(); // Should not throw await (component as any).onExport(); - fixture.detectChanges(); expect(mockRouter.navigate).not.toHaveBeenCalled(); + expect((component as any).isExporting()).toBe(false); + }); + + it('should not export when conversation is null', async () => { + const { component } = await createComponent(); + // Don't load conversation + + await (component as any).onExport(); + + expect(mockShareService.exportSharedConversation).not.toHaveBeenCalled(); + }); + + it('should use anonymous userId when user is not logged in', async () => { + const { component } = await createComponent(); + mockShareService.getSharedConversation.mockResolvedValue(mockConversation); + mockShareService.exportSharedConversation.mockResolvedValue({ + sessionId: 'new-sess-001', + title: 'Test', + }); + mockUserService.currentUser.mockReturnValue(null); + + await component.ngOnInit(); + await (component as any).onExport(); + + expect(mockSessionService.addSessionToCache).toHaveBeenCalledWith( + 'new-sess-001', + 'anonymous', + 'Test', + ); }); }); diff --git a/frontend/ai.client/src/app/shared/shared-view.page.ts b/frontend/ai.client/src/app/shared/shared-view.page.ts index 29a77cf5..c24abf9b 100644 --- a/frontend/ai.client/src/app/shared/shared-view.page.ts +++ b/frontend/ai.client/src/app/shared/shared-view.page.ts @@ -1,50 +1,36 @@ -import { Component, ChangeDetectionStrategy, inject, signal, OnInit } from '@angular/core'; +import { Component, ChangeDetectionStrategy, computed, inject, signal, OnInit } from '@angular/core'; import { ActivatedRoute, Router } from '@angular/router'; import { DatePipe } from '@angular/common'; import { NgIcon, provideIcons } from '@ng-icons/core'; import { heroLockClosed, heroExclamationTriangle, - heroDocumentDuplicate, + heroChatBubbleLeftRight, } from '@ng-icons/heroicons/outline'; import { ShareService, SharedConversationResponse } from '../session/services/share/share.service'; import { MessageListComponent } from '../session/components/message-list/message-list.component'; import { SessionService } from '../session/services/session/session.service'; import { UserService } from '../auth/user.service'; +import { SidenavService } from '../services/sidenav/sidenav.service'; import { Message } from '../session/services/models/message.model'; @Component({ selector: 'app-shared-view', changeDetection: ChangeDetectionStrategy.OnPush, imports: [NgIcon, DatePipe, MessageListComponent], - providers: [provideIcons({ heroLockClosed, heroExclamationTriangle, heroDocumentDuplicate })], + providers: [provideIcons({ heroLockClosed, heroExclamationTriangle, heroChatBubbleLeftRight })], template: `
-
+
-
@if (conversation()) { - Shared {{ conversation()!.createdAt | date:'medium' }} + · {{ conversation()!.createdAt | date:'medium' }} - }
@@ -82,12 +68,30 @@ import { Message } from '../session/services/models/message.model';
-
+
+ + +
+ +
}
`, @@ -98,6 +102,13 @@ export class SharedViewPage implements OnInit { private shareService = inject(ShareService); private sessionService = inject(SessionService); private userService = inject(UserService); + private sidenavService = inject(SidenavService); + + /** Left offset for the floating button so it centers over the content area, not the viewport */ + readonly buttonLeft = computed(() => { + const sidebarVisible = !this.sidenavService.isCollapsed() && !this.sidenavService.isHidden(); + return sidebarVisible ? '18rem' : '0'; + }); protected conversation = signal(null); protected messages = signal([]); diff --git a/frontend/ai.client/src/index.html b/frontend/ai.client/src/index.html index 854ea468..2cfc39e7 100644 --- a/frontend/ai.client/src/index.html +++ b/frontend/ai.client/src/index.html @@ -21,7 +21,7 @@ const stored = localStorage.getItem(storageKey); const preference = (stored === 'light' || stored === 'dark' || stored === 'system') ? stored : 'system'; - let theme = 'light'; + let theme; if (preference === 'system') { theme = window.matchMedia('(prefers-color-scheme: dark)').matches ? 'dark' : 'light'; } else { diff --git a/frontend/ai.client/vitest.config.ts b/frontend/ai.client/vitest.config.ts deleted file mode 100644 index b951a942..00000000 --- a/frontend/ai.client/vitest.config.ts +++ /dev/null @@ -1,9 +0,0 @@ -import { defineConfig } from 'vitest/config'; - -export default defineConfig({ - test: { - coverage: { - reporter: ['text', 'json', 'html'], - }, - }, -}); diff --git a/infrastructure/lib/app-api-stack.ts b/infrastructure/lib/app-api-stack.ts index 064b296d..494b733b 100644 --- a/infrastructure/lib/app-api-stack.ts +++ b/infrastructure/lib/app-api-stack.ts @@ -4,11 +4,9 @@ import * as ecs from "aws-cdk-lib/aws-ecs"; import * as ecr from "aws-cdk-lib/aws-ecr"; import * as elbv2 from "aws-cdk-lib/aws-elasticloadbalancingv2"; import * as dynamodb from "aws-cdk-lib/aws-dynamodb"; -import * as secretsmanager from "aws-cdk-lib/aws-secretsmanager"; import * as iam from "aws-cdk-lib/aws-iam"; import * as ssm from "aws-cdk-lib/aws-ssm"; import * as logs from "aws-cdk-lib/aws-logs"; -import * as kms from "aws-cdk-lib/aws-kms"; import * as lambda from "aws-cdk-lib/aws-lambda"; import * as lambdaEventSources from "aws-cdk-lib/aws-lambda-event-sources"; import * as sns from "aws-cdk-lib/aws-sns"; @@ -72,7 +70,7 @@ export class AppApiStack extends cdk.Stack { // Import ALB const albArn = ssm.StringParameter.valueForStringParameter(this, `/${config.projectPrefix}/network/alb-arn`); - const alb = elbv2.ApplicationLoadBalancer.fromApplicationLoadBalancerAttributes(this, "ImportedAlb", { + const _alb = elbv2.ApplicationLoadBalancer.fromApplicationLoadBalancerAttributes(this, "ImportedAlb", { loadBalancerArn: albArn, securityGroupId: albSecurityGroupId, }); @@ -1022,6 +1020,7 @@ export class AppApiStack extends cdk.Stack { container.addEnvironment('SAGEMAKER_EXECUTION_ROLE_ARN', sagemakerRoleArn); container.addEnvironment('SAGEMAKER_SECURITY_GROUP_ID', sagemakerSgId); container.addEnvironment('SAGEMAKER_SUBNET_IDS', ftPrivateSubnetIds); + container.addEnvironment('FINE_TUNING_DEFAULT_QUOTA_HOURS', String(config.fineTuning.defaultQuotaHours)); // Grant ECS task role: DynamoDB access to fine-tuning tables taskDefinition.taskRole.addToPrincipalPolicy( diff --git a/infrastructure/lib/config.ts b/infrastructure/lib/config.ts index 7a754f96..c6249477 100644 --- a/infrastructure/lib/config.ts +++ b/infrastructure/lib/config.ts @@ -87,6 +87,7 @@ export interface RagIngestionConfig { export interface FineTuningConfig { enabled: boolean; // Enable/disable SageMaker Fine-Tuning stack + defaultQuotaHours: number; // Default monthly GPU-hour quota for all users (0 = whitelist-only) } /** @@ -213,6 +214,7 @@ export function loadConfig(scope: cdk.App): AppConfig { }, fineTuning: { enabled: parseBooleanEnv(process.env.CDK_FINE_TUNING_ENABLED) ?? scope.node.tryGetContext('fineTuning')?.enabled ?? false, + defaultQuotaHours: parseIntEnv(process.env.CDK_FINE_TUNING_DEFAULT_QUOTA_HOURS) ?? scope.node.tryGetContext('fineTuning')?.defaultQuotaHours ?? 0, }, tags: { ...(scope.node.tryGetContext('tags') || {}), diff --git a/infrastructure/lib/frontend-stack.ts b/infrastructure/lib/frontend-stack.ts index a7ca5445..579a43cd 100644 --- a/infrastructure/lib/frontend-stack.ts +++ b/infrastructure/lib/frontend-stack.ts @@ -123,7 +123,7 @@ export class FrontendStack extends cdk.Stack { }); // Create Origin Access Control (OAC) for CloudFront - const oac = new cloudfront.CfnOriginAccessControl(this, 'FrontendOAC', { + const _oac = new cloudfront.CfnOriginAccessControl(this, 'FrontendOAC', { originAccessControlConfig: { name: getResourceName(config, 'frontend-oac'), originAccessControlOriginType: 's3', diff --git a/infrastructure/lib/inference-api-stack.ts b/infrastructure/lib/inference-api-stack.ts index 408289f4..29f6b25c 100644 --- a/infrastructure/lib/inference-api-stack.ts +++ b/infrastructure/lib/inference-api-stack.ts @@ -59,7 +59,7 @@ export class InferenceApiStack extends cdk.Stack { getResourceName(config, 'inference-api') ); - const containerImageUri = `${ecrRepository.repositoryUri}:${imageTag}`; + const _containerImageUri = `${ecrRepository.repositoryUri}:${imageTag}`; // ============================================================ // IAM Execution Role for AgentCore Runtime diff --git a/infrastructure/lib/infrastructure-stack.ts b/infrastructure/lib/infrastructure-stack.ts index 63940cbe..6442755c 100644 --- a/infrastructure/lib/infrastructure-stack.ts +++ b/infrastructure/lib/infrastructure-stack.ts @@ -241,7 +241,7 @@ export class InfrastructureStack extends cdk.Stack { }); // HTTP listener only redirects to HTTPS (no target groups here) - const httpRedirectListener = this.alb.addListener('HttpListener', { + const _httpRedirectListener = this.alb.addListener('HttpListener', { port: 80, protocol: elbv2.ApplicationProtocol.HTTP, defaultAction: elbv2.ListenerAction.redirect({ diff --git a/infrastructure/lib/rag-ingestion-stack.ts b/infrastructure/lib/rag-ingestion-stack.ts index d14d7cfa..95df0376 100644 --- a/infrastructure/lib/rag-ingestion-stack.ts +++ b/infrastructure/lib/rag-ingestion-stack.ts @@ -69,7 +69,7 @@ export class RagIngestionStack extends cdk.Stack { `/${config.projectPrefix}/network/availability-zones` ); - const vpc = ec2.Vpc.fromVpcAttributes(this, 'ImportedVpc', { + const _vpc = ec2.Vpc.fromVpcAttributes(this, 'ImportedVpc', { vpcId: vpcId, vpcCidrBlock: vpcCidr, availabilityZones: cdk.Fn.split(',', availabilityZonesString), @@ -180,6 +180,7 @@ export class RagIngestionStack extends cdk.Stack { pointInTimeRecovery: true, removalPolicy: getRemovalPolicy(config), encryption: dynamodb.TableEncryption.AWS_MANAGED, + timeToLiveAttribute: 'ttl', }); // Add Global Secondary Indexes @@ -231,7 +232,7 @@ export class RagIngestionStack extends cdk.Stack { getResourceName(config, 'rag-ingestion') ); - const containerImageUri = `${ecrRepository.repositoryUri}:${imageTag}`; + const _containerImageUri = `${ecrRepository.repositoryUri}:${imageTag}`; const ingestionLogGroup = new logs.LogGroup(this, 'RagIngestionLogGroup', { retention: logs.RetentionDays.ONE_WEEK, diff --git a/infrastructure/package-lock.json b/infrastructure/package-lock.json index 59e53bb1..b25a27f7 100644 --- a/infrastructure/package-lock.json +++ b/infrastructure/package-lock.json @@ -1,24 +1,24 @@ { "name": "infrastructure", - "version": "1.0.0-beta.19", + "version": "1.0.0-beta.20", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "infrastructure", - "version": "1.0.0-beta.19", + "version": "1.0.0-beta.20", "dependencies": { - "aws-cdk-lib": "2.244.0", - "constructs": "10.5.1" + "aws-cdk-lib": "2.245.0", + "constructs": "10.6.0" }, "bin": { "infrastructure": "bin/infrastructure.js" }, "devDependencies": { - "@types/jest": "29.5.14", - "@types/node": "24.10.1", - "aws-cdk": "2.1113.0", - "jest": "29.7.0", + "@types/jest": "30.0.0", + "@types/node": "25.5.0", + "aws-cdk": "2.1115.0", + "jest": "30.3.0", "ts-jest": "29.4.6", "ts-node": "10.9.2", "typescript": "5.9.3" @@ -37,9 +37,9 @@ "license": "Apache-2.0" }, "node_modules/@aws-cdk/cloud-assembly-schema": { - "version": "52.2.0", - "resolved": "https://registry.npmjs.org/@aws-cdk/cloud-assembly-schema/-/cloud-assembly-schema-52.2.0.tgz", - "integrity": "sha512-ourZjixQ/UfsZc7gdk3vt1eHBODMUjQTYYYCY3ZX8fiXyHtWNDAYZPrXUK96jpCC2fLP+tfHTJrBjZ563pmcEw==", + "version": "53.10.0", + "resolved": "https://registry.npmjs.org/@aws-cdk/cloud-assembly-schema/-/cloud-assembly-schema-53.10.0.tgz", + "integrity": "sha512-/gJgJQh9SHIIN82GZ4BB0WS3z3HcKFF734yNOkX0stBeyIfaBl2x476dihVCCM1GpVqnueC9DUA3CyZJOOPitg==", "bundleDependencies": [ "jsonschema", "semver" @@ -47,7 +47,7 @@ "license": "Apache-2.0", "dependencies": { "jsonschema": "~1.4.1", - "semver": "^7.7.3" + "semver": "^7.7.4" }, "engines": { "node": ">= 18.0.0" @@ -62,7 +62,7 @@ } }, "node_modules/@aws-cdk/cloud-assembly-schema/node_modules/semver": { - "version": "7.7.3", + "version": "7.7.4", "inBundle": true, "license": "ISC", "bin": { @@ -205,9 +205,9 @@ } }, "node_modules/@babel/helper-plugin-utils": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.27.1.tgz", - "integrity": "sha512-1gn1Up5YXka3YYAHGKpbideQ5Yjf1tDa9qYcgysz+cNCXukyLl6DjPXhD3VRwSb8c0J9tA4b2+rHEZtc6R0tlw==", + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.28.6.tgz", + "integrity": "sha512-S9gzZ/bz83GRysI7gAD4wPT/AI3uCnY+9xn+Mx/KPs2JwHJIz1W8PZkg2cqyt3RNOBM8ejcXhV6y8Og7ly/Dug==", "dev": true, "license": "MIT", "engines": { @@ -372,13 +372,13 @@ } }, "node_modules/@babel/plugin-syntax-jsx": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.27.1.tgz", - "integrity": "sha512-y8YTNIeKoyhGd9O0Jiyzyyqk8gdjnumGTQPsz0xOZOQ2RmkVJeZ1vmmfIvFEKqucBG6axJGBZDE/7iI5suUI/w==", + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.28.6.tgz", + "integrity": "sha512-wgEmr06G6sIpqr8YDwA2dSRTE3bJ+V0IfpzfSY3Lfgd7YWOaAdlykvJi13ZKBt8cZHfgH1IXN+CL656W3uUa4w==", "dev": true, "license": "MIT", "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" + "@babel/helper-plugin-utils": "^7.28.6" }, "engines": { "node": ">=6.9.0" @@ -498,13 +498,13 @@ } }, "node_modules/@babel/plugin-syntax-typescript": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.27.1.tgz", - "integrity": "sha512-xfYCBMxveHrRMnAWl1ZlPXOZjzkN82THFvLhQhFXFt81Z5HnN+EtUkZhv/zcKpmT3fzmWZB0ywiBrbC3vogbwQ==", + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.28.6.tgz", + "integrity": "sha512-+nDNmQye7nlnuuHDboPbGm00Vqg3oO8niRRL27/4LYHUsHYh0zJ1xWOz0uRwNFmM1Avzk8wZbc6rdiYhomzv/A==", "dev": true, "license": "MIT", "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" + "@babel/helper-plugin-utils": "^7.28.6" }, "engines": { "node": ">=6.9.0" @@ -592,6 +592,58 @@ "@jridgewell/sourcemap-codec": "^1.4.10" } }, + "node_modules/@emnapi/core": { + "version": "1.9.1", + "resolved": "https://registry.npmjs.org/@emnapi/core/-/core-1.9.1.tgz", + "integrity": "sha512-mukuNALVsoix/w1BJwFzwXBN/dHeejQtuVzcDsfOEsdpCumXb/E9j8w11h5S54tT1xhifGfbbSm/ICrObRb3KA==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "@emnapi/wasi-threads": "1.2.0", + "tslib": "^2.4.0" + } + }, + "node_modules/@emnapi/runtime": { + "version": "1.9.1", + "resolved": "https://registry.npmjs.org/@emnapi/runtime/-/runtime-1.9.1.tgz", + "integrity": "sha512-VYi5+ZVLhpgK4hQ0TAjiQiZ6ol0oe4mBx7mVv7IflsiEp0OWoVsp/+f9Vc1hOhE0TtkORVrI1GvzyreqpgWtkA==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "tslib": "^2.4.0" + } + }, + "node_modules/@emnapi/wasi-threads": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@emnapi/wasi-threads/-/wasi-threads-1.2.0.tgz", + "integrity": "sha512-N10dEJNSsUx41Z6pZsXU8FjPjpBEplgH24sfkmITrBED1/U2Esum9F3lfLrMjKHHjmi557zQn7kR9R+XWXu5Rg==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "tslib": "^2.4.0" + } + }, + "node_modules/@isaacs/cliui": { + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz", + "integrity": "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==", + "dev": true, + "license": "ISC", + "dependencies": { + "string-width": "^5.1.2", + "string-width-cjs": "npm:string-width@^4.2.0", + "strip-ansi": "^7.0.1", + "strip-ansi-cjs": "npm:strip-ansi@^6.0.1", + "wrap-ansi": "^8.1.0", + "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, "node_modules/@istanbuljs/load-nyc-config": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz", @@ -620,61 +672,60 @@ } }, "node_modules/@jest/console": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/@jest/console/-/console-29.7.0.tgz", - "integrity": "sha512-5Ni4CU7XHQi32IJ398EEP4RrB8eV09sXP2ROqD4bksHrnTree52PsxvX8tpL8LvTZ3pFzXyPbNQReSN41CAhOg==", + "version": "30.3.0", + "resolved": "https://registry.npmjs.org/@jest/console/-/console-30.3.0.tgz", + "integrity": "sha512-PAwCvFJ4696XP2qZj+LAn1BWjZaJ6RjG6c7/lkMaUJnkyMS34ucuIsfqYvfskVNvUI27R/u4P1HMYFnlVXG/Ww==", "dev": true, "license": "MIT", "dependencies": { - "@jest/types": "^29.6.3", + "@jest/types": "30.3.0", "@types/node": "*", - "chalk": "^4.0.0", - "jest-message-util": "^29.7.0", - "jest-util": "^29.7.0", + "chalk": "^4.1.2", + "jest-message-util": "30.3.0", + "jest-util": "30.3.0", "slash": "^3.0.0" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" } }, "node_modules/@jest/core": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/@jest/core/-/core-29.7.0.tgz", - "integrity": "sha512-n7aeXWKMnGtDA48y8TLWJPJmLmmZ642Ceo78cYWEpiD7FzDgmNDV/GCVRorPABdXLJZ/9wzzgZAlHjXjxDHGsg==", + "version": "30.3.0", + "resolved": "https://registry.npmjs.org/@jest/core/-/core-30.3.0.tgz", + "integrity": "sha512-U5mVPsBxLSO6xYbf+tgkymLx+iAhvZX43/xI1+ej2ZOPnPdkdO1CzDmFKh2mZBn2s4XZixszHeQnzp1gm/DIxw==", "dev": true, "license": "MIT", "dependencies": { - "@jest/console": "^29.7.0", - "@jest/reporters": "^29.7.0", - "@jest/test-result": "^29.7.0", - "@jest/transform": "^29.7.0", - "@jest/types": "^29.6.3", + "@jest/console": "30.3.0", + "@jest/pattern": "30.0.1", + "@jest/reporters": "30.3.0", + "@jest/test-result": "30.3.0", + "@jest/transform": "30.3.0", + "@jest/types": "30.3.0", "@types/node": "*", - "ansi-escapes": "^4.2.1", - "chalk": "^4.0.0", - "ci-info": "^3.2.0", - "exit": "^0.1.2", - "graceful-fs": "^4.2.9", - "jest-changed-files": "^29.7.0", - "jest-config": "^29.7.0", - "jest-haste-map": "^29.7.0", - "jest-message-util": "^29.7.0", - "jest-regex-util": "^29.6.3", - "jest-resolve": "^29.7.0", - "jest-resolve-dependencies": "^29.7.0", - "jest-runner": "^29.7.0", - "jest-runtime": "^29.7.0", - "jest-snapshot": "^29.7.0", - "jest-util": "^29.7.0", - "jest-validate": "^29.7.0", - "jest-watcher": "^29.7.0", - "micromatch": "^4.0.4", - "pretty-format": "^29.7.0", - "slash": "^3.0.0", - "strip-ansi": "^6.0.0" + "ansi-escapes": "^4.3.2", + "chalk": "^4.1.2", + "ci-info": "^4.2.0", + "exit-x": "^0.2.2", + "graceful-fs": "^4.2.11", + "jest-changed-files": "30.3.0", + "jest-config": "30.3.0", + "jest-haste-map": "30.3.0", + "jest-message-util": "30.3.0", + "jest-regex-util": "30.0.1", + "jest-resolve": "30.3.0", + "jest-resolve-dependencies": "30.3.0", + "jest-runner": "30.3.0", + "jest-runtime": "30.3.0", + "jest-snapshot": "30.3.0", + "jest-util": "30.3.0", + "jest-validate": "30.3.0", + "jest-watcher": "30.3.0", + "pretty-format": "30.3.0", + "slash": "^3.0.0" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" }, "peerDependencies": { "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" @@ -685,117 +736,150 @@ } } }, + "node_modules/@jest/diff-sequences": { + "version": "30.3.0", + "resolved": "https://registry.npmjs.org/@jest/diff-sequences/-/diff-sequences-30.3.0.tgz", + "integrity": "sha512-cG51MVnLq1ecVUaQ3fr6YuuAOitHK1S4WUJHnsPFE/quQr33ADUx1FfrTCpMCRxvy0Yr9BThKpDjSlcTi91tMA==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, "node_modules/@jest/environment": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/@jest/environment/-/environment-29.7.0.tgz", - "integrity": "sha512-aQIfHDq33ExsN4jP1NWGXhxgQ/wixs60gDiKO+XVMd8Mn0NWPWgc34ZQDTb2jKaUWQ7MuwoitXAsN2XVXNMpAw==", + "version": "30.3.0", + "resolved": "https://registry.npmjs.org/@jest/environment/-/environment-30.3.0.tgz", + "integrity": "sha512-SlLSF4Be735yQXyh2+mctBOzNDx5s5uLv88/j8Qn1wH679PDcwy67+YdADn8NJnGjzlXtN62asGH/T4vWOkfaw==", "dev": true, "license": "MIT", "dependencies": { - "@jest/fake-timers": "^29.7.0", - "@jest/types": "^29.6.3", + "@jest/fake-timers": "30.3.0", + "@jest/types": "30.3.0", "@types/node": "*", - "jest-mock": "^29.7.0" + "jest-mock": "30.3.0" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" } }, "node_modules/@jest/expect": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/@jest/expect/-/expect-29.7.0.tgz", - "integrity": "sha512-8uMeAMycttpva3P1lBHB8VciS9V0XAr3GymPpipdyQXbBcuhkLQOSe8E/p92RyAdToS6ZD1tFkX+CkhoECE0dQ==", + "version": "30.3.0", + "resolved": "https://registry.npmjs.org/@jest/expect/-/expect-30.3.0.tgz", + "integrity": "sha512-76Nlh4xJxk2D/9URCn3wFi98d2hb19uWE1idLsTt2ywhvdOldbw3S570hBgn25P4ICUZ/cBjybrBex2g17IDbg==", "dev": true, "license": "MIT", "dependencies": { - "expect": "^29.7.0", - "jest-snapshot": "^29.7.0" + "expect": "30.3.0", + "jest-snapshot": "30.3.0" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" } }, "node_modules/@jest/expect-utils": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/@jest/expect-utils/-/expect-utils-29.7.0.tgz", - "integrity": "sha512-GlsNBWiFQFCVi9QVSx7f5AgMeLxe9YCCs5PuP2O2LdjDAA8Jh9eX7lA1Jq/xdXw3Wb3hyvlFNfZIfcRetSzYcA==", + "version": "30.3.0", + "resolved": "https://registry.npmjs.org/@jest/expect-utils/-/expect-utils-30.3.0.tgz", + "integrity": "sha512-j0+W5iQQ8hBh7tHZkTQv3q2Fh/M7Je72cIsYqC4OaktgtO7v1So9UTjp6uPBHIaB6beoF/RRsCgMJKvti0wADA==", "dev": true, "license": "MIT", "dependencies": { - "jest-get-type": "^29.6.3" + "@jest/get-type": "30.1.0" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" } }, "node_modules/@jest/fake-timers": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/@jest/fake-timers/-/fake-timers-29.7.0.tgz", - "integrity": "sha512-q4DH1Ha4TTFPdxLsqDXK1d3+ioSL7yL5oCMJZgDYm6i+6CygW5E5xVr/D1HdsGxjt1ZWSfUAs9OxSB/BNelWrQ==", + "version": "30.3.0", + "resolved": "https://registry.npmjs.org/@jest/fake-timers/-/fake-timers-30.3.0.tgz", + "integrity": "sha512-WUQDs8SOP9URStX1DzhD425CqbN/HxUYCTwVrT8sTVBfMvFqYt/s61EK5T05qnHu0po6RitXIvP9otZxYDzTGQ==", "dev": true, "license": "MIT", "dependencies": { - "@jest/types": "^29.6.3", - "@sinonjs/fake-timers": "^10.0.2", + "@jest/types": "30.3.0", + "@sinonjs/fake-timers": "^15.0.0", "@types/node": "*", - "jest-message-util": "^29.7.0", - "jest-mock": "^29.7.0", - "jest-util": "^29.7.0" + "jest-message-util": "30.3.0", + "jest-mock": "30.3.0", + "jest-util": "30.3.0" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@jest/get-type": { + "version": "30.1.0", + "resolved": "https://registry.npmjs.org/@jest/get-type/-/get-type-30.1.0.tgz", + "integrity": "sha512-eMbZE2hUnx1WV0pmURZY9XoXPkUYjpc55mb0CrhtdWLtzMQPFvu/rZkTLZFTsdaVQa+Tr4eWAteqcUzoawq/uA==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" } }, "node_modules/@jest/globals": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/@jest/globals/-/globals-29.7.0.tgz", - "integrity": "sha512-mpiz3dutLbkW2MNFubUGUEVLkTGiqW6yLVTA+JbP6fI6J5iL9Y0Nlg8k95pcF8ctKwCS7WVxteBs29hhfAotzQ==", + "version": "30.3.0", + "resolved": "https://registry.npmjs.org/@jest/globals/-/globals-30.3.0.tgz", + "integrity": "sha512-+owLCBBdfpgL3HU+BD5etr1SvbXpSitJK0is1kiYjJxAAJggYMRQz5hSdd5pq1sSggfxPbw2ld71pt4x5wwViA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/environment": "30.3.0", + "@jest/expect": "30.3.0", + "@jest/types": "30.3.0", + "jest-mock": "30.3.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@jest/pattern": { + "version": "30.0.1", + "resolved": "https://registry.npmjs.org/@jest/pattern/-/pattern-30.0.1.tgz", + "integrity": "sha512-gWp7NfQW27LaBQz3TITS8L7ZCQ0TLvtmI//4OwlQRx4rnWxcPNIYjxZpDcN4+UlGxgm3jS5QPz8IPTCkb59wZA==", "dev": true, "license": "MIT", "dependencies": { - "@jest/environment": "^29.7.0", - "@jest/expect": "^29.7.0", - "@jest/types": "^29.6.3", - "jest-mock": "^29.7.0" + "@types/node": "*", + "jest-regex-util": "30.0.1" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" } }, "node_modules/@jest/reporters": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/@jest/reporters/-/reporters-29.7.0.tgz", - "integrity": "sha512-DApq0KJbJOEzAFYjHADNNxAE3KbhxQB1y5Kplb5Waqw6zVbuWatSnMjE5gs8FUgEPmNsnZA3NCWl9NG0ia04Pg==", + "version": "30.3.0", + "resolved": "https://registry.npmjs.org/@jest/reporters/-/reporters-30.3.0.tgz", + "integrity": "sha512-a09z89S+PkQnL055bVj8+pe2Caed2PBOaczHcXCykW5ngxX9EWx/1uAwncxc/HiU0oZqfwseMjyhxgRjS49qPw==", "dev": true, "license": "MIT", "dependencies": { "@bcoe/v8-coverage": "^0.2.3", - "@jest/console": "^29.7.0", - "@jest/test-result": "^29.7.0", - "@jest/transform": "^29.7.0", - "@jest/types": "^29.6.3", - "@jridgewell/trace-mapping": "^0.3.18", + "@jest/console": "30.3.0", + "@jest/test-result": "30.3.0", + "@jest/transform": "30.3.0", + "@jest/types": "30.3.0", + "@jridgewell/trace-mapping": "^0.3.25", "@types/node": "*", - "chalk": "^4.0.0", - "collect-v8-coverage": "^1.0.0", - "exit": "^0.1.2", - "glob": "^7.1.3", - "graceful-fs": "^4.2.9", + "chalk": "^4.1.2", + "collect-v8-coverage": "^1.0.2", + "exit-x": "^0.2.2", + "glob": "^10.5.0", + "graceful-fs": "^4.2.11", "istanbul-lib-coverage": "^3.0.0", "istanbul-lib-instrument": "^6.0.0", "istanbul-lib-report": "^3.0.0", - "istanbul-lib-source-maps": "^4.0.0", + "istanbul-lib-source-maps": "^5.0.0", "istanbul-reports": "^3.1.3", - "jest-message-util": "^29.7.0", - "jest-util": "^29.7.0", - "jest-worker": "^29.7.0", + "jest-message-util": "30.3.0", + "jest-util": "30.3.0", + "jest-worker": "30.3.0", "slash": "^3.0.0", - "string-length": "^4.0.1", - "strip-ansi": "^6.0.0", + "string-length": "^4.0.2", "v8-to-istanbul": "^9.0.1" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" }, "peerDependencies": { "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" @@ -807,108 +891,124 @@ } }, "node_modules/@jest/schemas": { - "version": "29.6.3", - "resolved": "https://registry.npmjs.org/@jest/schemas/-/schemas-29.6.3.tgz", - "integrity": "sha512-mo5j5X+jIZmJQveBKeS/clAueipV7KgiX1vMgCxam1RNYiqE1w62n0/tJJnHtjW8ZHcQco5gY85jA3mi0L+nSA==", + "version": "30.0.5", + "resolved": "https://registry.npmjs.org/@jest/schemas/-/schemas-30.0.5.tgz", + "integrity": "sha512-DmdYgtezMkh3cpU8/1uyXakv3tJRcmcXxBOcO0tbaozPwpmh4YMsnWrQm9ZmZMfa5ocbxzbFk6O4bDPEc/iAnA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@sinclair/typebox": "^0.34.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@jest/snapshot-utils": { + "version": "30.3.0", + "resolved": "https://registry.npmjs.org/@jest/snapshot-utils/-/snapshot-utils-30.3.0.tgz", + "integrity": "sha512-ORbRN9sf5PP82v3FXNSwmO1OTDR2vzR2YTaR+E3VkSBZ8zadQE6IqYdYEeFH1NIkeB2HIGdF02dapb6K0Mj05g==", "dev": true, "license": "MIT", "dependencies": { - "@sinclair/typebox": "^0.27.8" + "@jest/types": "30.3.0", + "chalk": "^4.1.2", + "graceful-fs": "^4.2.11", + "natural-compare": "^1.4.0" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" } }, "node_modules/@jest/source-map": { - "version": "29.6.3", - "resolved": "https://registry.npmjs.org/@jest/source-map/-/source-map-29.6.3.tgz", - "integrity": "sha512-MHjT95QuipcPrpLM+8JMSzFx6eHp5Bm+4XeFDJlwsvVBjmKNiIAvasGK2fxz2WbGRlnvqehFbh07MMa7n3YJnw==", + "version": "30.0.1", + "resolved": "https://registry.npmjs.org/@jest/source-map/-/source-map-30.0.1.tgz", + "integrity": "sha512-MIRWMUUR3sdbP36oyNyhbThLHyJ2eEDClPCiHVbrYAe5g3CHRArIVpBw7cdSB5fr+ofSfIb2Tnsw8iEHL0PYQg==", "dev": true, "license": "MIT", "dependencies": { - "@jridgewell/trace-mapping": "^0.3.18", - "callsites": "^3.0.0", - "graceful-fs": "^4.2.9" + "@jridgewell/trace-mapping": "^0.3.25", + "callsites": "^3.1.0", + "graceful-fs": "^4.2.11" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" } }, "node_modules/@jest/test-result": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/@jest/test-result/-/test-result-29.7.0.tgz", - "integrity": "sha512-Fdx+tv6x1zlkJPcWXmMDAG2HBnaR9XPSd5aDWQVsfrZmLVT3lU1cwyxLgRmXR9yrq4NBoEm9BMsfgFzTQAbJYA==", + "version": "30.3.0", + "resolved": "https://registry.npmjs.org/@jest/test-result/-/test-result-30.3.0.tgz", + "integrity": "sha512-e/52nJGuD74AKTSe0P4y5wFRlaXP0qmrS17rqOMHeSwm278VyNyXE3gFO/4DTGF9w+65ra3lo3VKj0LBrzmgdQ==", "dev": true, "license": "MIT", "dependencies": { - "@jest/console": "^29.7.0", - "@jest/types": "^29.6.3", - "@types/istanbul-lib-coverage": "^2.0.0", - "collect-v8-coverage": "^1.0.0" + "@jest/console": "30.3.0", + "@jest/types": "30.3.0", + "@types/istanbul-lib-coverage": "^2.0.6", + "collect-v8-coverage": "^1.0.2" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" } }, "node_modules/@jest/test-sequencer": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/@jest/test-sequencer/-/test-sequencer-29.7.0.tgz", - "integrity": "sha512-GQwJ5WZVrKnOJuiYiAF52UNUJXgTZx1NHjFSEB0qEMmSZKAkdMoIzw/Cj6x6NF4AvV23AUqDpFzQkN/eYCYTxw==", + "version": "30.3.0", + "resolved": "https://registry.npmjs.org/@jest/test-sequencer/-/test-sequencer-30.3.0.tgz", + "integrity": "sha512-dgbWy9b8QDlQeRZcv7LNF+/jFiiYHTKho1xirauZ7kVwY7avjFF6uTT0RqlgudB5OuIPagFdVtfFMosjVbk1eA==", "dev": true, "license": "MIT", "dependencies": { - "@jest/test-result": "^29.7.0", - "graceful-fs": "^4.2.9", - "jest-haste-map": "^29.7.0", + "@jest/test-result": "30.3.0", + "graceful-fs": "^4.2.11", + "jest-haste-map": "30.3.0", "slash": "^3.0.0" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" } }, "node_modules/@jest/transform": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/@jest/transform/-/transform-29.7.0.tgz", - "integrity": "sha512-ok/BTPFzFKVMwO5eOHRrvnBVHdRy9IrsrW1GpMaQ9MCnilNLXQKmAX8s1YXDFaai9xJpac2ySzV0YeRRECr2Vw==", + "version": "30.3.0", + "resolved": "https://registry.npmjs.org/@jest/transform/-/transform-30.3.0.tgz", + "integrity": "sha512-TLKY33fSLVd/lKB2YI1pH69ijyUblO/BQvCj566YvnwuzoTNr648iE0j22vRvVNk2HsPwByPxATg3MleS3gf5A==", "dev": true, "license": "MIT", "dependencies": { - "@babel/core": "^7.11.6", - "@jest/types": "^29.6.3", - "@jridgewell/trace-mapping": "^0.3.18", - "babel-plugin-istanbul": "^6.1.1", - "chalk": "^4.0.0", + "@babel/core": "^7.27.4", + "@jest/types": "30.3.0", + "@jridgewell/trace-mapping": "^0.3.25", + "babel-plugin-istanbul": "^7.0.1", + "chalk": "^4.1.2", "convert-source-map": "^2.0.0", "fast-json-stable-stringify": "^2.1.0", - "graceful-fs": "^4.2.9", - "jest-haste-map": "^29.7.0", - "jest-regex-util": "^29.6.3", - "jest-util": "^29.7.0", - "micromatch": "^4.0.4", - "pirates": "^4.0.4", + "graceful-fs": "^4.2.11", + "jest-haste-map": "30.3.0", + "jest-regex-util": "30.0.1", + "jest-util": "30.3.0", + "pirates": "^4.0.7", "slash": "^3.0.0", - "write-file-atomic": "^4.0.2" + "write-file-atomic": "^5.0.1" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" } }, "node_modules/@jest/types": { - "version": "29.6.3", - "resolved": "https://registry.npmjs.org/@jest/types/-/types-29.6.3.tgz", - "integrity": "sha512-u3UPsIilWKOM3F9CXtrG8LEJmNxwoCQC/XVj4IKYXvvpx7QIi/Kg1LI5uDmDpKlac62NUtX7eLjRh+jVZcLOzw==", + "version": "30.3.0", + "resolved": "https://registry.npmjs.org/@jest/types/-/types-30.3.0.tgz", + "integrity": "sha512-JHm87k7bA33hpBngtU8h6UBub/fqqA9uXfw+21j5Hmk7ooPHlboRNxHq0JcMtC+n8VJGP1mcfnD3Mk+XKe1oSw==", "dev": true, "license": "MIT", "dependencies": { - "@jest/schemas": "^29.6.3", - "@types/istanbul-lib-coverage": "^2.0.0", - "@types/istanbul-reports": "^3.0.0", + "@jest/pattern": "30.0.1", + "@jest/schemas": "30.0.5", + "@types/istanbul-lib-coverage": "^2.0.6", + "@types/istanbul-reports": "^3.0.4", "@types/node": "*", - "@types/yargs": "^17.0.8", - "chalk": "^4.0.0" + "@types/yargs": "^17.0.33", + "chalk": "^4.1.2" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" } }, "node_modules/@jridgewell/gen-mapping": { @@ -961,10 +1061,47 @@ "@jridgewell/sourcemap-codec": "^1.4.14" } }, + "node_modules/@napi-rs/wasm-runtime": { + "version": "0.2.12", + "resolved": "https://registry.npmjs.org/@napi-rs/wasm-runtime/-/wasm-runtime-0.2.12.tgz", + "integrity": "sha512-ZVWUcfwY4E/yPitQJl481FjFo3K22D6qF0DuFH6Y/nbnE11GY5uguDxZMGXPQ8WQ0128MXQD7TnfHyK4oWoIJQ==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "@emnapi/core": "^1.4.3", + "@emnapi/runtime": "^1.4.3", + "@tybys/wasm-util": "^0.10.0" + } + }, + "node_modules/@pkgjs/parseargs": { + "version": "0.11.0", + "resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz", + "integrity": "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==", + "dev": true, + "license": "MIT", + "optional": true, + "engines": { + "node": ">=14" + } + }, + "node_modules/@pkgr/core": { + "version": "0.2.9", + "resolved": "https://registry.npmjs.org/@pkgr/core/-/core-0.2.9.tgz", + "integrity": "sha512-QNqXyfVS2wm9hweSYD2O7F0G06uurj9kZ96TRQE5Y9hU7+tgdZwIkbAKc5Ocy1HxEY2kuDQa6cQ1WRs/O5LFKA==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^12.20.0 || ^14.18.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/pkgr" + } + }, "node_modules/@sinclair/typebox": { - "version": "0.27.8", - "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.27.8.tgz", - "integrity": "sha512-+Fj43pSMwJs4KRrH/938Uf+uAELIgVBmQzg/q1YG10djyfA3TnrU8N8XzqCh/okZdszqBQTZf96idMfE5lnwTA==", + "version": "0.34.49", + "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.34.49.tgz", + "integrity": "sha512-brySQQs7Jtn0joV8Xh9ZV/hZb9Ozb0pmazDIASBkYKCjXrXU3mpcFahmK/z4YDhGkQvP9mWJbVyahdtU5wQA+A==", "dev": true, "license": "MIT" }, @@ -979,13 +1116,13 @@ } }, "node_modules/@sinonjs/fake-timers": { - "version": "10.3.0", - "resolved": "https://registry.npmjs.org/@sinonjs/fake-timers/-/fake-timers-10.3.0.tgz", - "integrity": "sha512-V4BG07kuYSUkTCSBHG8G8TNhM+F19jXFWnQtzj+we8DrkpSBCee9Z3Ms8yiGer/dlmhe35/Xdgyo3/0rQKg7YA==", + "version": "15.2.0", + "resolved": "https://registry.npmjs.org/@sinonjs/fake-timers/-/fake-timers-15.2.0.tgz", + "integrity": "sha512-+SM3gQi95RWZLlD+Npy/UC5mHftlXwnVJMRpMyiqjrF4yNnbvi/Ubh3x9sLw6gxWSuibOn00uiLu1CKozehWlQ==", "dev": true, "license": "BSD-3-Clause", "dependencies": { - "@sinonjs/commons": "^3.0.0" + "@sinonjs/commons": "^3.0.1" } }, "node_modules/@tsconfig/node10": { @@ -1016,6 +1153,17 @@ "dev": true, "license": "MIT" }, + "node_modules/@tybys/wasm-util": { + "version": "0.10.1", + "resolved": "https://registry.npmjs.org/@tybys/wasm-util/-/wasm-util-0.10.1.tgz", + "integrity": "sha512-9tTaPJLSiejZKx+Bmog4uSubteqTvFrVrURwkmHixBo0G4seD0zUxp98E1DzUBJxLQ3NPwXrGKDiVjwx/DpPsg==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "tslib": "^2.4.0" + } + }, "node_modules/@types/babel__core": { "version": "7.20.5", "resolved": "https://registry.npmjs.org/@types/babel__core/-/babel__core-7.20.5.tgz", @@ -1061,16 +1209,6 @@ "@babel/types": "^7.28.2" } }, - "node_modules/@types/graceful-fs": { - "version": "4.1.9", - "resolved": "https://registry.npmjs.org/@types/graceful-fs/-/graceful-fs-4.1.9.tgz", - "integrity": "sha512-olP3sd1qOEe5dXTSaFvQG+02VdRXcdytWLAZsAq1PecU8uqQAhkrnbli7DagjtXKW/Bl7YJbUsa8MPcuc8LHEQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/node": "*" - } - }, "node_modules/@types/istanbul-lib-coverage": { "version": "2.0.6", "resolved": "https://registry.npmjs.org/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.6.tgz", @@ -1099,24 +1237,24 @@ } }, "node_modules/@types/jest": { - "version": "29.5.14", - "resolved": "https://registry.npmjs.org/@types/jest/-/jest-29.5.14.tgz", - "integrity": "sha512-ZN+4sdnLUbo8EVvVc2ao0GFW6oVrQRPn4K2lglySj7APvSrgzxHiNNK99us4WDMi57xxA2yggblIAMNhXOotLQ==", + "version": "30.0.0", + "resolved": "https://registry.npmjs.org/@types/jest/-/jest-30.0.0.tgz", + "integrity": "sha512-XTYugzhuwqWjws0CVz8QpM36+T+Dz5mTEBKhNs/esGLnCIlGdRy+Dq78NRjd7ls7r8BC8ZRMOrKlkO1hU0JOwA==", "dev": true, "license": "MIT", "dependencies": { - "expect": "^29.0.0", - "pretty-format": "^29.0.0" + "expect": "^30.0.0", + "pretty-format": "^30.0.0" } }, "node_modules/@types/node": { - "version": "24.10.1", - "resolved": "https://registry.npmjs.org/@types/node/-/node-24.10.1.tgz", - "integrity": "sha512-GNWcUTRBgIRJD5zj+Tq0fKOJ5XZajIiBroOF0yvj2bSU1WvNdYS/dn9UxwsujGW4JX06dnHyjV2y9rRaybH0iQ==", + "version": "25.5.0", + "resolved": "https://registry.npmjs.org/@types/node/-/node-25.5.0.tgz", + "integrity": "sha512-jp2P3tQMSxWugkCUKLRPVUpGaL5MVFwF8RDuSRztfwgN1wmqJeMSbKlnEtQqU8UrhTmzEmZdu2I6v2dpp7XIxw==", "dev": true, "license": "MIT", "dependencies": { - "undici-types": "~7.16.0" + "undici-types": "~7.18.0" } }, "node_modules/@types/stack-utils": { @@ -1143,6 +1281,282 @@ "dev": true, "license": "MIT" }, + "node_modules/@ungap/structured-clone": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.3.0.tgz", + "integrity": "sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g==", + "dev": true, + "license": "ISC" + }, + "node_modules/@unrs/resolver-binding-android-arm-eabi": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-android-arm-eabi/-/resolver-binding-android-arm-eabi-1.11.1.tgz", + "integrity": "sha512-ppLRUgHVaGRWUx0R0Ut06Mjo9gBaBkg3v/8AxusGLhsIotbBLuRk51rAzqLC8gq6NyyAojEXglNjzf6R948DNw==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@unrs/resolver-binding-android-arm64": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-android-arm64/-/resolver-binding-android-arm64-1.11.1.tgz", + "integrity": "sha512-lCxkVtb4wp1v+EoN+HjIG9cIIzPkX5OtM03pQYkG+U5O/wL53LC4QbIeazgiKqluGeVEeBlZahHalCaBvU1a2g==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@unrs/resolver-binding-darwin-arm64": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-darwin-arm64/-/resolver-binding-darwin-arm64-1.11.1.tgz", + "integrity": "sha512-gPVA1UjRu1Y/IsB/dQEsp2V1pm44Of6+LWvbLc9SDk1c2KhhDRDBUkQCYVWe6f26uJb3fOK8saWMgtX8IrMk3g==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@unrs/resolver-binding-darwin-x64": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-darwin-x64/-/resolver-binding-darwin-x64-1.11.1.tgz", + "integrity": "sha512-cFzP7rWKd3lZaCsDze07QX1SC24lO8mPty9vdP+YVa3MGdVgPmFc59317b2ioXtgCMKGiCLxJ4HQs62oz6GfRQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@unrs/resolver-binding-freebsd-x64": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-freebsd-x64/-/resolver-binding-freebsd-x64-1.11.1.tgz", + "integrity": "sha512-fqtGgak3zX4DCB6PFpsH5+Kmt/8CIi4Bry4rb1ho6Av2QHTREM+47y282Uqiu3ZRF5IQioJQ5qWRV6jduA+iGw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/@unrs/resolver-binding-linux-arm-gnueabihf": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-linux-arm-gnueabihf/-/resolver-binding-linux-arm-gnueabihf-1.11.1.tgz", + "integrity": "sha512-u92mvlcYtp9MRKmP+ZvMmtPN34+/3lMHlyMj7wXJDeXxuM0Vgzz0+PPJNsro1m3IZPYChIkn944wW8TYgGKFHw==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@unrs/resolver-binding-linux-arm-musleabihf": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-linux-arm-musleabihf/-/resolver-binding-linux-arm-musleabihf-1.11.1.tgz", + "integrity": "sha512-cINaoY2z7LVCrfHkIcmvj7osTOtm6VVT16b5oQdS4beibX2SYBwgYLmqhBjA1t51CarSaBuX5YNsWLjsqfW5Cw==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@unrs/resolver-binding-linux-arm64-gnu": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-linux-arm64-gnu/-/resolver-binding-linux-arm64-gnu-1.11.1.tgz", + "integrity": "sha512-34gw7PjDGB9JgePJEmhEqBhWvCiiWCuXsL9hYphDF7crW7UgI05gyBAi6MF58uGcMOiOqSJ2ybEeCvHcq0BCmQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@unrs/resolver-binding-linux-arm64-musl": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-linux-arm64-musl/-/resolver-binding-linux-arm64-musl-1.11.1.tgz", + "integrity": "sha512-RyMIx6Uf53hhOtJDIamSbTskA99sPHS96wxVE/bJtePJJtpdKGXO1wY90oRdXuYOGOTuqjT8ACccMc4K6QmT3w==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@unrs/resolver-binding-linux-ppc64-gnu": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-linux-ppc64-gnu/-/resolver-binding-linux-ppc64-gnu-1.11.1.tgz", + "integrity": "sha512-D8Vae74A4/a+mZH0FbOkFJL9DSK2R6TFPC9M+jCWYia/q2einCubX10pecpDiTmkJVUH+y8K3BZClycD8nCShA==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@unrs/resolver-binding-linux-riscv64-gnu": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-linux-riscv64-gnu/-/resolver-binding-linux-riscv64-gnu-1.11.1.tgz", + "integrity": "sha512-frxL4OrzOWVVsOc96+V3aqTIQl1O2TjgExV4EKgRY09AJ9leZpEg8Ak9phadbuX0BA4k8U5qtvMSQQGGmaJqcQ==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@unrs/resolver-binding-linux-riscv64-musl": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-linux-riscv64-musl/-/resolver-binding-linux-riscv64-musl-1.11.1.tgz", + "integrity": "sha512-mJ5vuDaIZ+l/acv01sHoXfpnyrNKOk/3aDoEdLO/Xtn9HuZlDD6jKxHlkN8ZhWyLJsRBxfv9GYM2utQ1SChKew==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@unrs/resolver-binding-linux-s390x-gnu": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-linux-s390x-gnu/-/resolver-binding-linux-s390x-gnu-1.11.1.tgz", + "integrity": "sha512-kELo8ebBVtb9sA7rMe1Cph4QHreByhaZ2QEADd9NzIQsYNQpt9UkM9iqr2lhGr5afh885d/cB5QeTXSbZHTYPg==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@unrs/resolver-binding-linux-x64-gnu": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-linux-x64-gnu/-/resolver-binding-linux-x64-gnu-1.11.1.tgz", + "integrity": "sha512-C3ZAHugKgovV5YvAMsxhq0gtXuwESUKc5MhEtjBpLoHPLYM+iuwSj3lflFwK3DPm68660rZ7G8BMcwSro7hD5w==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@unrs/resolver-binding-linux-x64-musl": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-linux-x64-musl/-/resolver-binding-linux-x64-musl-1.11.1.tgz", + "integrity": "sha512-rV0YSoyhK2nZ4vEswT/QwqzqQXw5I6CjoaYMOX0TqBlWhojUf8P94mvI7nuJTeaCkkds3QE4+zS8Ko+GdXuZtA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@unrs/resolver-binding-wasm32-wasi": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-wasm32-wasi/-/resolver-binding-wasm32-wasi-1.11.1.tgz", + "integrity": "sha512-5u4RkfxJm+Ng7IWgkzi3qrFOvLvQYnPBmjmZQ8+szTK/b31fQCnleNl1GgEt7nIsZRIf5PLhPwT0WM+q45x/UQ==", + "cpu": [ + "wasm32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "@napi-rs/wasm-runtime": "^0.2.11" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@unrs/resolver-binding-win32-arm64-msvc": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-win32-arm64-msvc/-/resolver-binding-win32-arm64-msvc-1.11.1.tgz", + "integrity": "sha512-nRcz5Il4ln0kMhfL8S3hLkxI85BXs3o8EYoattsJNdsX4YUU89iOkVn7g0VHSRxFuVMdM4Q1jEpIId1Ihim/Uw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@unrs/resolver-binding-win32-ia32-msvc": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-win32-ia32-msvc/-/resolver-binding-win32-ia32-msvc-1.11.1.tgz", + "integrity": "sha512-DCEI6t5i1NmAZp6pFonpD5m7i6aFrpofcp4LA2i8IIq60Jyo28hamKBxNrZcyOwVOZkgsRp9O2sXWBWP8MnvIQ==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@unrs/resolver-binding-win32-x64-msvc": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-win32-x64-msvc/-/resolver-binding-win32-x64-msvc-1.11.1.tgz", + "integrity": "sha512-lrW200hZdbfRtztbygyaq/6jP6AKE8qQN2KvPcJ+x7wiD038YtnYtZ82IMNJ69GJibV7bwL3y9FgK+5w/pYt6g==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, "node_modules/acorn": { "version": "8.15.0", "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.15.0.tgz", @@ -1186,13 +1600,16 @@ } }, "node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "version": "6.2.2", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.2.2.tgz", + "integrity": "sha512-Bq3SmSpyFHaWjPk8If9yc6svM8c56dB5BAtW4Qbw5jHTwwXXcTLoRMkpDJp6VL0XzlWaCHTXrkFURMYmD0sLqg==", "dev": true, "license": "MIT", "engines": { - "node": ">=8" + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-regex?sponsor=1" } }, "node_modules/ansi-styles": { @@ -1243,9 +1660,9 @@ } }, "node_modules/aws-cdk": { - "version": "2.1113.0", - "resolved": "https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.1113.0.tgz", - "integrity": "sha512-7D2cVJ66tRZ7KbdTfQLgwPu/XvAeb3r42MMVG605kaxWdIUczdJcuk9x5JVahDCFKRcjepLLYu5N+ovnHFYxBQ==", + "version": "2.1115.0", + "resolved": "https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.1115.0.tgz", + "integrity": "sha512-PpNNflDt1L2TxpMh2h7cPHnFkDVeY1hwIxuGuvswS08mA0syOT4OmZx8hZYdcLru6NceCsn0x/7uTHpb6Hzo5A==", "dev": true, "license": "Apache-2.0", "bin": { @@ -1256,9 +1673,9 @@ } }, "node_modules/aws-cdk-lib": { - "version": "2.244.0", - "resolved": "https://registry.npmjs.org/aws-cdk-lib/-/aws-cdk-lib-2.244.0.tgz", - "integrity": "sha512-j5FVeZv5W+v6j6OnW8RjoN04T+8pYvDJJV7yXhhj4IiGDKPgMH3fflQLQXJousd2QQk+nSAjghDVJcrZ4GFyGA==", + "version": "2.245.0", + "resolved": "https://registry.npmjs.org/aws-cdk-lib/-/aws-cdk-lib-2.245.0.tgz", + "integrity": "sha512-Yfeb+wKC6s+Ttm/N93C6vY6ksyCh68WaG/j3N6dalJWTW/V4o6hUolHm+v2c2IofJEUS45c5AF/EEj24e9hfMA==", "bundleDependencies": [ "@balena/dockerignore", "@aws-cdk/cloud-assembly-api", @@ -1277,8 +1694,8 @@ "dependencies": { "@aws-cdk/asset-awscli-v1": "2.2.263", "@aws-cdk/asset-node-proxy-agent-v6": "^2.1.1", - "@aws-cdk/cloud-assembly-api": "^2.1.1", - "@aws-cdk/cloud-assembly-schema": "^52.1.0", + "@aws-cdk/cloud-assembly-api": "^2.2.0", + "@aws-cdk/cloud-assembly-schema": "^53.0.0", "@balena/dockerignore": "^1.0.2", "case": "1.6.3", "fs-extra": "^11.3.3", @@ -1289,7 +1706,7 @@ "punycode": "^2.3.1", "semver": "^7.7.4", "table": "^6.9.0", - "yaml": "1.10.2" + "yaml": "1.10.3" }, "engines": { "node": ">= 20.0.0" @@ -1299,7 +1716,7 @@ } }, "node_modules/aws-cdk-lib/node_modules/@aws-cdk/cloud-assembly-api": { - "version": "2.1.1", + "version": "2.2.0", "bundleDependencies": [ "jsonschema", "semver" @@ -1308,13 +1725,13 @@ "license": "Apache-2.0", "dependencies": { "jsonschema": "~1.4.1", - "semver": "^7.7.3" + "semver": "^7.7.4" }, "engines": { "node": ">= 18.0.0" }, "peerDependencies": { - "@aws-cdk/cloud-assembly-schema": ">=52.1.0" + "@aws-cdk/cloud-assembly-schema": ">=53.0.0" } }, "node_modules/aws-cdk-lib/node_modules/@aws-cdk/cloud-assembly-api/node_modules/jsonschema": { @@ -1326,7 +1743,7 @@ } }, "node_modules/aws-cdk-lib/node_modules/@aws-cdk/cloud-assembly-api/node_modules/semver": { - "version": "7.7.3", + "version": "7.7.4", "inBundle": true, "license": "ISC", "bin": { @@ -1641,7 +2058,7 @@ } }, "node_modules/aws-cdk-lib/node_modules/yaml": { - "version": "1.10.2", + "version": "1.10.3", "inBundle": true, "license": "ISC", "engines": { @@ -1649,75 +2066,58 @@ } }, "node_modules/babel-jest": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/babel-jest/-/babel-jest-29.7.0.tgz", - "integrity": "sha512-BrvGY3xZSwEcCzKvKsCi2GgHqDqsYkOP4/by5xCgIwGXQxIEh+8ew3gmrE1y7XRR6LHZIj6yLYnUi/mm2KXKBg==", + "version": "30.3.0", + "resolved": "https://registry.npmjs.org/babel-jest/-/babel-jest-30.3.0.tgz", + "integrity": "sha512-gRpauEU2KRrCox5Z296aeVHR4jQ98BCnu0IO332D/xpHNOsIH/bgSRk9k6GbKIbBw8vFeN6ctuu6tV8WOyVfYQ==", "dev": true, "license": "MIT", "dependencies": { - "@jest/transform": "^29.7.0", - "@types/babel__core": "^7.1.14", - "babel-plugin-istanbul": "^6.1.1", - "babel-preset-jest": "^29.6.3", - "chalk": "^4.0.0", - "graceful-fs": "^4.2.9", + "@jest/transform": "30.3.0", + "@types/babel__core": "^7.20.5", + "babel-plugin-istanbul": "^7.0.1", + "babel-preset-jest": "30.3.0", + "chalk": "^4.1.2", + "graceful-fs": "^4.2.11", "slash": "^3.0.0" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" }, "peerDependencies": { - "@babel/core": "^7.8.0" + "@babel/core": "^7.11.0 || ^8.0.0-0" } }, "node_modules/babel-plugin-istanbul": { - "version": "6.1.1", - "resolved": "https://registry.npmjs.org/babel-plugin-istanbul/-/babel-plugin-istanbul-6.1.1.tgz", - "integrity": "sha512-Y1IQok9821cC9onCx5otgFfRm7Lm+I+wwxOx738M/WLPZ9Q42m4IG5W0FNX8WLL2gYMZo3JkuXIH2DOpWM+qwA==", + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/babel-plugin-istanbul/-/babel-plugin-istanbul-7.0.1.tgz", + "integrity": "sha512-D8Z6Qm8jCvVXtIRkBnqNHX0zJ37rQcFJ9u8WOS6tkYOsRdHBzypCstaxWiu5ZIlqQtviRYbgnRLSoCEvjqcqbA==", "dev": true, "license": "BSD-3-Clause", + "workspaces": [ + "test/babel-8" + ], "dependencies": { "@babel/helper-plugin-utils": "^7.0.0", "@istanbuljs/load-nyc-config": "^1.0.0", - "@istanbuljs/schema": "^0.1.2", - "istanbul-lib-instrument": "^5.0.4", + "@istanbuljs/schema": "^0.1.3", + "istanbul-lib-instrument": "^6.0.2", "test-exclude": "^6.0.0" }, "engines": { - "node": ">=8" - } - }, - "node_modules/babel-plugin-istanbul/node_modules/istanbul-lib-instrument": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/istanbul-lib-instrument/-/istanbul-lib-instrument-5.2.1.tgz", - "integrity": "sha512-pzqtp31nLv/XFOzXGuvhCb8qhjmTVo5vjVk19XE4CRlSWz0KoeJ3bw9XsA7nOp9YBf4qHjwBxkDzKcME/J29Yg==", - "dev": true, - "license": "BSD-3-Clause", - "dependencies": { - "@babel/core": "^7.12.3", - "@babel/parser": "^7.14.7", - "@istanbuljs/schema": "^0.1.2", - "istanbul-lib-coverage": "^3.2.0", - "semver": "^6.3.0" - }, - "engines": { - "node": ">=8" + "node": ">=12" } }, "node_modules/babel-plugin-jest-hoist": { - "version": "29.6.3", - "resolved": "https://registry.npmjs.org/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-29.6.3.tgz", - "integrity": "sha512-ESAc/RJvGTFEzRwOTT4+lNDk/GNHMkKbNzsvT0qKRfDyyYTskxB5rnU2njIDYVxXCBHHEI1c0YwHob3WaYujOg==", + "version": "30.3.0", + "resolved": "https://registry.npmjs.org/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-30.3.0.tgz", + "integrity": "sha512-+TRkByhsws6sfPjVaitzadk1I0F5sPvOVUH5tyTSzhePpsGIVrdeunHSw/C36QeocS95OOk8lunc4rlu5Anwsg==", "dev": true, "license": "MIT", "dependencies": { - "@babel/template": "^7.3.3", - "@babel/types": "^7.3.3", - "@types/babel__core": "^7.1.14", - "@types/babel__traverse": "^7.0.6" + "@types/babel__core": "^7.20.5" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" } }, "node_modules/babel-preset-current-node-syntax": { @@ -1748,20 +2148,20 @@ } }, "node_modules/babel-preset-jest": { - "version": "29.6.3", - "resolved": "https://registry.npmjs.org/babel-preset-jest/-/babel-preset-jest-29.6.3.tgz", - "integrity": "sha512-0B3bhxR6snWXJZtR/RliHTDPRgn1sNHOR0yVtq/IiQFyuOVjFS+wuio/R4gSNkyYmKmJB4wGZv2NZanmKmTnNA==", + "version": "30.3.0", + "resolved": "https://registry.npmjs.org/babel-preset-jest/-/babel-preset-jest-30.3.0.tgz", + "integrity": "sha512-6ZcUbWHC+dMz2vfzdNwi87Z1gQsLNK2uLuK1Q89R11xdvejcivlYYwDlEv0FHX3VwEXpbBQ9uufB/MUNpZGfhQ==", "dev": true, "license": "MIT", "dependencies": { - "babel-plugin-jest-hoist": "^29.6.3", - "babel-preset-current-node-syntax": "^1.0.0" + "babel-plugin-jest-hoist": "30.3.0", + "babel-preset-current-node-syntax": "^1.2.0" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" }, "peerDependencies": { - "@babel/core": "^7.0.0" + "@babel/core": "^7.11.0 || ^8.0.0-beta.1" } }, "node_modules/balanced-match": { @@ -1782,27 +2182,13 @@ } }, "node_modules/brace-expansion": { - "version": "1.1.12", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", - "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", - "dev": true, - "license": "MIT", - "dependencies": { - "balanced-match": "^1.0.0", - "concat-map": "0.0.1" - } - }, - "node_modules/braces": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", - "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.3.tgz", + "integrity": "sha512-MCV/fYJEbqx68aE58kv2cA/kiky1G8vux3OR6/jbS+jIMe/6fJWa0DTzJU7dqijOWYwHi1t29FlfYI9uytqlpA==", "dev": true, "license": "MIT", "dependencies": { - "fill-range": "^7.1.1" - }, - "engines": { - "node": ">=8" + "balanced-match": "^1.0.0" } }, "node_modules/browserslist": { @@ -1938,9 +2324,9 @@ } }, "node_modules/ci-info": { - "version": "3.9.0", - "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-3.9.0.tgz", - "integrity": "sha512-NIxF55hv4nSqQswkAeiOi1r83xy8JldOFDTWiug55KBu9Jnblncd2U6ViHmYgHf01TPZS77NJBhBMKdWj9HQMQ==", + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-4.4.0.tgz", + "integrity": "sha512-77PSwercCZU2Fc4sX94eF8k8Pxte6JAwL4/ICZLFjJLqegs7kCuAsqqj/70NQF6TvDpgFjkubQB2FW2ZZddvQg==", "dev": true, "funding": [ { @@ -1954,9 +2340,9 @@ } }, "node_modules/cjs-module-lexer": { - "version": "1.4.3", - "resolved": "https://registry.npmjs.org/cjs-module-lexer/-/cjs-module-lexer-1.4.3.tgz", - "integrity": "sha512-9z8TZaGM1pfswYeXrUpzPrkx8UnWYdhJclsiYMm6x/w5+nN+8Tf/LnAgfLGQCm59qAOxU8WwHEq2vNwF6i4j+Q==", + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/cjs-module-lexer/-/cjs-module-lexer-2.2.0.tgz", + "integrity": "sha512-4bHTS2YuzUvtoLjdy+98ykbNB5jS0+07EvFNXerqZQJ89F7DI6ET7OQo/HJuW6K0aVsKA9hj9/RVb2kQVOrPDQ==", "dev": true, "license": "MIT" }, @@ -1975,26 +2361,89 @@ "node": ">=12" } }, - "node_modules/co": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/co/-/co-4.6.0.tgz", - "integrity": "sha512-QVb0dM5HvG+uaxitm8wONl7jltx8dqhfU33DcqtOZcLSVIKSDDLDi7+0LbAKiyI8hD9u42m2YxXSkMGWThaecQ==", + "node_modules/cliui/node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", "dev": true, "license": "MIT", "engines": { - "iojs": ">= 1.0.0", - "node": ">= 0.12.0" + "node": ">=8" } }, - "node_modules/collect-v8-coverage": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/collect-v8-coverage/-/collect-v8-coverage-1.0.3.tgz", - "integrity": "sha512-1L5aqIkwPfiodaMgQunkF1zRhNqifHBmtbbbxcr6yVxxBnliw4TDOW6NxpO8DJLgJ16OT+Y4ztZqP6p/FtXnAw==", + "node_modules/cliui/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", "dev": true, "license": "MIT" }, - "node_modules/color-convert": { - "version": "2.0.1", + "node_modules/cliui/node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "license": "MIT", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/cliui/node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/cliui/node_modules/wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/co": { + "version": "4.6.0", + "resolved": "https://registry.npmjs.org/co/-/co-4.6.0.tgz", + "integrity": "sha512-QVb0dM5HvG+uaxitm8wONl7jltx8dqhfU33DcqtOZcLSVIKSDDLDi7+0LbAKiyI8hD9u42m2YxXSkMGWThaecQ==", + "dev": true, + "license": "MIT", + "engines": { + "iojs": ">= 1.0.0", + "node": ">= 0.12.0" + } + }, + "node_modules/collect-v8-coverage": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/collect-v8-coverage/-/collect-v8-coverage-1.0.3.tgz", + "integrity": "sha512-1L5aqIkwPfiodaMgQunkF1zRhNqifHBmtbbbxcr6yVxxBnliw4TDOW6NxpO8DJLgJ16OT+Y4ztZqP6p/FtXnAw==", + "dev": true, + "license": "MIT" + }, + "node_modules/color-convert": { + "version": "2.0.1", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "dev": true, @@ -2021,9 +2470,9 @@ "license": "MIT" }, "node_modules/constructs": { - "version": "10.5.1", - "resolved": "https://registry.npmjs.org/constructs/-/constructs-10.5.1.tgz", - "integrity": "sha512-f/TfFXiS3G/yVIXDjOQn9oTlyu9Wo7Fxyjj7lb8r92iO81jR2uST+9MstxZTmDGx/CgIbxCXkFXgupnLTNxQZg==", + "version": "10.6.0", + "resolved": "https://registry.npmjs.org/constructs/-/constructs-10.6.0.tgz", + "integrity": "sha512-TxHOnBO5zMo/G76ykzGF/wMpEHu257TbWiIxP9K0Yv/+t70UzgBQiTqjkAsWOPC6jW91DzJI0+ehQV6xDRNBuQ==", "license": "Apache-2.0" }, "node_modules/convert-source-map": { @@ -2033,28 +2482,6 @@ "dev": true, "license": "MIT" }, - "node_modules/create-jest": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/create-jest/-/create-jest-29.7.0.tgz", - "integrity": "sha512-Adz2bdH0Vq3F53KEMJOoftQFutWCukm6J24wbPWRO4k1kMY7gS7ds/uoJkNuV8wDCtWWnuwGcJwpWcih+zEW1Q==", - "dev": true, - "license": "MIT", - "dependencies": { - "@jest/types": "^29.6.3", - "chalk": "^4.0.0", - "exit": "^0.1.2", - "graceful-fs": "^4.2.9", - "jest-config": "^29.7.0", - "jest-util": "^29.7.0", - "prompts": "^2.0.1" - }, - "bin": { - "create-jest": "bin/create-jest.js" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, "node_modules/create-require": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/create-require/-/create-require-1.1.1.tgz", @@ -2096,9 +2523,9 @@ } }, "node_modules/dedent": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/dedent/-/dedent-1.7.0.tgz", - "integrity": "sha512-HGFtf8yhuhGhqO07SV79tRp+br4MnbdjeVxotpn1QBl30pcLLCQjX5b2295ll0fv8RKDKsmWYrl05usHM9CewQ==", + "version": "1.7.2", + "resolved": "https://registry.npmjs.org/dedent/-/dedent-1.7.2.tgz", + "integrity": "sha512-WzMx3mW98SN+zn3hgemf4OzdmyNhhhKz5Ay0pUfQiMQ3e1g+xmTJWp/pKdwKVXhdSkAEGIIzqeuWrL3mV/AXbA==", "dev": true, "license": "MIT", "peerDependencies": { @@ -2131,24 +2558,21 @@ } }, "node_modules/diff": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz", - "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==", + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.4.tgz", + "integrity": "sha512-X07nttJQkwkfKfvTPG/KSnE2OMdcUCao6+eXF3wmnIQRn2aPAHH3VxDbDOdegkd6JbPsXqShpvEOHfAT+nCNwQ==", "dev": true, "license": "BSD-3-Clause", "engines": { "node": ">=0.3.1" } }, - "node_modules/diff-sequences": { - "version": "29.6.3", - "resolved": "https://registry.npmjs.org/diff-sequences/-/diff-sequences-29.6.3.tgz", - "integrity": "sha512-EjePK1srD3P08o2j4f0ExnylqRs5B9tJjcp9t1krH2qRi8CCdsYfwe9JgSLurFBWwq4uOlipzfk5fHNvwFKr8Q==", + "node_modules/eastasianwidth": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz", + "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==", "dev": true, - "license": "MIT", - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } + "license": "MIT" }, "node_modules/electron-to-chromium": { "version": "1.5.264", @@ -2171,9 +2595,9 @@ } }, "node_modules/emoji-regex": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "version": "9.2.2", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", + "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==", "dev": true, "license": "MIT" }, @@ -2245,30 +2669,32 @@ "url": "https://github.com/sindresorhus/execa?sponsor=1" } }, - "node_modules/exit": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/exit/-/exit-0.1.2.tgz", - "integrity": "sha512-Zk/eNKV2zbjpKzrsQ+n1G6poVbErQxJ0LBOJXaKZ1EViLzH+hrLu9cdXI4zw9dBQJslwBEpbQ2P1oS7nDxs6jQ==", + "node_modules/exit-x": { + "version": "0.2.2", + "resolved": "https://registry.npmjs.org/exit-x/-/exit-x-0.2.2.tgz", + "integrity": "sha512-+I6B/IkJc1o/2tiURyz/ivu/O0nKNEArIUB5O7zBrlDVJr22SCLH3xTeEry428LvFhRzIA1g8izguxJ/gbNcVQ==", "dev": true, + "license": "MIT", "engines": { "node": ">= 0.8.0" } }, "node_modules/expect": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/expect/-/expect-29.7.0.tgz", - "integrity": "sha512-2Zks0hf1VLFYI1kbh0I5jP3KHHyCHpkfyHBzsSXRFgl/Bg9mWYfMW8oD+PdMPlEwy5HNsR9JutYy6pMeOh61nw==", + "version": "30.3.0", + "resolved": "https://registry.npmjs.org/expect/-/expect-30.3.0.tgz", + "integrity": "sha512-1zQrciTiQfRdo7qJM1uG4navm8DayFa2TgCSRlzUyNkhcJ6XUZF3hjnpkyr3VhAqPH7i/9GkG7Tv5abz6fqz0Q==", "dev": true, "license": "MIT", "dependencies": { - "@jest/expect-utils": "^29.7.0", - "jest-get-type": "^29.6.3", - "jest-matcher-utils": "^29.7.0", - "jest-message-util": "^29.7.0", - "jest-util": "^29.7.0" + "@jest/expect-utils": "30.3.0", + "@jest/get-type": "30.1.0", + "jest-matcher-utils": "30.3.0", + "jest-message-util": "30.3.0", + "jest-mock": "30.3.0", + "jest-util": "30.3.0" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" } }, "node_modules/fast-json-stable-stringify": { @@ -2288,19 +2714,6 @@ "bser": "2.1.1" } }, - "node_modules/fill-range": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", - "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", - "dev": true, - "license": "MIT", - "dependencies": { - "to-regex-range": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/find-up": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", @@ -2315,6 +2728,36 @@ "node": ">=8" } }, + "node_modules/foreground-child": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.3.1.tgz", + "integrity": "sha512-gIXjKqtFuWEgzFRJA9WCQeSJLZDjgJUOMCMzxtvFq/37KojM1BFGufqsCy0r4qSQmYLsZYMeyRqzIWOMup03sw==", + "dev": true, + "license": "ISC", + "dependencies": { + "cross-spawn": "^7.0.6", + "signal-exit": "^4.0.1" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/foreground-child/node_modules/signal-exit": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", + "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, "node_modules/fs.realpath": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", @@ -2323,9 +2766,9 @@ "license": "ISC" }, "node_modules/fsevents": { - "version": "2.3.2", - "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz", - "integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==", + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", "dev": true, "hasInstallScript": true, "license": "MIT", @@ -2337,16 +2780,6 @@ "node": "^8.16.0 || ^10.6.0 || >=11.0.0" } }, - "node_modules/function-bind": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", - "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", - "dev": true, - "license": "MIT", - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/gensync": { "version": "1.0.0-beta.2", "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz", @@ -2391,22 +2824,22 @@ } }, "node_modules/glob": { - "version": "7.2.3", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", - "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", - "deprecated": "Glob versions prior to v9 are no longer supported", + "version": "10.5.0", + "resolved": "https://registry.npmjs.org/glob/-/glob-10.5.0.tgz", + "integrity": "sha512-DfXN8DfhJ7NH3Oe7cFmu3NCu1wKbkReJ8TorzSAFbSKrlNaQSKfIzqYqVY8zlbs2NLBbWpRiU52GX2PbaBVNkg==", + "deprecated": "Old versions of glob are not supported, and contain widely publicized security vulnerabilities, which have been fixed in the current version. Please update. Support for old versions may be purchased (at exorbitant rates) by contacting i@izs.me", "dev": true, "license": "ISC", "dependencies": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^3.1.1", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" + "foreground-child": "^3.1.0", + "jackspeak": "^3.1.2", + "minimatch": "^9.0.4", + "minipass": "^7.1.2", + "package-json-from-dist": "^1.0.0", + "path-scurry": "^1.11.1" }, - "engines": { - "node": "*" + "bin": { + "glob": "dist/esm/bin.mjs" }, "funding": { "url": "https://github.com/sponsors/isaacs" @@ -2451,19 +2884,6 @@ "node": ">=8" } }, - "node_modules/hasown": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", - "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "function-bind": "^1.1.2" - }, - "engines": { - "node": ">= 0.4" - } - }, "node_modules/html-escaper": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/html-escaper/-/html-escaper-2.0.2.tgz", @@ -2537,22 +2957,6 @@ "dev": true, "license": "MIT" }, - "node_modules/is-core-module": { - "version": "2.16.1", - "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.16.1.tgz", - "integrity": "sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w==", - "dev": true, - "license": "MIT", - "dependencies": { - "hasown": "^2.0.2" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/is-fullwidth-code-point": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", @@ -2573,16 +2977,6 @@ "node": ">=6" } }, - "node_modules/is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.12.0" - } - }, "node_modules/is-stream": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", @@ -2631,9 +3025,9 @@ } }, "node_modules/istanbul-lib-instrument/node_modules/semver": { - "version": "7.7.3", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.3.tgz", - "integrity": "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==", + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.4.tgz", + "integrity": "sha512-vFKC2IEtQnVhpT78h1Yp8wzwrf8CM+MzKMHGJZfBtzhZNycRFnXsHk6E5TxIkkMsgNS7mdX3AGB7x2QM2di4lA==", "dev": true, "license": "ISC", "bin": { @@ -2659,15 +3053,15 @@ } }, "node_modules/istanbul-lib-source-maps": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/istanbul-lib-source-maps/-/istanbul-lib-source-maps-4.0.1.tgz", - "integrity": "sha512-n3s8EwkdFIJCG3BPKBYvskgXGoy88ARzvegkitk60NxRdwltLOTaH7CUiMRXvwYorl0Q712iEjcWB+fK/MrWVw==", + "version": "5.0.6", + "resolved": "https://registry.npmjs.org/istanbul-lib-source-maps/-/istanbul-lib-source-maps-5.0.6.tgz", + "integrity": "sha512-yg2d+Em4KizZC5niWhQaIomgf5WlL4vOOjZ5xGCmF8SnPE/mDWWXgvRExdcpCgh9lLRRa1/fSYp2ymmbJ1pI+A==", "dev": true, "license": "BSD-3-Clause", "dependencies": { + "@jridgewell/trace-mapping": "^0.3.23", "debug": "^4.1.1", - "istanbul-lib-coverage": "^3.0.0", - "source-map": "^0.6.1" + "istanbul-lib-coverage": "^3.0.0" }, "engines": { "node": ">=10" @@ -2687,23 +3081,39 @@ "node": ">=8" } }, + "node_modules/jackspeak": { + "version": "3.4.3", + "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-3.4.3.tgz", + "integrity": "sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw==", + "dev": true, + "license": "BlueOak-1.0.0", + "dependencies": { + "@isaacs/cliui": "^8.0.2" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + }, + "optionalDependencies": { + "@pkgjs/parseargs": "^0.11.0" + } + }, "node_modules/jest": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest/-/jest-29.7.0.tgz", - "integrity": "sha512-NIy3oAFp9shda19hy4HK0HRTWKtPJmGdnvywu01nOqNC2vZg+Z+fvJDxpMQA88eb2I9EcafcdjYgsDthnYTvGw==", + "version": "30.3.0", + "resolved": "https://registry.npmjs.org/jest/-/jest-30.3.0.tgz", + "integrity": "sha512-AkXIIFcaazymvey2i/+F94XRnM6TsVLZDhBMLsd1Sf/W0wzsvvpjeyUrCZD6HGG4SDYPgDJDBKeiJTBb10WzMg==", "dev": true, "license": "MIT", "dependencies": { - "@jest/core": "^29.7.0", - "@jest/types": "^29.6.3", - "import-local": "^3.0.2", - "jest-cli": "^29.7.0" + "@jest/core": "30.3.0", + "@jest/types": "30.3.0", + "import-local": "^3.2.0", + "jest-cli": "30.3.0" }, "bin": { "jest": "bin/jest.js" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" }, "peerDependencies": { "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" @@ -2715,76 +3125,75 @@ } }, "node_modules/jest-changed-files": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-changed-files/-/jest-changed-files-29.7.0.tgz", - "integrity": "sha512-fEArFiwf1BpQ+4bXSprcDc3/x4HSzL4al2tozwVpDFpsxALjLYdyiIK4e5Vz66GQJIbXJ82+35PtysofptNX2w==", + "version": "30.3.0", + "resolved": "https://registry.npmjs.org/jest-changed-files/-/jest-changed-files-30.3.0.tgz", + "integrity": "sha512-B/7Cny6cV5At6M25EWDgf9S617lHivamL8vl6KEpJqkStauzcG4e+WPfDgMMF+H4FVH4A2PLRyvgDJan4441QA==", "dev": true, "license": "MIT", "dependencies": { - "execa": "^5.0.0", - "jest-util": "^29.7.0", + "execa": "^5.1.1", + "jest-util": "30.3.0", "p-limit": "^3.1.0" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" } }, "node_modules/jest-circus": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-circus/-/jest-circus-29.7.0.tgz", - "integrity": "sha512-3E1nCMgipcTkCocFwM90XXQab9bS+GMsjdpmPrlelaxwD93Ad8iVEjX/vvHPdLPnFf+L40u+5+iutRdA1N9myw==", + "version": "30.3.0", + "resolved": "https://registry.npmjs.org/jest-circus/-/jest-circus-30.3.0.tgz", + "integrity": "sha512-PyXq5szeSfR/4f1lYqCmmQjh0vqDkURUYi9N6whnHjlRz4IUQfMcXkGLeEoiJtxtyPqgUaUUfyQlApXWBSN1RA==", "dev": true, "license": "MIT", "dependencies": { - "@jest/environment": "^29.7.0", - "@jest/expect": "^29.7.0", - "@jest/test-result": "^29.7.0", - "@jest/types": "^29.6.3", + "@jest/environment": "30.3.0", + "@jest/expect": "30.3.0", + "@jest/test-result": "30.3.0", + "@jest/types": "30.3.0", "@types/node": "*", - "chalk": "^4.0.0", + "chalk": "^4.1.2", "co": "^4.6.0", - "dedent": "^1.0.0", - "is-generator-fn": "^2.0.0", - "jest-each": "^29.7.0", - "jest-matcher-utils": "^29.7.0", - "jest-message-util": "^29.7.0", - "jest-runtime": "^29.7.0", - "jest-snapshot": "^29.7.0", - "jest-util": "^29.7.0", + "dedent": "^1.6.0", + "is-generator-fn": "^2.1.0", + "jest-each": "30.3.0", + "jest-matcher-utils": "30.3.0", + "jest-message-util": "30.3.0", + "jest-runtime": "30.3.0", + "jest-snapshot": "30.3.0", + "jest-util": "30.3.0", "p-limit": "^3.1.0", - "pretty-format": "^29.7.0", - "pure-rand": "^6.0.0", + "pretty-format": "30.3.0", + "pure-rand": "^7.0.0", "slash": "^3.0.0", - "stack-utils": "^2.0.3" + "stack-utils": "^2.0.6" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" } }, "node_modules/jest-cli": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-cli/-/jest-cli-29.7.0.tgz", - "integrity": "sha512-OVVobw2IubN/GSYsxETi+gOe7Ka59EFMR/twOU3Jb2GnKKeMGJB5SGUUrEz3SFVmJASUdZUzy83sLNNQ2gZslg==", + "version": "30.3.0", + "resolved": "https://registry.npmjs.org/jest-cli/-/jest-cli-30.3.0.tgz", + "integrity": "sha512-l6Tqx+j1fDXJEW5bqYykDQQ7mQg+9mhWXtnj+tQZrTWYHyHoi6Be8HPumDSA+UiX2/2buEgjA58iJzdj146uCw==", "dev": true, "license": "MIT", "dependencies": { - "@jest/core": "^29.7.0", - "@jest/test-result": "^29.7.0", - "@jest/types": "^29.6.3", - "chalk": "^4.0.0", - "create-jest": "^29.7.0", - "exit": "^0.1.2", - "import-local": "^3.0.2", - "jest-config": "^29.7.0", - "jest-util": "^29.7.0", - "jest-validate": "^29.7.0", - "yargs": "^17.3.1" + "@jest/core": "30.3.0", + "@jest/test-result": "30.3.0", + "@jest/types": "30.3.0", + "chalk": "^4.1.2", + "exit-x": "^0.2.2", + "import-local": "^3.2.0", + "jest-config": "30.3.0", + "jest-util": "30.3.0", + "jest-validate": "30.3.0", + "yargs": "^17.7.2" }, "bin": { "jest": "bin/jest.js" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" }, "peerDependencies": { "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" @@ -2796,215 +3205,210 @@ } }, "node_modules/jest-config": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-config/-/jest-config-29.7.0.tgz", - "integrity": "sha512-uXbpfeQ7R6TZBqI3/TxCU4q4ttk3u0PJeC+E0zbfSoSjq6bJ7buBPxzQPL0ifrkY4DNu4JUdk0ImlBUYi840eQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/core": "^7.11.6", - "@jest/test-sequencer": "^29.7.0", - "@jest/types": "^29.6.3", - "babel-jest": "^29.7.0", - "chalk": "^4.0.0", - "ci-info": "^3.2.0", - "deepmerge": "^4.2.2", - "glob": "^7.1.3", - "graceful-fs": "^4.2.9", - "jest-circus": "^29.7.0", - "jest-environment-node": "^29.7.0", - "jest-get-type": "^29.6.3", - "jest-regex-util": "^29.6.3", - "jest-resolve": "^29.7.0", - "jest-runner": "^29.7.0", - "jest-util": "^29.7.0", - "jest-validate": "^29.7.0", - "micromatch": "^4.0.4", + "version": "30.3.0", + "resolved": "https://registry.npmjs.org/jest-config/-/jest-config-30.3.0.tgz", + "integrity": "sha512-WPMAkMAtNDY9P/oKObtsRG/6KTrhtgPJoBTmk20uDn4Uy6/3EJnnaZJre/FMT1KVRx8cve1r7/FlMIOfRVWL4w==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/core": "^7.27.4", + "@jest/get-type": "30.1.0", + "@jest/pattern": "30.0.1", + "@jest/test-sequencer": "30.3.0", + "@jest/types": "30.3.0", + "babel-jest": "30.3.0", + "chalk": "^4.1.2", + "ci-info": "^4.2.0", + "deepmerge": "^4.3.1", + "glob": "^10.5.0", + "graceful-fs": "^4.2.11", + "jest-circus": "30.3.0", + "jest-docblock": "30.2.0", + "jest-environment-node": "30.3.0", + "jest-regex-util": "30.0.1", + "jest-resolve": "30.3.0", + "jest-runner": "30.3.0", + "jest-util": "30.3.0", + "jest-validate": "30.3.0", "parse-json": "^5.2.0", - "pretty-format": "^29.7.0", + "pretty-format": "30.3.0", "slash": "^3.0.0", "strip-json-comments": "^3.1.1" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" }, "peerDependencies": { "@types/node": "*", + "esbuild-register": ">=3.4.0", "ts-node": ">=9.0.0" }, "peerDependenciesMeta": { "@types/node": { "optional": true }, + "esbuild-register": { + "optional": true + }, "ts-node": { "optional": true } } }, "node_modules/jest-diff": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-diff/-/jest-diff-29.7.0.tgz", - "integrity": "sha512-LMIgiIrhigmPrs03JHpxUh2yISK3vLFPkAodPeo0+BuF7wA2FoQbkEg1u8gBYBThncu7e1oEDUfIXVuTqLRUjw==", + "version": "30.3.0", + "resolved": "https://registry.npmjs.org/jest-diff/-/jest-diff-30.3.0.tgz", + "integrity": "sha512-n3q4PDQjS4LrKxfWB3Z5KNk1XjXtZTBwQp71OP0Jo03Z6V60x++K5L8k6ZrW8MY8pOFylZvHM0zsjS1RqlHJZQ==", "dev": true, "license": "MIT", "dependencies": { - "chalk": "^4.0.0", - "diff-sequences": "^29.6.3", - "jest-get-type": "^29.6.3", - "pretty-format": "^29.7.0" + "@jest/diff-sequences": "30.3.0", + "@jest/get-type": "30.1.0", + "chalk": "^4.1.2", + "pretty-format": "30.3.0" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" } }, "node_modules/jest-docblock": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-docblock/-/jest-docblock-29.7.0.tgz", - "integrity": "sha512-q617Auw3A612guyaFgsbFeYpNP5t2aoUNLwBUbc/0kD1R4t9ixDbyFTHd1nok4epoVFpr7PmeWHrhvuV3XaJ4g==", + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest-docblock/-/jest-docblock-30.2.0.tgz", + "integrity": "sha512-tR/FFgZKS1CXluOQzZvNH3+0z9jXr3ldGSD8bhyuxvlVUwbeLOGynkunvlTMxchC5urrKndYiwCFC0DLVjpOCA==", "dev": true, "license": "MIT", "dependencies": { - "detect-newline": "^3.0.0" + "detect-newline": "^3.1.0" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" } }, "node_modules/jest-each": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-each/-/jest-each-29.7.0.tgz", - "integrity": "sha512-gns+Er14+ZrEoC5fhOfYCY1LOHHr0TI+rQUHZS8Ttw2l7gl+80eHc/gFf2Ktkw0+SIACDTeWvpFcv3B04VembQ==", + "version": "30.3.0", + "resolved": "https://registry.npmjs.org/jest-each/-/jest-each-30.3.0.tgz", + "integrity": "sha512-V8eMndg/aZ+3LnCJgSm13IxS5XSBM22QSZc9BtPK8Dek6pm+hfUNfwBdvsB3d342bo1q7wnSkC38zjX259qZNA==", "dev": true, "license": "MIT", "dependencies": { - "@jest/types": "^29.6.3", - "chalk": "^4.0.0", - "jest-get-type": "^29.6.3", - "jest-util": "^29.7.0", - "pretty-format": "^29.7.0" + "@jest/get-type": "30.1.0", + "@jest/types": "30.3.0", + "chalk": "^4.1.2", + "jest-util": "30.3.0", + "pretty-format": "30.3.0" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" } }, "node_modules/jest-environment-node": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-environment-node/-/jest-environment-node-29.7.0.tgz", - "integrity": "sha512-DOSwCRqXirTOyheM+4d5YZOrWcdu0LNZ87ewUoywbcb2XR4wKgqiG8vNeYwhjFMbEkfju7wx2GYH0P2gevGvFw==", + "version": "30.3.0", + "resolved": "https://registry.npmjs.org/jest-environment-node/-/jest-environment-node-30.3.0.tgz", + "integrity": "sha512-4i6HItw/JSiJVsC5q0hnKIe/hbYfZLVG9YJ/0pU9Hz2n/9qZe3Rhn5s5CUZA5ORZlcdT/vmAXRMyONXJwPrmYQ==", "dev": true, "license": "MIT", "dependencies": { - "@jest/environment": "^29.7.0", - "@jest/fake-timers": "^29.7.0", - "@jest/types": "^29.6.3", + "@jest/environment": "30.3.0", + "@jest/fake-timers": "30.3.0", + "@jest/types": "30.3.0", "@types/node": "*", - "jest-mock": "^29.7.0", - "jest-util": "^29.7.0" + "jest-mock": "30.3.0", + "jest-util": "30.3.0", + "jest-validate": "30.3.0" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/jest-get-type": { - "version": "29.6.3", - "resolved": "https://registry.npmjs.org/jest-get-type/-/jest-get-type-29.6.3.tgz", - "integrity": "sha512-zrteXnqYxfQh7l5FHyL38jL39di8H8rHoecLH3JNxH3BwOrBsNeabdap5e0I23lD4HHI8W5VFBZqG4Eaq5LNcw==", - "dev": true, - "license": "MIT", - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" } }, "node_modules/jest-haste-map": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-haste-map/-/jest-haste-map-29.7.0.tgz", - "integrity": "sha512-fP8u2pyfqx0K1rGn1R9pyE0/KTn+G7PxktWidOBTqFPLYX0b9ksaMFkhK5vrS3DVun09pckLdlx90QthlW7AmA==", + "version": "30.3.0", + "resolved": "https://registry.npmjs.org/jest-haste-map/-/jest-haste-map-30.3.0.tgz", + "integrity": "sha512-mMi2oqG4KRU0R9QEtscl87JzMXfUhbKaFqOxmjb2CKcbHcUGFrJCBWHmnTiUqi6JcnzoBlO4rWfpdl2k/RfLCA==", "dev": true, "license": "MIT", "dependencies": { - "@jest/types": "^29.6.3", - "@types/graceful-fs": "^4.1.3", + "@jest/types": "30.3.0", "@types/node": "*", - "anymatch": "^3.0.3", - "fb-watchman": "^2.0.0", - "graceful-fs": "^4.2.9", - "jest-regex-util": "^29.6.3", - "jest-util": "^29.7.0", - "jest-worker": "^29.7.0", - "micromatch": "^4.0.4", + "anymatch": "^3.1.3", + "fb-watchman": "^2.0.2", + "graceful-fs": "^4.2.11", + "jest-regex-util": "30.0.1", + "jest-util": "30.3.0", + "jest-worker": "30.3.0", + "picomatch": "^4.0.3", "walker": "^1.0.8" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" }, "optionalDependencies": { - "fsevents": "^2.3.2" + "fsevents": "^2.3.3" } }, "node_modules/jest-leak-detector": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-leak-detector/-/jest-leak-detector-29.7.0.tgz", - "integrity": "sha512-kYA8IJcSYtST2BY9I+SMC32nDpBT3J2NvWJx8+JCuCdl/CR1I4EKUJROiP8XtCcxqgTTBGJNdbB1A8XRKbTetw==", + "version": "30.3.0", + "resolved": "https://registry.npmjs.org/jest-leak-detector/-/jest-leak-detector-30.3.0.tgz", + "integrity": "sha512-cuKmUUGIjfXZAiGJ7TbEMx0bcqNdPPI6P1V+7aF+m/FUJqFDxkFR4JqkTu8ZOiU5AaX/x0hZ20KaaIPXQzbMGQ==", "dev": true, "license": "MIT", "dependencies": { - "jest-get-type": "^29.6.3", - "pretty-format": "^29.7.0" + "@jest/get-type": "30.1.0", + "pretty-format": "30.3.0" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" } }, "node_modules/jest-matcher-utils": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-matcher-utils/-/jest-matcher-utils-29.7.0.tgz", - "integrity": "sha512-sBkD+Xi9DtcChsI3L3u0+N0opgPYnCRPtGcQYrgXmR+hmt/fYfWAL0xRXYU8eWOdfuLgBe0YCW3AFtnRLagq/g==", + "version": "30.3.0", + "resolved": "https://registry.npmjs.org/jest-matcher-utils/-/jest-matcher-utils-30.3.0.tgz", + "integrity": "sha512-HEtc9uFQgaUHkC7nLSlQL3Tph4Pjxt/yiPvkIrrDCt9jhoLIgxaubo1G+CFOnmHYMxHwwdaSN7mkIFs6ZK8OhA==", "dev": true, "license": "MIT", "dependencies": { - "chalk": "^4.0.0", - "jest-diff": "^29.7.0", - "jest-get-type": "^29.6.3", - "pretty-format": "^29.7.0" + "@jest/get-type": "30.1.0", + "chalk": "^4.1.2", + "jest-diff": "30.3.0", + "pretty-format": "30.3.0" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" } }, "node_modules/jest-message-util": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-message-util/-/jest-message-util-29.7.0.tgz", - "integrity": "sha512-GBEV4GRADeP+qtB2+6u61stea8mGcOT4mCtrYISZwfu9/ISHFJ/5zOMXYbpBE9RsS5+Gb63DW4FgmnKJ79Kf6w==", + "version": "30.3.0", + "resolved": "https://registry.npmjs.org/jest-message-util/-/jest-message-util-30.3.0.tgz", + "integrity": "sha512-Z/j4Bo+4ySJ+JPJN3b2Qbl9hDq3VrXmnjjGEWD/x0BCXeOXPTV1iZYYzl2X8c1MaCOL+ewMyNBcm88sboE6YWw==", "dev": true, "license": "MIT", "dependencies": { - "@babel/code-frame": "^7.12.13", - "@jest/types": "^29.6.3", - "@types/stack-utils": "^2.0.0", - "chalk": "^4.0.0", - "graceful-fs": "^4.2.9", - "micromatch": "^4.0.4", - "pretty-format": "^29.7.0", + "@babel/code-frame": "^7.27.1", + "@jest/types": "30.3.0", + "@types/stack-utils": "^2.0.3", + "chalk": "^4.1.2", + "graceful-fs": "^4.2.11", + "picomatch": "^4.0.3", + "pretty-format": "30.3.0", "slash": "^3.0.0", - "stack-utils": "^2.0.3" + "stack-utils": "^2.0.6" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" } }, "node_modules/jest-mock": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-mock/-/jest-mock-29.7.0.tgz", - "integrity": "sha512-ITOMZn+UkYS4ZFh83xYAOzWStloNzJFO2s8DWrE4lhtGD+AorgnbkiKERe4wQVBydIGPx059g6riW5Btp6Llnw==", + "version": "30.3.0", + "resolved": "https://registry.npmjs.org/jest-mock/-/jest-mock-30.3.0.tgz", + "integrity": "sha512-OTzICK8CpE+t4ndhKrwlIdbM6Pn8j00lvmSmq5ejiO+KxukbLjgOflKWMn3KE34EZdQm5RqTuKj+5RIEniYhog==", "dev": true, "license": "MIT", "dependencies": { - "@jest/types": "^29.6.3", + "@jest/types": "30.3.0", "@types/node": "*", - "jest-util": "^29.7.0" + "jest-util": "30.3.0" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" } }, "node_modules/jest-pnp-resolver": { @@ -3026,153 +3430,154 @@ } }, "node_modules/jest-regex-util": { - "version": "29.6.3", - "resolved": "https://registry.npmjs.org/jest-regex-util/-/jest-regex-util-29.6.3.tgz", - "integrity": "sha512-KJJBsRCyyLNWCNBOvZyRDnAIfUiRJ8v+hOBQYGn8gDyF3UegwiP4gwRR3/SDa42g1YbVycTidUF3rKjyLFDWbg==", + "version": "30.0.1", + "resolved": "https://registry.npmjs.org/jest-regex-util/-/jest-regex-util-30.0.1.tgz", + "integrity": "sha512-jHEQgBXAgc+Gh4g0p3bCevgRCVRkB4VB70zhoAE48gxeSr1hfUOsM/C2WoJgVL7Eyg//hudYENbm3Ne+/dRVVA==", "dev": true, "license": "MIT", "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" } }, "node_modules/jest-resolve": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-resolve/-/jest-resolve-29.7.0.tgz", - "integrity": "sha512-IOVhZSrg+UvVAshDSDtHyFCCBUl/Q3AAJv8iZ6ZjnZ74xzvwuzLXid9IIIPgTnY62SJjfuupMKZsZQRsCvxEgA==", + "version": "30.3.0", + "resolved": "https://registry.npmjs.org/jest-resolve/-/jest-resolve-30.3.0.tgz", + "integrity": "sha512-NRtTAHQlpd15F9rUR36jqwelbrDV/dY4vzNte3S2kxCKUJRYNd5/6nTSbYiak1VX5g8IoFF23Uj5TURkUW8O5g==", "dev": true, "license": "MIT", "dependencies": { - "chalk": "^4.0.0", - "graceful-fs": "^4.2.9", - "jest-haste-map": "^29.7.0", - "jest-pnp-resolver": "^1.2.2", - "jest-util": "^29.7.0", - "jest-validate": "^29.7.0", - "resolve": "^1.20.0", - "resolve.exports": "^2.0.0", - "slash": "^3.0.0" + "chalk": "^4.1.2", + "graceful-fs": "^4.2.11", + "jest-haste-map": "30.3.0", + "jest-pnp-resolver": "^1.2.3", + "jest-util": "30.3.0", + "jest-validate": "30.3.0", + "slash": "^3.0.0", + "unrs-resolver": "^1.7.11" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" } }, "node_modules/jest-resolve-dependencies": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-resolve-dependencies/-/jest-resolve-dependencies-29.7.0.tgz", - "integrity": "sha512-un0zD/6qxJ+S0et7WxeI3H5XSe9lTBBR7bOHCHXkKR6luG5mwDDlIzVQ0V5cZCuoTgEdcdwzTghYkTWfubi+nA==", + "version": "30.3.0", + "resolved": "https://registry.npmjs.org/jest-resolve-dependencies/-/jest-resolve-dependencies-30.3.0.tgz", + "integrity": "sha512-9ev8s3YN6Hsyz9LV75XUwkCVFlwPbaFn6Wp75qnI0wzAINYWY8Fb3+6y59Rwd3QaS3kKXffHXsZMziMavfz/nw==", "dev": true, "license": "MIT", "dependencies": { - "jest-regex-util": "^29.6.3", - "jest-snapshot": "^29.7.0" + "jest-regex-util": "30.0.1", + "jest-snapshot": "30.3.0" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" } }, "node_modules/jest-runner": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-runner/-/jest-runner-29.7.0.tgz", - "integrity": "sha512-fsc4N6cPCAahybGBfTRcq5wFR6fpLznMg47sY5aDpsoejOcVYFb07AHuSnR0liMcPTgBsA3ZJL6kFOjPdoNipQ==", + "version": "30.3.0", + "resolved": "https://registry.npmjs.org/jest-runner/-/jest-runner-30.3.0.tgz", + "integrity": "sha512-gDv6C9LGKWDPLia9TSzZwf4h3kMQCqyTpq+95PODnTRDO0g9os48XIYYkS6D236vjpBir2fF63YmJFtqkS5Duw==", "dev": true, "license": "MIT", "dependencies": { - "@jest/console": "^29.7.0", - "@jest/environment": "^29.7.0", - "@jest/test-result": "^29.7.0", - "@jest/transform": "^29.7.0", - "@jest/types": "^29.6.3", + "@jest/console": "30.3.0", + "@jest/environment": "30.3.0", + "@jest/test-result": "30.3.0", + "@jest/transform": "30.3.0", + "@jest/types": "30.3.0", "@types/node": "*", - "chalk": "^4.0.0", + "chalk": "^4.1.2", "emittery": "^0.13.1", - "graceful-fs": "^4.2.9", - "jest-docblock": "^29.7.0", - "jest-environment-node": "^29.7.0", - "jest-haste-map": "^29.7.0", - "jest-leak-detector": "^29.7.0", - "jest-message-util": "^29.7.0", - "jest-resolve": "^29.7.0", - "jest-runtime": "^29.7.0", - "jest-util": "^29.7.0", - "jest-watcher": "^29.7.0", - "jest-worker": "^29.7.0", + "exit-x": "^0.2.2", + "graceful-fs": "^4.2.11", + "jest-docblock": "30.2.0", + "jest-environment-node": "30.3.0", + "jest-haste-map": "30.3.0", + "jest-leak-detector": "30.3.0", + "jest-message-util": "30.3.0", + "jest-resolve": "30.3.0", + "jest-runtime": "30.3.0", + "jest-util": "30.3.0", + "jest-watcher": "30.3.0", + "jest-worker": "30.3.0", "p-limit": "^3.1.0", "source-map-support": "0.5.13" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" } }, "node_modules/jest-runtime": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-runtime/-/jest-runtime-29.7.0.tgz", - "integrity": "sha512-gUnLjgwdGqW7B4LvOIkbKs9WGbn+QLqRQQ9juC6HndeDiezIwhDP+mhMwHWCEcfQ5RUXa6OPnFF8BJh5xegwwQ==", + "version": "30.3.0", + "resolved": "https://registry.npmjs.org/jest-runtime/-/jest-runtime-30.3.0.tgz", + "integrity": "sha512-CgC+hIBJbuh78HEffkhNKcbXAytQViplcl8xupqeIWyKQF50kCQA8J7GeJCkjisC6hpnC9Muf8jV5RdtdFbGng==", "dev": true, "license": "MIT", "dependencies": { - "@jest/environment": "^29.7.0", - "@jest/fake-timers": "^29.7.0", - "@jest/globals": "^29.7.0", - "@jest/source-map": "^29.6.3", - "@jest/test-result": "^29.7.0", - "@jest/transform": "^29.7.0", - "@jest/types": "^29.6.3", + "@jest/environment": "30.3.0", + "@jest/fake-timers": "30.3.0", + "@jest/globals": "30.3.0", + "@jest/source-map": "30.0.1", + "@jest/test-result": "30.3.0", + "@jest/transform": "30.3.0", + "@jest/types": "30.3.0", "@types/node": "*", - "chalk": "^4.0.0", - "cjs-module-lexer": "^1.0.0", - "collect-v8-coverage": "^1.0.0", - "glob": "^7.1.3", - "graceful-fs": "^4.2.9", - "jest-haste-map": "^29.7.0", - "jest-message-util": "^29.7.0", - "jest-mock": "^29.7.0", - "jest-regex-util": "^29.6.3", - "jest-resolve": "^29.7.0", - "jest-snapshot": "^29.7.0", - "jest-util": "^29.7.0", + "chalk": "^4.1.2", + "cjs-module-lexer": "^2.1.0", + "collect-v8-coverage": "^1.0.2", + "glob": "^10.5.0", + "graceful-fs": "^4.2.11", + "jest-haste-map": "30.3.0", + "jest-message-util": "30.3.0", + "jest-mock": "30.3.0", + "jest-regex-util": "30.0.1", + "jest-resolve": "30.3.0", + "jest-snapshot": "30.3.0", + "jest-util": "30.3.0", "slash": "^3.0.0", "strip-bom": "^4.0.0" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" } }, "node_modules/jest-snapshot": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-snapshot/-/jest-snapshot-29.7.0.tgz", - "integrity": "sha512-Rm0BMWtxBcioHr1/OX5YCP8Uov4riHvKPknOGs804Zg9JGZgmIBkbtlxJC/7Z4msKYVbIJtfU+tKb8xlYNfdkw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/core": "^7.11.6", - "@babel/generator": "^7.7.2", - "@babel/plugin-syntax-jsx": "^7.7.2", - "@babel/plugin-syntax-typescript": "^7.7.2", - "@babel/types": "^7.3.3", - "@jest/expect-utils": "^29.7.0", - "@jest/transform": "^29.7.0", - "@jest/types": "^29.6.3", - "babel-preset-current-node-syntax": "^1.0.0", - "chalk": "^4.0.0", - "expect": "^29.7.0", - "graceful-fs": "^4.2.9", - "jest-diff": "^29.7.0", - "jest-get-type": "^29.6.3", - "jest-matcher-utils": "^29.7.0", - "jest-message-util": "^29.7.0", - "jest-util": "^29.7.0", - "natural-compare": "^1.4.0", - "pretty-format": "^29.7.0", - "semver": "^7.5.3" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "version": "30.3.0", + "resolved": "https://registry.npmjs.org/jest-snapshot/-/jest-snapshot-30.3.0.tgz", + "integrity": "sha512-f14c7atpb4O2DeNhwcvS810Y63wEn8O1HqK/luJ4F6M4NjvxmAKQwBUWjbExUtMxWJQ0wVgmCKymeJK6NZMnfQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/core": "^7.27.4", + "@babel/generator": "^7.27.5", + "@babel/plugin-syntax-jsx": "^7.27.1", + "@babel/plugin-syntax-typescript": "^7.27.1", + "@babel/types": "^7.27.3", + "@jest/expect-utils": "30.3.0", + "@jest/get-type": "30.1.0", + "@jest/snapshot-utils": "30.3.0", + "@jest/transform": "30.3.0", + "@jest/types": "30.3.0", + "babel-preset-current-node-syntax": "^1.2.0", + "chalk": "^4.1.2", + "expect": "30.3.0", + "graceful-fs": "^4.2.11", + "jest-diff": "30.3.0", + "jest-matcher-utils": "30.3.0", + "jest-message-util": "30.3.0", + "jest-util": "30.3.0", + "pretty-format": "30.3.0", + "semver": "^7.7.2", + "synckit": "^0.11.8" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" } }, "node_modules/jest-snapshot/node_modules/semver": { - "version": "7.7.3", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.3.tgz", - "integrity": "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==", + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.4.tgz", + "integrity": "sha512-vFKC2IEtQnVhpT78h1Yp8wzwrf8CM+MzKMHGJZfBtzhZNycRFnXsHk6E5TxIkkMsgNS7mdX3AGB7x2QM2di4lA==", "dev": true, "license": "ISC", "bin": { @@ -3183,39 +3588,39 @@ } }, "node_modules/jest-util": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-29.7.0.tgz", - "integrity": "sha512-z6EbKajIpqGKU56y5KBUgy1dt1ihhQJgWzUlZHArA/+X2ad7Cb5iF+AK1EWVL/Bo7Rz9uurpqw6SiBCefUbCGA==", + "version": "30.3.0", + "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-30.3.0.tgz", + "integrity": "sha512-/jZDa00a3Sz7rdyu55NLrQCIrbyIkbBxareejQI315f/i8HjYN+ZWsDLLpoQSiUIEIyZF/R8fDg3BmB8AtHttg==", "dev": true, "license": "MIT", "dependencies": { - "@jest/types": "^29.6.3", + "@jest/types": "30.3.0", "@types/node": "*", - "chalk": "^4.0.0", - "ci-info": "^3.2.0", - "graceful-fs": "^4.2.9", - "picomatch": "^2.2.3" + "chalk": "^4.1.2", + "ci-info": "^4.2.0", + "graceful-fs": "^4.2.11", + "picomatch": "^4.0.3" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" } }, "node_modules/jest-validate": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-validate/-/jest-validate-29.7.0.tgz", - "integrity": "sha512-ZB7wHqaRGVw/9hST/OuFUReG7M8vKeq0/J2egIGLdvjHCmYqGARhzXmtgi+gVeZ5uXFF219aOc3Ls2yLg27tkw==", + "version": "30.3.0", + "resolved": "https://registry.npmjs.org/jest-validate/-/jest-validate-30.3.0.tgz", + "integrity": "sha512-I/xzC8h5G+SHCb2P2gWkJYrNiTbeL47KvKeW5EzplkyxzBRBw1ssSHlI/jXec0ukH2q7x2zAWQm7015iusg62Q==", "dev": true, "license": "MIT", "dependencies": { - "@jest/types": "^29.6.3", - "camelcase": "^6.2.0", - "chalk": "^4.0.0", - "jest-get-type": "^29.6.3", + "@jest/get-type": "30.1.0", + "@jest/types": "30.3.0", + "camelcase": "^6.3.0", + "chalk": "^4.1.2", "leven": "^3.1.0", - "pretty-format": "^29.7.0" + "pretty-format": "30.3.0" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" } }, "node_modules/jest-validate/node_modules/camelcase": { @@ -3232,39 +3637,40 @@ } }, "node_modules/jest-watcher": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-watcher/-/jest-watcher-29.7.0.tgz", - "integrity": "sha512-49Fg7WXkU3Vl2h6LbLtMQ/HyB6rXSIX7SqvBLQmssRBGN9I0PNvPmAmCWSOY6SOvrjhI/F7/bGAv9RtnsPA03g==", + "version": "30.3.0", + "resolved": "https://registry.npmjs.org/jest-watcher/-/jest-watcher-30.3.0.tgz", + "integrity": "sha512-PJ1d9ThtTR8aMiBWUdcownq9mDdLXsQzJayTk4kmaBRHKvwNQn+ANveuhEBUyNI2hR1TVhvQ8D5kHubbzBHR/w==", "dev": true, "license": "MIT", "dependencies": { - "@jest/test-result": "^29.7.0", - "@jest/types": "^29.6.3", + "@jest/test-result": "30.3.0", + "@jest/types": "30.3.0", "@types/node": "*", - "ansi-escapes": "^4.2.1", - "chalk": "^4.0.0", + "ansi-escapes": "^4.3.2", + "chalk": "^4.1.2", "emittery": "^0.13.1", - "jest-util": "^29.7.0", - "string-length": "^4.0.1" + "jest-util": "30.3.0", + "string-length": "^4.0.2" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" } }, "node_modules/jest-worker": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-worker/-/jest-worker-29.7.0.tgz", - "integrity": "sha512-eIz2msL/EzL9UFTFFx7jBTkeZfku0yUAyZZZmJ93H2TYEiroIx2PQjEXcwYtYl8zXCxb+PAmA2hLIt/6ZEkPHw==", + "version": "30.3.0", + "resolved": "https://registry.npmjs.org/jest-worker/-/jest-worker-30.3.0.tgz", + "integrity": "sha512-DrCKkaQwHexjRUFTmPzs7sHQe0TSj9nvDALKGdwmK5mW9v7j90BudWirKAJHt3QQ9Dhrg1F7DogPzhChppkJpQ==", "dev": true, "license": "MIT", "dependencies": { "@types/node": "*", - "jest-util": "^29.7.0", + "@ungap/structured-clone": "^1.3.0", + "jest-util": "30.3.0", "merge-stream": "^2.0.0", - "supports-color": "^8.0.0" + "supports-color": "^8.1.1" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" } }, "node_modules/jest-worker/node_modules/supports-color": { @@ -3337,16 +3743,6 @@ "node": ">=6" } }, - "node_modules/kleur": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/kleur/-/kleur-3.0.3.tgz", - "integrity": "sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - } - }, "node_modules/leven": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/leven/-/leven-3.1.0.tgz", @@ -3411,9 +3807,9 @@ } }, "node_modules/make-dir/node_modules/semver": { - "version": "7.7.3", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.3.tgz", - "integrity": "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==", + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.4.tgz", + "integrity": "sha512-vFKC2IEtQnVhpT78h1Yp8wzwrf8CM+MzKMHGJZfBtzhZNycRFnXsHk6E5TxIkkMsgNS7mdX3AGB7x2QM2di4lA==", "dev": true, "license": "ISC", "bin": { @@ -3447,20 +3843,6 @@ "dev": true, "license": "MIT" }, - "node_modules/micromatch": { - "version": "4.0.8", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", - "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", - "dev": true, - "license": "MIT", - "dependencies": { - "braces": "^3.0.3", - "picomatch": "^2.3.1" - }, - "engines": { - "node": ">=8.6" - } - }, "node_modules/mimic-fn": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", @@ -3472,16 +3854,19 @@ } }, "node_modules/minimatch": { - "version": "3.1.5", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.5.tgz", - "integrity": "sha512-VgjWUsnnT6n+NUk6eZq77zeFdpW2LWDzP6zFGrCbHXiYNul5Dzqk2HHQ5uFH2DNW5Xbp8+jVzaeNt94ssEEl4w==", + "version": "9.0.9", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.9.tgz", + "integrity": "sha512-OBwBN9AL4dqmETlpS2zasx+vTeWclWzkblfZk7KTA5j3jeOONz/tRCnZomUyvNg83wL5Zv9Ss6HMJXAgL8R2Yg==", "dev": true, "license": "ISC", "dependencies": { - "brace-expansion": "^1.1.7" + "brace-expansion": "^2.0.2" }, "engines": { - "node": "*" + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" } }, "node_modules/minimist": { @@ -3494,6 +3879,16 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/minipass": { + "version": "7.1.3", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.3.tgz", + "integrity": "sha512-tEBHqDnIoM/1rXME1zgka9g6Q2lcoCkxHLuc7ODJ5BxbP5d4c2Z5cGgtXAku59200Cx7diuHTOYfSBD8n6mm8A==", + "dev": true, + "license": "BlueOak-1.0.0", + "engines": { + "node": ">=16 || 14 >=14.17" + } + }, "node_modules/ms": { "version": "2.1.3", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", @@ -3501,6 +3896,22 @@ "dev": true, "license": "MIT" }, + "node_modules/napi-postinstall": { + "version": "0.3.4", + "resolved": "https://registry.npmjs.org/napi-postinstall/-/napi-postinstall-0.3.4.tgz", + "integrity": "sha512-PHI5f1O0EP5xJ9gQmFGMS6IZcrVvTjpXjz7Na41gTE7eE2hK11lg04CECCYEEjdc17EV4DO+fkGEtt7TpTaTiQ==", + "dev": true, + "license": "MIT", + "bin": { + "napi-postinstall": "lib/cli.js" + }, + "engines": { + "node": "^12.20.0 || ^14.18.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/napi-postinstall" + } + }, "node_modules/natural-compare": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", @@ -3633,6 +4044,13 @@ "node": ">=6" } }, + "node_modules/package-json-from-dist": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/package-json-from-dist/-/package-json-from-dist-1.0.1.tgz", + "integrity": "sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw==", + "dev": true, + "license": "BlueOak-1.0.0" + }, "node_modules/parse-json": { "version": "5.2.0", "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz", @@ -3682,12 +4100,29 @@ "node": ">=8" } }, - "node_modules/path-parse": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", - "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", + "node_modules/path-scurry": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.11.1.tgz", + "integrity": "sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==", "dev": true, - "license": "MIT" + "license": "BlueOak-1.0.0", + "dependencies": { + "lru-cache": "^10.2.0", + "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0" + }, + "engines": { + "node": ">=16 || 14 >=14.18" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/path-scurry/node_modules/lru-cache": { + "version": "10.4.3", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz", + "integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==", + "dev": true, + "license": "ISC" }, "node_modules/picocolors": { "version": "1.1.1", @@ -3697,13 +4132,13 @@ "license": "ISC" }, "node_modules/picomatch": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", - "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.4.tgz", + "integrity": "sha512-QP88BAKvMam/3NxH6vj2o21R6MjxZUAd6nlwAS/pnGvN9IVLocLHxGYIzFhg6fUQ+5th6P4dv4eW9jX3DSIj7A==", "dev": true, "license": "MIT", "engines": { - "node": ">=8.6" + "node": ">=12" }, "funding": { "url": "https://github.com/sponsors/jonschlinkert" @@ -3733,18 +4168,18 @@ } }, "node_modules/pretty-format": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-29.7.0.tgz", - "integrity": "sha512-Pdlw/oPxN+aXdmM9R00JVC9WVFoCLTKJvDVLgmJ+qAffBMxsV85l/Lu7sNx4zSzPyoL2euImuEwHhOXdEgNFZQ==", + "version": "30.3.0", + "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-30.3.0.tgz", + "integrity": "sha512-oG4T3wCbfeuvljnyAzhBvpN45E8iOTXCU/TD3zXW80HA3dQ4ahdqMkWGiPWZvjpQwlbyHrPTWUAqUzGzv4l1JQ==", "dev": true, "license": "MIT", "dependencies": { - "@jest/schemas": "^29.6.3", - "ansi-styles": "^5.0.0", - "react-is": "^18.0.0" + "@jest/schemas": "30.0.5", + "ansi-styles": "^5.2.0", + "react-is": "^18.3.1" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" } }, "node_modules/pretty-format/node_modules/ansi-styles": { @@ -3760,24 +4195,10 @@ "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, - "node_modules/prompts": { - "version": "2.4.2", - "resolved": "https://registry.npmjs.org/prompts/-/prompts-2.4.2.tgz", - "integrity": "sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q==", - "dev": true, - "license": "MIT", - "dependencies": { - "kleur": "^3.0.3", - "sisteransi": "^1.0.5" - }, - "engines": { - "node": ">= 6" - } - }, "node_modules/pure-rand": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/pure-rand/-/pure-rand-6.1.0.tgz", - "integrity": "sha512-bVWawvoZoBYpp6yIoQtQXHZjmz35RSVHnUOTefl8Vcjr8snTPY1wnpSPMWekcFwbxI6gtmT7rSYPFvz71ldiOA==", + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/pure-rand/-/pure-rand-7.0.1.tgz", + "integrity": "sha512-oTUZM/NAZS8p7ANR3SHh30kXB+zK2r2BPcEn/awJIbOvq82WoMN4p62AWWp3Hhw50G0xMsw1mhIBLqHw64EcNQ==", "dev": true, "funding": [ { @@ -3808,31 +4229,10 @@ "node": ">=0.10.0" } }, - "node_modules/resolve": { - "version": "1.22.11", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.11.tgz", - "integrity": "sha512-RfqAvLnMl313r7c9oclB1HhUEAezcpLjz95wFH4LVuhk9JF/r22qmVP9AMmOU4vMX7Q8pN8jwNg/CSpdFnMjTQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "is-core-module": "^2.16.1", - "path-parse": "^1.0.7", - "supports-preserve-symlinks-flag": "^1.0.0" - }, - "bin": { - "resolve": "bin/resolve" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/resolve-cwd": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/resolve-cwd/-/resolve-cwd-3.0.0.tgz", - "integrity": "sha512-OrZaX2Mb+rJCpH/6CpSqt9xFVpN++x01XnN2ie9g6P5/3xelLAkXWVADpdz1IHD/KFfEXyE6V0U01OQ3UO2rEg==", + "node_modules/resolve-cwd": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/resolve-cwd/-/resolve-cwd-3.0.0.tgz", + "integrity": "sha512-OrZaX2Mb+rJCpH/6CpSqt9xFVpN++x01XnN2ie9g6P5/3xelLAkXWVADpdz1IHD/KFfEXyE6V0U01OQ3UO2rEg==", "dev": true, "license": "MIT", "dependencies": { @@ -3852,16 +4252,6 @@ "node": ">=8" } }, - "node_modules/resolve.exports": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/resolve.exports/-/resolve.exports-2.0.3.tgz", - "integrity": "sha512-OcXjMsGdhL4XnbShKpAcSqPMzQoYkYyhbEaeSko47MjRP9NfEQMhZkXL1DoFlt9LWQn4YttrdnV6X2OiyzBi+A==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=10" - } - }, "node_modules/semver": { "version": "6.3.1", "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", @@ -3902,13 +4292,6 @@ "dev": true, "license": "ISC" }, - "node_modules/sisteransi": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/sisteransi/-/sisteransi-1.0.5.tgz", - "integrity": "sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==", - "dev": true, - "license": "MIT" - }, "node_modules/slash": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", @@ -3974,7 +4357,49 @@ "node": ">=10" } }, + "node_modules/string-length/node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/string-length/node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/string-width": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", + "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", + "dev": true, + "license": "MIT", + "dependencies": { + "eastasianwidth": "^0.2.0", + "emoji-regex": "^9.2.2", + "strip-ansi": "^7.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/string-width-cjs": { + "name": "string-width", "version": "4.2.3", "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", @@ -3989,7 +4414,54 @@ "node": ">=8" } }, + "node_modules/string-width-cjs/node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/string-width-cjs/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true, + "license": "MIT" + }, + "node_modules/string-width-cjs/node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/strip-ansi": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.2.0.tgz", + "integrity": "sha512-yDPMNjp4WyfYBkHnjIRLfca1i6KMyGCtsVgoKe/z1+6vukgaENdgGBZt+ZmKPc4gavvEZ5OgHfHdrazhgNyG7w==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^6.2.2" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/strip-ansi?sponsor=1" + } + }, + "node_modules/strip-ansi-cjs": { + "name": "strip-ansi", "version": "6.0.1", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", @@ -4002,6 +4474,16 @@ "node": ">=8" } }, + "node_modules/strip-ansi-cjs/node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, "node_modules/strip-bom": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-4.0.0.tgz", @@ -4048,17 +4530,20 @@ "node": ">=8" } }, - "node_modules/supports-preserve-symlinks-flag": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", - "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "node_modules/synckit": { + "version": "0.11.12", + "resolved": "https://registry.npmjs.org/synckit/-/synckit-0.11.12.tgz", + "integrity": "sha512-Bh7QjT8/SuKUIfObSXNHNSK6WHo6J1tHCqJsuaFDP7gP0fkzSfTxI8y85JrppZ0h8l0maIgc2tfuZQ6/t3GtnQ==", "dev": true, "license": "MIT", + "dependencies": { + "@pkgr/core": "^0.2.9" + }, "engines": { - "node": ">= 0.4" + "node": "^14.18.0 || >=16.0.0" }, "funding": { - "url": "https://github.com/sponsors/ljharb" + "url": "https://opencollective.com/synckit" } }, "node_modules/test-exclude": { @@ -4076,26 +4561,59 @@ "node": ">=8" } }, - "node_modules/tmpl": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/tmpl/-/tmpl-1.0.5.tgz", - "integrity": "sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw==", + "node_modules/test-exclude/node_modules/brace-expansion": { + "version": "1.1.13", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.13.tgz", + "integrity": "sha512-9ZLprWS6EENmhEOpjCYW2c8VkmOvckIJZfkr7rBW6dObmfgJ/L1GpSYW5Hpo9lDz4D1+n0Ckz8rU7FwHDQiG/w==", "dev": true, - "license": "BSD-3-Clause" + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } }, - "node_modules/to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "node_modules/test-exclude/node_modules/glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "deprecated": "Old versions of glob are not supported, and contain widely publicized security vulnerabilities, which have been fixed in the current version. Please update. Support for old versions may be purchased (at exorbitant rates) by contacting i@izs.me", "dev": true, - "license": "MIT", + "license": "ISC", "dependencies": { - "is-number": "^7.0.0" + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" }, "engines": { - "node": ">=8.0" + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" } }, + "node_modules/test-exclude/node_modules/minimatch": { + "version": "3.1.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.5.tgz", + "integrity": "sha512-VgjWUsnnT6n+NUk6eZq77zeFdpW2LWDzP6zFGrCbHXiYNul5Dzqk2HHQ5uFH2DNW5Xbp8+jVzaeNt94ssEEl4w==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/tmpl": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/tmpl/-/tmpl-1.0.5.tgz", + "integrity": "sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw==", + "dev": true, + "license": "BSD-3-Clause" + }, "node_modules/ts-jest": { "version": "29.4.6", "resolved": "https://registry.npmjs.org/ts-jest/-/ts-jest-29.4.6.tgz", @@ -4219,6 +4737,14 @@ } } }, + "node_modules/tslib": { + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz", + "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==", + "dev": true, + "license": "0BSD", + "optional": true + }, "node_modules/type-detect": { "version": "4.0.8", "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz", @@ -4271,12 +4797,47 @@ } }, "node_modules/undici-types": { - "version": "7.16.0", - "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.16.0.tgz", - "integrity": "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw==", + "version": "7.18.2", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.18.2.tgz", + "integrity": "sha512-AsuCzffGHJybSaRrmr5eHr81mwJU3kjw6M+uprWvCXiNeN9SOGwQ3Jn8jb8m3Z6izVgknn1R0FTCEAP2QrLY/w==", "dev": true, "license": "MIT" }, + "node_modules/unrs-resolver": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/unrs-resolver/-/unrs-resolver-1.11.1.tgz", + "integrity": "sha512-bSjt9pjaEBnNiGgc9rUiHGKv5l4/TGzDmYw3RhnkJGtLhbnnA/5qJj7x3dNDCRx/PJxu774LlH8lCOlB4hEfKg==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "dependencies": { + "napi-postinstall": "^0.3.0" + }, + "funding": { + "url": "https://opencollective.com/unrs-resolver" + }, + "optionalDependencies": { + "@unrs/resolver-binding-android-arm-eabi": "1.11.1", + "@unrs/resolver-binding-android-arm64": "1.11.1", + "@unrs/resolver-binding-darwin-arm64": "1.11.1", + "@unrs/resolver-binding-darwin-x64": "1.11.1", + "@unrs/resolver-binding-freebsd-x64": "1.11.1", + "@unrs/resolver-binding-linux-arm-gnueabihf": "1.11.1", + "@unrs/resolver-binding-linux-arm-musleabihf": "1.11.1", + "@unrs/resolver-binding-linux-arm64-gnu": "1.11.1", + "@unrs/resolver-binding-linux-arm64-musl": "1.11.1", + "@unrs/resolver-binding-linux-ppc64-gnu": "1.11.1", + "@unrs/resolver-binding-linux-riscv64-gnu": "1.11.1", + "@unrs/resolver-binding-linux-riscv64-musl": "1.11.1", + "@unrs/resolver-binding-linux-s390x-gnu": "1.11.1", + "@unrs/resolver-binding-linux-x64-gnu": "1.11.1", + "@unrs/resolver-binding-linux-x64-musl": "1.11.1", + "@unrs/resolver-binding-wasm32-wasi": "1.11.1", + "@unrs/resolver-binding-win32-arm64-msvc": "1.11.1", + "@unrs/resolver-binding-win32-ia32-msvc": "1.11.1", + "@unrs/resolver-binding-win32-x64-msvc": "1.11.1" + } + }, "node_modules/update-browserslist-db": { "version": "1.2.2", "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.2.2.tgz", @@ -4364,6 +4925,25 @@ "license": "MIT" }, "node_modules/wrap-ansi": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz", + "integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^6.1.0", + "string-width": "^5.0.1", + "strip-ansi": "^7.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/wrap-ansi-cjs": { + "name": "wrap-ansi", "version": "7.0.0", "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", @@ -4381,6 +4961,64 @@ "url": "https://github.com/chalk/wrap-ansi?sponsor=1" } }, + "node_modules/wrap-ansi-cjs/node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/wrap-ansi-cjs/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true, + "license": "MIT" + }, + "node_modules/wrap-ansi-cjs/node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "license": "MIT", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/wrap-ansi-cjs/node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/wrap-ansi/node_modules/ansi-styles": { + "version": "6.2.3", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.3.tgz", + "integrity": "sha512-4Dj6M28JB+oAH8kFkTLUo+a2jwOFkuqb3yucU0CANcRRUbxS0cP0nZYCGjcc3BNXwRIsUVmDGgzawme7zvJHvg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, "node_modules/wrappy": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", @@ -4389,17 +5027,30 @@ "license": "ISC" }, "node_modules/write-file-atomic": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-4.0.2.tgz", - "integrity": "sha512-7KxauUdBmSdWnmpaGFg+ppNjKF8uNLry8LyzjauQDOVONfFLNKrKvQOxZ/VuTIcS/gge/YNahf5RIIQWTSarlg==", + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-5.0.1.tgz", + "integrity": "sha512-+QU2zd6OTD8XWIJCbffaiQeH9U73qIqafo1x6V1snCWYGJf6cVE0cDR4D8xRzcEnfI21IFrUPzPGtcPf8AC+Rw==", "dev": true, "license": "ISC", "dependencies": { "imurmurhash": "^0.1.4", - "signal-exit": "^3.0.7" + "signal-exit": "^4.0.1" }, "engines": { - "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/write-file-atomic/node_modules/signal-exit": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", + "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" } }, "node_modules/y18n": { @@ -4448,6 +5099,51 @@ "node": ">=12" } }, + "node_modules/yargs/node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/yargs/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true, + "license": "MIT" + }, + "node_modules/yargs/node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "license": "MIT", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/yargs/node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/yn": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz", diff --git a/infrastructure/package.json b/infrastructure/package.json index e99cf857..b0ca4601 100644 --- a/infrastructure/package.json +++ b/infrastructure/package.json @@ -1,6 +1,6 @@ { "name": "infrastructure", - "version": "1.0.0-beta.19", + "version": "1.0.0-beta.20", "bin": { "infrastructure": "bin/infrastructure.js" }, @@ -11,16 +11,19 @@ "cdk": "cdk" }, "devDependencies": { - "@types/jest": "29.5.14", - "@types/node": "24.10.1", - "aws-cdk": "2.1113.0", - "jest": "29.7.0", + "@types/jest": "30.0.0", + "@types/node": "25.5.0", + "aws-cdk": "2.1115.0", + "jest": "30.3.0", "ts-jest": "29.4.6", "ts-node": "10.9.2", "typescript": "5.9.3" }, "dependencies": { - "aws-cdk-lib": "2.244.0", - "constructs": "10.5.1" + "aws-cdk-lib": "2.245.0", + "constructs": "10.6.0" + }, + "overrides": { + "picomatch": ">=2.3.2" } } diff --git a/infrastructure/test/helpers/mock-config.ts b/infrastructure/test/helpers/mock-config.ts index ce28bdd8..be50c71c 100644 --- a/infrastructure/test/helpers/mock-config.ts +++ b/infrastructure/test/helpers/mock-config.ts @@ -78,6 +78,7 @@ export function createMockConfig(overrides: Partial = {}): AppConfig }, fineTuning: { enabled: false, + defaultQuotaHours: 0, }, tags: { ManagedBy: 'CDK', Environment: 'test' }, }; diff --git a/infrastructure/test/rag-ingestion-stack.test.ts b/infrastructure/test/rag-ingestion-stack.test.ts index 51cea5f5..b5fa6d56 100644 --- a/infrastructure/test/rag-ingestion-stack.test.ts +++ b/infrastructure/test/rag-ingestion-stack.test.ts @@ -82,6 +82,7 @@ describe('RagIngestionStack', () => { }, fineTuning: { enabled: false, + defaultQuotaHours: 0, }, tags: { ManagedBy: 'CDK', diff --git a/infrastructure/test/sagemaker-fine-tuning-stack.test.ts b/infrastructure/test/sagemaker-fine-tuning-stack.test.ts index d12b14fb..50abe97b 100644 --- a/infrastructure/test/sagemaker-fine-tuning-stack.test.ts +++ b/infrastructure/test/sagemaker-fine-tuning-stack.test.ts @@ -17,7 +17,7 @@ describe('SageMakerFineTuningStack', () => { let template: Template; beforeEach(() => { - const config = createMockConfig({ fineTuning: { enabled: true } }); + const config = createMockConfig({ fineTuning: { enabled: true, defaultQuotaHours: 0 } }); app = new cdk.App(); mockSsmContext(app, config, ['SageMakerFineTuningStack']); @@ -440,7 +440,7 @@ describe('SageMakerFineTuningStack', () => { test('sets RETAIN removal policy when retainDataOnDelete is true', () => { const retainConfig = createMockConfig({ - fineTuning: { enabled: true }, + fineTuning: { enabled: true, defaultQuotaHours: 0 }, retainDataOnDelete: true, }); const retainApp = new cdk.App(); diff --git a/scripts/common/sync-version.sh b/scripts/common/sync-version.sh index 275b5d1b..166bbaa9 100755 --- a/scripts/common/sync-version.sh +++ b/scripts/common/sync-version.sh @@ -36,6 +36,7 @@ fi PYPROJECT="${REPO_ROOT}/backend/pyproject.toml" FE_PKG="${REPO_ROOT}/frontend/ai.client/package.json" INFRA_PKG="${REPO_ROOT}/infrastructure/package.json" +README="${REPO_ROOT}/README.md" CHECK_MODE=false if [ "${1:-}" = "--check" ]; then @@ -62,6 +63,8 @@ sync_or_check() { PY_VER=$(grep -oP '^version\s*=\s*"\K[^"]+' "${PYPROJECT}" || echo "") FE_VER=$(grep -oP '"version"\s*:\s*"\K[^"]+' "${FE_PKG}" | head -1 || echo "") INFRA_VER=$(grep -oP '"version"\s*:\s*"\K[^"]+' "${INFRA_PKG}" | head -1 || echo "") +README_BADGE_VER=$(grep -oP 'badge/Release-v\K[^-][^?]*(?=-)' "${README}" | head -1 | sed 's/--/-/g' || echo "") +README_CURRENT_VER=$(grep -oP '\*\*Current release:\*\* v\K.*' "${README}" | tr -d '[:space:]' || echo "") # uv.lock uses PEP 440 format (e.g., 1.0.0b16 instead of 1.0.0-beta.16) UV_LOCK="${REPO_ROOT}/backend/uv.lock" @@ -77,6 +80,8 @@ if [ "${CHECK_MODE}" = true ]; then sync_or_check "${PYPROJECT}" "${PY_VER}" "backend/pyproject.toml" sync_or_check "${FE_PKG}" "${FE_VER}" "frontend/ai.client/package.json" sync_or_check "${INFRA_PKG}" "${INFRA_VER}" "infrastructure/package.json" + sync_or_check "${README}" "${README_BADGE_VER}" "README.md (badge)" + sync_or_check "${README}" "${README_CURRENT_VER}" "README.md (current release)" if [ -f "${UV_LOCK}" ]; then sync_or_check "${UV_LOCK}" "${UV_LOCK_VER}" "backend/uv.lock" "${PEP440_VERSION}" fi @@ -103,6 +108,13 @@ echo -e "${GREEN}[UPDATED]${NC} frontend/ai.client/package.json" sed -i "0,/\"version\": \"[^\"]*\"/s/\"version\": \"[^\"]*\"/\"version\": \"${VERSION}\"/" "${INFRA_PKG}" echo -e "${GREEN}[UPDATED]${NC} infrastructure/package.json" +# README.md: version badge and "Current release" text +# shields.io uses -- for literal hyphens in badge text +BADGE_VERSION=$(echo "${VERSION}" | sed 's/-/--/g') +sed -i "s|badge/Release-v[^?]*|badge/Release-v${BADGE_VERSION}-6366f1|" "${README}" +sed -i "s|\*\*Current release:\*\* v.*|\*\*Current release:\*\* v${VERSION}|" "${README}" +echo -e "${GREEN}[UPDATED]${NC} README.md (badge + current release)" + # Regenerate lockfiles so they reflect the new version echo -e "\nRegenerating lockfiles..." diff --git a/scripts/stack-rag-ingestion/deploy.sh b/scripts/stack-rag-ingestion/deploy.sh index 9dd87c6c..a508bb2e 100644 --- a/scripts/stack-rag-ingestion/deploy.sh +++ b/scripts/stack-rag-ingestion/deploy.sh @@ -93,6 +93,7 @@ main() { # Construct ECR repository URI REPO_NAME="${CDK_PROJECT_PREFIX}-rag-ingestion" ECR_URI="${CDK_AWS_ACCOUNT}.dkr.ecr.${CDK_AWS_REGION}.amazonaws.com/${REPO_NAME}" + FUNCTION_NAME="${CDK_PROJECT_PREFIX}-rag-ingestion" log_info "ECR Repository URI: ${ECR_URI}" @@ -105,13 +106,27 @@ main() { log_info "Using pre-built image with version tag: ${IMAGE_TAG}" log_info "Image URI: ${ECR_URI}:${IMAGE_TAG}" + # Force Lambda to use the latest image digest. + # CDK uses an SSM-resolved image tag (semver) which doesn't change between + # builds, so CloudFormation often reports "no changes" even when the + # underlying image has new layers. This explicit update-function-code call + # ensures the Lambda always picks up the freshly-pushed image. + log_info "Updating Lambda function code to latest image..." + aws lambda update-function-code \ + --function-name "${FUNCTION_NAME}" \ + --image-uri "${ECR_URI}:${IMAGE_TAG}" \ + --region "${CDK_AWS_REGION}" \ + --output json > /dev/null + + # Wait for the update to complete before declaring success + log_info "Waiting for Lambda function update to complete..." + aws lambda wait function-updated \ + --function-name "${FUNCTION_NAME}" \ + --region "${CDK_AWS_REGION}" + + log_success "Lambda function updated to image ${ECR_URI}:${IMAGE_TAG}" + log_success "RAG Ingestion deployment completed successfully!" - log_info "" - log_info "Next steps:" - log_info " 1. Check Lambda function status in AWS Console" - log_info " 2. Monitor CloudWatch Logs for Lambda execution" - log_info " 3. Test document upload to S3 bucket" - log_info " 4. Verify embeddings are stored in vector store" } main "$@"