diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS new file mode 100644 index 00000000..1ad83f66 --- /dev/null +++ b/.github/CODEOWNERS @@ -0,0 +1,7 @@ +# Default code owner for all files +* @thepagent + +# Explicitly protect the CODEOWNERS file itself. While the wildcard above +# already covers it, this ensures ownership is preserved even if more +# specific patterns are added later (CODEOWNERS uses last-match-wins). +/.github/CODEOWNERS @thepagent diff --git a/.github/ISSUE_TEMPLATE/bug.yml b/.github/ISSUE_TEMPLATE/bug.yml new file mode 100644 index 00000000..bc8fc2e1 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug.yml @@ -0,0 +1,22 @@ +name: Bug Report +description: Report a bug +labels: [bug, needs-triage] +body: + - type: textarea + attributes: + label: Description + description: What happened? + validations: + required: true + - type: textarea + attributes: + label: Steps to Reproduce + description: How can we reproduce this? + validations: + required: true + - type: textarea + attributes: + label: Expected Behavior + description: What did you expect to happen? + validations: + required: true diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml new file mode 100644 index 00000000..3ba13e0c --- /dev/null +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -0,0 +1 @@ +blank_issues_enabled: false diff --git a/.github/ISSUE_TEMPLATE/feature.yml b/.github/ISSUE_TEMPLATE/feature.yml new file mode 100644 index 00000000..2b873e1a --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature.yml @@ -0,0 +1,16 @@ +name: Feature Request +description: Suggest a new feature +labels: [feature, needs-triage] +body: + - type: textarea + attributes: + label: Description + description: What feature would you like? + validations: + required: true + - type: textarea + attributes: + label: Use Case + description: Why do you need this? 
+ validations: + required: true diff --git a/.github/ISSUE_TEMPLATE/guidance.yml b/.github/ISSUE_TEMPLATE/guidance.yml new file mode 100644 index 00000000..4ba921fd --- /dev/null +++ b/.github/ISSUE_TEMPLATE/guidance.yml @@ -0,0 +1,10 @@ +name: Guidance +description: Ask a question or request guidance +labels: [guidance, needs-triage] +body: + - type: textarea + attributes: + label: Question + description: What do you need help with? + validations: + required: true diff --git a/.github/copilot-instructions.md b/.github/copilot-instructions.md new file mode 100644 index 00000000..d4ab1a57 --- /dev/null +++ b/.github/copilot-instructions.md @@ -0,0 +1,67 @@ +# GitHub Copilot Code Review Instructions + +## Review Philosophy +- Only comment when you have HIGH CONFIDENCE (>80%) that an issue exists +- Be concise: one sentence per comment when possible +- Focus on actionable feedback, not observations +- Silence is preferred over noisy false positives + +## Project Context +- **OpenAB**: A lightweight ACP (Agent Client Protocol) harness bridging Discord ↔ any ACP-compatible coding CLI over stdio JSON-RPC +- **Language**: Rust 2021 edition, single binary +- **Async runtime**: tokio (full features) +- **Discord**: serenity 0.12 (gateway + cache) +- **Error handling**: `anyhow::Result` everywhere, no `unwrap()` in production paths +- **Serialization**: serde + serde_json for ACP JSON-RPC, toml for config +- **Key modules**: `acp/connection.rs` (ACP stdio bridge), `acp/pool.rs` (session pool), `discord.rs` (Discord event handler), `config.rs` (TOML config), `usage.rs` (pluggable quota runners), `reactions.rs` (emoji reactions), `stt.rs` (speech-to-text) + +## Priority Areas (Review These) + +### Correctness +- Logic errors that could cause panics or incorrect behavior +- ACP JSON-RPC protocol violations (wrong method names, missing fields, incorrect response routing) +- Race conditions in async code (especially in the reader loop and session pool) +- Resource leaks (child 
processes not killed, channels not closed) +- Off-by-one in timeout calculations +- Incorrect error propagation — `unwrap()` in non-test code is always a bug + +### Concurrency & Safety +- Multiple atomic fields updated independently — document if readers may see mixed snapshots +- `Mutex` held across `.await` points (potential deadlock) +- Session pool lock scope — `RwLock` held during I/O can stall all sessions +- Child process lifecycle — `kill_on_drop` must be set, zombie processes must not accumulate + +### ACP Protocol +- `session/request_permission` must always get a response (auto-allow or forwarded) +- `session/update` notifications must not be consumed — forward to subscriber after capture +- `usage_update`, `available_commands_update`, `tool_call`, `agent_message_chunk` must be classified correctly +- Timeout values: initialize=90s, session/new=120s, others=30s (Gemini cold-start is slow) + +### Discord API +- Messages >2000 chars will be rejected — truncate or split +- Slash command registration is per-guild, max 100 per bot +- Autocomplete responses must return within 3s (no heavy I/O) +- Ephemeral messages for errors, regular messages for results + +### Config & Deployment +- `config.toml` fields must have sensible defaults — missing `[usage]` section should not crash +- Environment variable expansion via `${VAR}` must handle missing vars gracefully +- Agent `env` map is passed to child processes — sensitive values should not be logged + +## CI Pipeline (Do Not Flag These) +- `cargo fmt --check` — formatting is enforced by CI +- `cargo clippy --all-targets -- -D warnings` — lint warnings are enforced by CI +- `cargo test` — test failures are caught by CI + +## Skip These (Low Value) +- Style/formatting — CI handles via rustfmt +- Clippy warnings — CI handles +- Minor naming suggestions unless truly confusing +- Suggestions to add comments for self-documenting code +- Logging level suggestions unless security-relevant +- Import ordering + +## Response 
Format +1. State the problem (1 sentence) +2. Why it matters (1 sentence, only if not obvious) +3. Suggested fix (code snippet or specific action) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 9ed390ba..51301bfd 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -2,26 +2,17 @@ name: Build & Release on: push: - branches: - - main - paths: - - "src/**" - - "Cargo.toml" - - "Cargo.lock" - - "Dockerfile" + tags: + - "v*" workflow_dispatch: inputs: - chart_bump: - description: 'Chart version bump type' + tag: + description: 'Version tag (e.g. v0.7.0-beta.1 or v0.7.0)' required: true - type: choice - options: - - patch - - minor - - major - default: patch + type: string + default: 'v' dry_run: - description: 'Dry run (show changes without committing)' + description: 'Dry run (build only, no push)' required: false type: boolean default: false @@ -31,20 +22,66 @@ env: IMAGE_NAME: ${{ github.repository }} jobs: - build-image: + resolve-tag: runs-on: ubuntu-latest + outputs: + tag: ${{ steps.resolve.outputs.tag }} + chart_version: ${{ steps.resolve.outputs.chart_version }} + is_prerelease: ${{ steps.resolve.outputs.is_prerelease }} + steps: + - name: Resolve and validate tag + id: resolve + run: | + if [ "${{ github.event_name }}" = "workflow_dispatch" ]; then + TAG="${{ inputs.tag }}" + else + TAG="${GITHUB_REF_NAME}" + fi + + # Validate tag format + if [[ ! "$TAG" =~ ^v[0-9]+\.[0-9]+\.[0-9]+ ]]; then + echo "::error::Invalid tag format '${TAG}'. Expected v{major}.{minor}.{patch}[-prerelease]" + exit 1 + fi + + CHART_VERSION="${TAG#v}" + + # Pre-release if version contains '-' (e.g. 
0.7.0-beta.1) + if [[ "$CHART_VERSION" == *-* ]]; then + IS_PRERELEASE="true" + else + IS_PRERELEASE="false" + fi + + echo "tag=${TAG}" >> "$GITHUB_OUTPUT" + echo "chart_version=${CHART_VERSION}" >> "$GITHUB_OUTPUT" + echo "is_prerelease=${IS_PRERELEASE}" >> "$GITHUB_OUTPUT" + + # ── Pre-release path: full build ────────────────────────────── + + build-image: + needs: resolve-tag + if: ${{ needs.resolve-tag.outputs.is_prerelease == 'true' }} + strategy: + matrix: + variant: + - { suffix: "", dockerfile: "Dockerfile", artifact: "default" } + - { suffix: "-codex", dockerfile: "Dockerfile.codex", artifact: "codex" } + - { suffix: "-claude", dockerfile: "Dockerfile.claude", artifact: "claude" } + - { suffix: "-gemini", dockerfile: "Dockerfile.gemini", artifact: "gemini" } + platform: + - { os: linux/amd64, runner: ubuntu-latest } + - { os: linux/arm64, runner: ubuntu-24.04-arm } + runs-on: ${{ matrix.platform.runner }} permissions: contents: read packages: write - outputs: - version: ${{ steps.meta.outputs.version }} - digest: ${{ steps.build.outputs.digest }} steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v6 - uses: docker/setup-buildx-action@v3 - - uses: docker/login-action@v3 + - uses: docker/login-action@v4 with: registry: ${{ env.REGISTRY }} username: ${{ github.repository_owner }} @@ -52,87 +89,172 @@ jobs: - name: Docker metadata id: meta - uses: docker/metadata-action@v5 + uses: docker/metadata-action@v6 with: - images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} - tags: | - type=sha,prefix= - type=raw,value=latest + images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}${{ matrix.variant.suffix }} - - name: Build and push + - name: Build and push by digest id: build uses: docker/build-push-action@v6 with: context: . 
- platforms: linux/amd64,linux/arm64 - push: ${{ inputs.dry_run != true }} - tags: ${{ steps.meta.outputs.tags }} - cache-from: type=gha - cache-to: type=gha,mode=max - - bump-chart: - needs: build-image - if: inputs.dry_run != true + file: ${{ matrix.variant.dockerfile }} + platforms: ${{ matrix.platform.os }} + outputs: type=image,name=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}${{ matrix.variant.suffix }},push-by-digest=true,name-canonical=true,push=${{ inputs.dry_run != true }} + cache-from: type=gha,scope=${{ matrix.variant.suffix }}-${{ matrix.platform.os }} + cache-to: type=gha,scope=${{ matrix.variant.suffix }}-${{ matrix.platform.os }},mode=max + + - name: Export digest + if: inputs.dry_run != true + run: | + mkdir -p /tmp/digests + digest="${{ steps.build.outputs.digest }}" + touch "/tmp/digests/${digest#sha256:}" + + - name: Upload digest + if: inputs.dry_run != true + uses: actions/upload-artifact@v4 + with: + name: digests-${{ matrix.variant.artifact }}-${{ matrix.platform.runner }} + path: /tmp/digests/* + retention-days: 1 + + merge-manifests: + needs: [resolve-tag, build-image] + if: ${{ inputs.dry_run != true && needs.resolve-tag.outputs.is_prerelease == 'true' }} + strategy: + matrix: + variant: + - { suffix: "", artifact: "default" } + - { suffix: "-codex", artifact: "codex" } + - { suffix: "-claude", artifact: "claude" } + - { suffix: "-gemini", artifact: "gemini" } runs-on: ubuntu-latest permissions: - contents: write + contents: read packages: write - pages: write steps: - - name: Generate App token - id: app-token - uses: actions/create-github-app-token@v1 + - name: Download digests + uses: actions/download-artifact@v4 with: - app-id: ${{ secrets.APP_ID }} - private-key: ${{ secrets.APP_PRIVATE_KEY }} + path: /tmp/digests + pattern: digests-${{ matrix.variant.artifact }}-* + merge-multiple: true + + - uses: docker/setup-buildx-action@v3 - - uses: actions/checkout@v4 + - uses: docker/login-action@v4 with: - fetch-depth: 0 + registry: ${{ 
env.REGISTRY }} + username: ${{ github.repository_owner }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Docker metadata + id: meta + uses: docker/metadata-action@v6 + with: + images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}${{ matrix.variant.suffix }} + tags: | + type=sha,prefix= + type=semver,pattern={{version}},value=${{ needs.resolve-tag.outputs.tag }} - - name: Get current chart version - id: current + - name: Create manifest list + working-directory: /tmp/digests run: | - chart_version=$(grep '^version:' charts/agent-broker/Chart.yaml | awk '{print $2}') - echo "chart_version=$chart_version" >> "$GITHUB_OUTPUT" + docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \ + $(printf '${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}${{ matrix.variant.suffix }}@sha256:%s ' *) + + # ── Stable path: promote pre-release image (no rebuild) ────── - - name: Bump chart version - id: bump + promote-stable: + needs: resolve-tag + if: ${{ inputs.dry_run != true && needs.resolve-tag.outputs.is_prerelease == 'false' }} + strategy: + matrix: + variant: + - { suffix: "" } + - { suffix: "-codex" } + - { suffix: "-claude" } + - { suffix: "-gemini" } + runs-on: ubuntu-latest + permissions: + contents: read + packages: write + steps: + - uses: actions/checkout@v6 + with: + fetch-depth: 0 + + - uses: docker/setup-buildx-action@v3 + + - uses: docker/login-action@v4 + with: + registry: ${{ env.REGISTRY }} + username: ${{ github.repository_owner }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Find pre-release image + id: find-prerelease run: | - current="${{ steps.current.outputs.chart_version }}" - IFS='.' 
read -r major minor patch <<< "$current" - bump_type="${{ inputs.chart_bump }}" - bump_type="${bump_type:-patch}" - case "$bump_type" in - major) major=$((major + 1)); minor=0; patch=0 ;; - minor) minor=$((minor + 1)); patch=0 ;; - patch) patch=$((patch + 1)) ;; - esac - new_version="${major}.${minor}.${patch}" - echo "new_version=$new_version" >> "$GITHUB_OUTPUT" - - - name: Update Chart.yaml and values.yaml + CHART_VERSION="${{ needs.resolve-tag.outputs.chart_version }}" + # Find latest pre-release tag matching this version (e.g. v0.7.0-beta.1) + PRERELEASE_TAG=$(git tag -l "v${CHART_VERSION}-*" --sort=-v:refname | head -1) + if [ -z "$PRERELEASE_TAG" ]; then + echo "::error::No pre-release tag found for v${CHART_VERSION}-*. Run a pre-release build first." + exit 1 + fi + PRERELEASE_VERSION="${PRERELEASE_TAG#v}" + echo "Found pre-release: ${PRERELEASE_TAG} (${PRERELEASE_VERSION})" + echo "prerelease_version=${PRERELEASE_VERSION}" >> "$GITHUB_OUTPUT" + + - name: Verify pre-release image exists run: | - SHORT_SHA="${{ github.sha }}" - SHORT_SHA="${SHORT_SHA:0:7}" - sed -i "s/^version: .*/version: ${{ steps.bump.outputs.new_version }}/" charts/agent-broker/Chart.yaml - sed -i "s/^appVersion: .*/appVersion: \"${SHORT_SHA}\"/" charts/agent-broker/Chart.yaml - sed -i "s|repository: .*|repository: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}|" charts/agent-broker/values.yaml - sed -i "s/tag: .*/tag: \"${SHORT_SHA}\"/" charts/agent-broker/values.yaml - - - name: Commit and push - env: - GH_TOKEN: ${{ steps.app-token.outputs.token }} + IMAGE="${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}${{ matrix.variant.suffix }}" + PRERELEASE_VERSION="${{ steps.find-prerelease.outputs.prerelease_version }}" + echo "Checking ${IMAGE}:${PRERELEASE_VERSION} ..." 
+ docker buildx imagetools inspect "${IMAGE}:${PRERELEASE_VERSION}" || \ + { echo "::error::Image ${IMAGE}:${PRERELEASE_VERSION} not found — build the pre-release first"; exit 1; } + + - name: Promote to stable tags run: | - git config user.name "thepagent" - git config user.email "thepagent@users.noreply.github.com" - git remote set-url origin https://x-access-token:${GH_TOKEN}@github.com/${{ github.repository }} - git add charts/agent-broker/Chart.yaml charts/agent-broker/values.yaml - git commit -m "chore: release chart ${{ steps.bump.outputs.new_version }}" - git push - - - name: Trigger chart release - env: - GH_TOKEN: ${{ steps.app-token.outputs.token }} + IMAGE="${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}${{ matrix.variant.suffix }}" + PRERELEASE_VERSION="${{ steps.find-prerelease.outputs.prerelease_version }}" + CHART_VERSION="${{ needs.resolve-tag.outputs.chart_version }}" + MAJOR_MINOR="${CHART_VERSION%.*}" + + echo "Promoting ${IMAGE}:${PRERELEASE_VERSION} → ${CHART_VERSION}, ${MAJOR_MINOR}, latest" + docker buildx imagetools create \ + -t "${IMAGE}:${CHART_VERSION}" \ + -t "${IMAGE}:${MAJOR_MINOR}" \ + -t "${IMAGE}:latest" \ + "${IMAGE}:${PRERELEASE_VERSION}" + + # ── Chart release (runs after either path) ─────────────────── + + release-chart: + needs: [resolve-tag, merge-manifests, promote-stable] + if: >- + ${{ always() && inputs.dry_run != true && + needs.resolve-tag.result == 'success' && + (needs.merge-manifests.result == 'success' || needs.promote-stable.result == 'success') }} + runs-on: ubuntu-latest + permissions: + contents: read + packages: write + steps: + - uses: actions/checkout@v6 + + - name: Install Helm + uses: azure/setup-helm@v4 + + - uses: docker/login-action@v4 + with: + registry: ${{ env.REGISTRY }} + username: ${{ github.repository_owner }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Build and push chart to OCI run: | - gh workflow run release.yml --repo ${{ github.repository }} + CHART_VERSION="${{ 
needs.resolve-tag.outputs.chart_version }}" + helm package charts/openab + helm push openab-${CHART_VERSION}.tgz oci://ghcr.io/${{ github.repository_owner }}/charts diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 00000000..4239edd9 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,33 @@ +name: CI + +on: + pull_request: + paths: + - "src/**" + - "Cargo.toml" + - "Cargo.lock" + - "Dockerfile*" + +env: + CARGO_TERM_COLOR: always + +jobs: + check: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v6 + + - uses: dtolnay/rust-toolchain@stable + with: + components: clippy + + - uses: Swatinem/rust-cache@v2 + + - name: cargo check + run: cargo check + + - name: cargo clippy + run: cargo clippy -- -D warnings + + - name: cargo test + run: cargo test diff --git a/.github/workflows/issue-triage.yml b/.github/workflows/issue-triage.yml new file mode 100644 index 00000000..bfcd524f --- /dev/null +++ b/.github/workflows/issue-triage.yml @@ -0,0 +1,20 @@ +name: Issue Triage +on: + issues: + types: [opened] + workflow_dispatch: + inputs: + issue_number: + description: "Issue number to add needs-triage label" + required: true + type: number +jobs: + add-label: + runs-on: ubuntu-latest + permissions: + issues: write + steps: + - run: gh issue edit "$ISSUE_NUMBER" --add-label needs-triage --repo ${{ github.repository }} + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + ISSUE_NUMBER: ${{ github.event.issue.number || github.event.inputs.issue_number }} diff --git a/.github/workflows/release-pr.yml b/.github/workflows/release-pr.yml new file mode 100644 index 00000000..b87d63f4 --- /dev/null +++ b/.github/workflows/release-pr.yml @@ -0,0 +1,87 @@ +name: Release PR + +on: + workflow_dispatch: + inputs: + version: + description: "Version (leave empty for auto bump, or specify e.g. 
0.8.0-beta.1)" + required: false + type: string + bump: + description: "Auto bump type (ignored when version is specified)" + required: false + type: choice + options: + - patch + - minor + - major + default: patch + +jobs: + create-release-pr: + runs-on: ubuntu-latest + permissions: + contents: write + pull-requests: write + steps: + - name: Generate App token + id: app-token + uses: actions/create-github-app-token@v3 + with: + client-id: ${{ secrets.APP_ID }} + private-key: ${{ secrets.APP_PRIVATE_KEY }} + + - uses: actions/checkout@v6 + with: + token: ${{ steps.app-token.outputs.token }} + fetch-depth: 0 + + - name: Resolve version + id: version + run: | + if [ -n "${{ inputs.version }}" ]; then + VERSION="${{ inputs.version }}" + else + CURRENT=$(grep '^version:' charts/openab/Chart.yaml | awk '{print $2}') + BASE="${CURRENT%%-*}" + IFS='.' read -r major minor patch <<< "$BASE" + case "${{ inputs.bump }}" in + major) major=$((major + 1)); minor=0; patch=0 ;; + minor) minor=$((minor + 1)); patch=0 ;; + patch) patch=$((patch + 1)) ;; + esac + VERSION="${major}.${minor}.${patch}-beta.1" + fi + echo "version=${VERSION}" >> "$GITHUB_OUTPUT" + echo "::notice::Release version: ${VERSION}" + + # Determine stable version (strip pre-release suffix) + STABLE="${VERSION%%-*}" + echo "stable=${STABLE}" >> "$GITHUB_OUTPUT" + + - name: Update version files + run: | + VERSION="${{ steps.version.outputs.version }}" + STABLE="${{ steps.version.outputs.stable }}" + # Chart.yaml always gets the full version (beta or stable) + sed -i "s/^version: .*/version: ${VERSION}/" charts/openab/Chart.yaml + sed -i "s/^appVersion: .*/appVersion: \"${VERSION}\"/" charts/openab/Chart.yaml + # Cargo.toml only gets stable version (main stays clean) + sed -i "s/^version = .*/version = \"${STABLE}\"/" Cargo.toml + + - name: Create release PR + env: + GH_TOKEN: ${{ steps.app-token.outputs.token }} + run: | + VERSION="${{ steps.version.outputs.version }}" + BRANCH="release/v${VERSION}" + git config 
user.name "openab-app[bot]" + git config user.email "274185012+openab-app[bot]@users.noreply.github.com" + git checkout -b "$BRANCH" + git add -A + git commit -m "release: v${VERSION}" + git push origin "$BRANCH" + gh pr create \ + --title "release: v${VERSION}" \ + --body "Merge this PR to tag \`v${VERSION}\` and trigger the build pipeline." \ + --base main --head "$BRANCH" diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index abe26f17..c2be4c13 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -6,7 +6,7 @@ on: branches: - main paths: - - "charts/agent-broker/Chart.yaml" + - "charts/openab/Chart.yaml" jobs: release: @@ -14,10 +14,9 @@ jobs: permissions: contents: write pages: write - packages: write steps: - name: Checkout - uses: actions/checkout@v4 + uses: actions/checkout@v6 with: fetch-depth: 0 @@ -29,13 +28,6 @@ jobs: - name: Install Helm uses: azure/setup-helm@v4 - - name: Login to GHCR - uses: docker/login-action@v3 - with: - registry: ghcr.io - username: ${{ github.repository_owner }} - password: ${{ secrets.GITHUB_TOKEN }} - - name: Run chart-releaser uses: helm/chart-releaser-action@v1.6.0 with: @@ -43,21 +35,13 @@ jobs: env: CR_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - name: Push chart to OCI registry - run: | - CHART=charts/agent-broker - NAME=$(grep '^name:' ${CHART}/Chart.yaml | awk '{print $2}') - VERSION=$(grep '^version:' ${CHART}/Chart.yaml | awk '{print $2}') - helm package ${CHART} - helm push ${NAME}-${VERSION}.tgz oci://ghcr.io/${{ github.repository_owner }} - - - name: Append OCI install instructions to release notes + - name: Append install instructions to release notes env: GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: | OWNER="${{ github.repository_owner }}" REPO="${{ github.event.repository.name }}" - CHART=charts/agent-broker + CHART=charts/openab NAME=$(grep '^name:' ${CHART}/Chart.yaml | awk '{print $2}') VERSION=$(grep '^version:' ${CHART}/Chart.yaml | awk '{print $2}') 
APP_VERSION=$(grep '^appVersion:' ${CHART}/Chart.yaml | awk '{print $2}' | tr -d '"') @@ -78,14 +62,14 @@ jobs: ### Helm Repository (GitHub Pages) \`\`\`bash - helm repo add agent-broker https://${OWNER}.github.io/${REPO} + helm repo add openab https://${OWNER}.github.io/${REPO} helm repo update - helm install agent-broker agent-broker/agent-broker --version ${VERSION} + helm install openab openab/openab --version ${VERSION} \`\`\` ### OCI Registry \`\`\`bash - helm install agent-broker oci://ghcr.io/${OWNER}/agent-broker --version ${VERSION} + helm install openab oci://ghcr.io/${OWNER}/charts/openab --version ${VERSION} \`\`\` EOF diff --git a/.github/workflows/tag-on-merge.yml b/.github/workflows/tag-on-merge.yml new file mode 100644 index 00000000..e414d933 --- /dev/null +++ b/.github/workflows/tag-on-merge.yml @@ -0,0 +1,38 @@ +name: Tag on Release PR merge + +on: + pull_request: + types: [closed] + branches: [main] + +jobs: + tag: + if: github.event.pull_request.merged == true && startsWith(github.event.pull_request.head.ref, 'release/') + runs-on: ubuntu-latest + permissions: + contents: write + steps: + - name: Generate App token + id: app-token + uses: actions/create-github-app-token@v3 + with: + client-id: ${{ secrets.APP_ID }} + private-key: ${{ secrets.APP_PRIVATE_KEY }} + + - uses: actions/checkout@v6 + with: + token: ${{ steps.app-token.outputs.token }} + + - name: Create and push tag + run: | + # release/v0.8.0-beta.1 → v0.8.0-beta.1 + VERSION="${GITHUB_HEAD_REF#release/}" + if [[ ! "$VERSION" =~ ^v[0-9]+\.[0-9]+\.[0-9]+ ]]; then + echo "::error::Invalid version format '${VERSION}'. 
Expected v{major}.{minor}.{patch}[-prerelease]" + exit 1 + fi + git config user.name "openab-app[bot]" + git config user.email "274185012+openab-app[bot]@users.noreply.github.com" + git tag "$VERSION" + git push origin "$VERSION" + echo "::notice::Tagged ${VERSION}" diff --git a/Cargo.lock b/Cargo.lock index ea342114..7c98b754 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8,23 +8,6 @@ version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa" -[[package]] -name = "agent-broker" -version = "0.1.0" -dependencies = [ - "anyhow", - "rand 0.8.5", - "regex", - "serde", - "serde_json", - "serenity", - "tokio", - "toml", - "tracing", - "tracing-subscriber", - "uuid", -] - [[package]] name = "aho-corasick" version = "1.1.4" @@ -66,6 +49,12 @@ version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" +[[package]] +name = "autocfg" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8" + [[package]] name = "base64" version = "0.22.1" @@ -93,12 +82,24 @@ version = "3.20.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5d20789868f4b01b2f2caec9f5c4e0213b41e3e5702a50157d699ae31ced2fcb" +[[package]] +name = "bytemuck" +version = "1.25.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8efb64bd706a16a1bdde310ae86b351e4d21550d98d056f22f8a7f7a2183fec" + [[package]] name = "byteorder" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" +[[package]] +name = "byteorder-lite" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"8f1fe948ff07f4bd06c30984e69f5b4899c516a3ef74f34df92a2df2ab535495" + [[package]] name = "bytes" version = "1.11.1" @@ -107,9 +108,9 @@ checksum = "1e748733b7cbc798e1434b6ac524f0c1ff2ab456fe201501e6497c8417a4fc33" [[package]] name = "cc" -version = "1.2.57" +version = "1.2.60" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a0dd1ca384932ff3641c8718a02769f1698e7563dc6974ffd03346116310423" +checksum = "43c5703da9466b66a946814e1adf53ea2c90f10063b86290cc9eb67ce3478a20" dependencies = [ "find-msvc-tools", "shlex", @@ -127,6 +128,12 @@ version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" +[[package]] +name = "color_quant" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d7b894f5411737b7867f4827955924d7c254fc9f4d91a6aad6b097804b1018b" + [[package]] name = "cpufeatures" version = "0.2.17" @@ -222,6 +229,15 @@ dependencies = [ "windows-sys 0.61.2", ] +[[package]] +name = "fdeflate" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e6853b52649d4ac5c0bd02320cddc5ba956bdb407c4b75a2c6b75bf51500f8c" +dependencies = [ + "simd-adler32", +] + [[package]] name = "find-msvc-tools" version = "0.1.9" @@ -379,6 +395,16 @@ dependencies = [ "wasip3", ] +[[package]] +name = "gif" +version = "0.14.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee8cfcc411d9adbbaba82fb72661cc1bcca13e8bba98b364e62b2dba8f960159" +dependencies = [ + "color_quant", + "weezl", +] + [[package]] name = "hashbrown" version = "0.14.5" @@ -396,9 +422,9 @@ dependencies = [ [[package]] name = "hashbrown" -version = "0.16.1" +version = "0.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100" +checksum = 
"4f467dd6dccf739c208452f8014c75c18bb8301b050ad1cfb27153803edb0f51" [[package]] name = "heck" @@ -447,9 +473,9 @@ checksum = "6dbf3de79e51f3d586ab4cb9d5c3e2c14aa28ed23d180cf89b4df0454a69cc87" [[package]] name = "hyper" -version = "1.8.1" +version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2ab2d4f250c3d7b1c9fcdff1cece94ea4e2dfbec68614f7b87cb205f24ca9d11" +checksum = "6299f016b246a94207e63da54dbe807655bf9e00044f73ded42c3ac5305fbcca" dependencies = [ "atomic-waker", "bytes", @@ -460,7 +486,6 @@ dependencies = [ "httparse", "itoa", "pin-project-lite", - "pin-utils", "smallvec", "tokio", "want", @@ -508,12 +533,13 @@ dependencies = [ [[package]] name = "icu_collections" -version = "2.1.1" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c6b649701667bbe825c3b7e6388cb521c23d88644678e83c0c4d0a621a34b43" +checksum = "2984d1cd16c883d7935b9e07e44071dca8d917fd52ecc02c04d5fa0b5a3f191c" dependencies = [ "displaydoc", "potential_utf", + "utf8_iter", "yoke", "zerofrom", "zerovec", @@ -521,9 +547,9 @@ dependencies = [ [[package]] name = "icu_locale_core" -version = "2.1.1" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "edba7861004dd3714265b4db54a3c390e880ab658fec5f7db895fae2046b5bb6" +checksum = "92219b62b3e2b4d88ac5119f8904c10f8f61bf7e95b640d25ba3075e6cac2c29" dependencies = [ "displaydoc", "litemap", @@ -534,9 +560,9 @@ dependencies = [ [[package]] name = "icu_normalizer" -version = "2.1.1" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f6c8828b67bf8908d82127b2054ea1b4427ff0230ee9141c54251934ab1b599" +checksum = "c56e5ee99d6e3d33bd91c5d85458b6005a22140021cc324cea84dd0e72cff3b4" dependencies = [ "icu_collections", "icu_normalizer_data", @@ -548,15 +574,15 @@ dependencies = [ [[package]] name = "icu_normalizer_data" -version = "2.1.1" +version = "2.2.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "7aedcccd01fc5fe81e6b489c15b247b8b0690feb23304303a9e560f37efc560a" +checksum = "da3be0ae77ea334f4da67c12f149704f19f81d1adf7c51cf482943e84a2bad38" [[package]] name = "icu_properties" -version = "2.1.2" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "020bfc02fe870ec3a66d93e677ccca0562506e5872c650f893269e08615d74ec" +checksum = "bee3b67d0ea5c2cca5003417989af8996f8604e34fb9ddf96208a033901e70de" dependencies = [ "icu_collections", "icu_locale_core", @@ -568,15 +594,15 @@ dependencies = [ [[package]] name = "icu_properties_data" -version = "2.1.2" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "616c294cf8d725c6afcd8f55abc17c56464ef6211f9ed59cccffe534129c77af" +checksum = "8e2bbb201e0c04f7b4b3e14382af113e17ba4f63e2c9d2ee626b720cbce54a14" [[package]] name = "icu_provider" -version = "2.1.1" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85962cf0ce02e1e0a629cc34e7ca3e373ce20dda4c4d7294bbd0bf1fdb59e614" +checksum = "139c4cf31c8b5f33d7e199446eff9c1e02decfc2f0eec2c8d71f65befa45b421" dependencies = [ "displaydoc", "icu_locale_core", @@ -614,14 +640,42 @@ dependencies = [ "icu_properties", ] +[[package]] +name = "image" +version = "0.25.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85ab80394333c02fe689eaf900ab500fbd0c2213da414687ebf995a65d5a6104" +dependencies = [ + "bytemuck", + "byteorder-lite", + "color_quant", + "gif", + "image-webp", + "moxcms", + "num-traits", + "png", + "zune-core", + "zune-jpeg", +] + +[[package]] +name = "image-webp" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "525e9ff3e1a4be2fbea1fdf0e98686a6d98b4d8f937e1bf7402245af1909e8c3" +dependencies = [ + "byteorder-lite", + "quick-error", +] + [[package]] name = "indexmap" -version = "2.13.0" +version = "2.14.0" 
source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7714e70437a7dc3ac8eb7e6f8df75fd8eb422675fc7678aff7364301092b1017" +checksum = "d466e9454f08e4a911e14806c24e16fba1b4c121d1ea474396f396069cf949d9" dependencies = [ "equivalent", - "hashbrown 0.16.1", + "hashbrown 0.17.0", "serde", "serde_core", ] @@ -634,9 +688,9 @@ checksum = "d98f6fed1fde3f8c21bc40a1abb88dd75e67924f9cffc3ef95607bad8017f8e2" [[package]] name = "iri-string" -version = "0.7.11" +version = "0.7.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d8e7418f59cc01c88316161279a7f665217ae316b388e58a0d10e29f54f1e5eb" +checksum = "25e659a4bb38e810ebc252e53b5814ff908a8c58c2a9ce2fae1bbec24cbf4e20" dependencies = [ "memchr", "serde", @@ -650,10 +704,12 @@ checksum = "8f42a60cbdf9a97f5d2305f08a87dc4e09308d1276d28c869c684d7777685682" [[package]] name = "js-sys" -version = "0.3.91" +version = "0.3.95" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b49715b7073f385ba4bc528e5747d02e66cb39c6146efb66b781f131f0fb399c" +checksum = "2964e92d1d9dc3364cae4d718d93f227e3abb088e747d92e0395bfdedf1c12ca" dependencies = [ + "cfg-if", + "futures-util", "once_cell", "wasm-bindgen", ] @@ -672,15 +728,15 @@ checksum = "09edd9e8b54e49e587e4f6295a7d29c3ea94d469cb40ab8ca70b288248a81db2" [[package]] name = "libc" -version = "0.2.183" +version = "0.2.184" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5b646652bf6661599e1da8901b3b9522896f01e736bad5f723fe7a3a27f899d" +checksum = "48f5d2a454e16a5ea0f4ced81bd44e4cfc7bd3a507b61887c99fd3538b28e4af" [[package]] name = "litemap" -version = "0.8.1" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6373607a59f0be73a39b6fe456b8192fcc3585f602af20751600e974dd455e77" +checksum = "92daf443525c4cce67b150400bc2316076100ce0b3686209eb8cf3c31612e6f0" [[package]] name = "lock_api" @@ -746,15 +802,25 @@ dependencies = [ [[package]] name = "mio" 
-version = "1.1.1" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a69bcab0ad47271a0234d9422b131806bf3968021e5dc9328caf2d4cd58557fc" +checksum = "50b7e5b27aa02a74bac8c3f23f448f8d87ff11f92d3aac1a6ed369ee08cc56c1" dependencies = [ "libc", "wasi", "windows-sys 0.61.2", ] +[[package]] +name = "moxcms" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb85c154ba489f01b25c0d36ae69a87e4a1c73a72631fc6c0eb6dde34a73e44b" +dependencies = [ + "num-traits", + "pxfm", +] + [[package]] name = "nu-ansi-term" version = "0.50.3" @@ -766,9 +832,18 @@ dependencies = [ [[package]] name = "num-conv" -version = "0.2.0" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf97ec579c3c42f953ef76dbf8d55ac91fb219dde70e49aa4a6b7d74e9919050" +checksum = "c6673768db2d862beb9b39a78fdcb1a69439615d5794a1be50caa9bc92c81967" + +[[package]] +name = "num-traits" +version = "0.2.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" +dependencies = [ + "autocfg", +] [[package]] name = "once_cell" @@ -776,6 +851,26 @@ version = "1.21.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9f7c3e4beb33f85d45ae3e3a1792185706c8e16d043238c593331cc7cd313b50" +[[package]] +name = "openab" +version = "0.6.6" +dependencies = [ + "anyhow", + "base64", + "image", + "rand 0.8.5", + "regex", + "reqwest", + "serde", + "serde_json", + "serenity", + "tokio", + "toml", + "tracing", + "tracing-subscriber", + "uuid", +] + [[package]] name = "parking_lot" version = "0.12.5" @@ -812,16 +907,23 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a89322df9ebe1c1578d689c92318e070967d1042b512afbe49518723f4e6d5cd" [[package]] -name = "pin-utils" -version = "0.1.0" +name = "png" +version = "0.18.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" +checksum = "60769b8b31b2a9f263dae2776c37b1b28ae246943cf719eb6946a1db05128a61" +dependencies = [ + "bitflags", + "crc32fast", + "fdeflate", + "flate2", + "miniz_oxide", +] [[package]] name = "potential_utf" -version = "0.1.4" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b73949432f5e2a09657003c25bca5e19a0e9c84f8058ca374f49e0ebe605af77" +checksum = "0103b1cef7ec0cf76490e969665504990193874ea05c85ff9bab8b911d0a0564" dependencies = [ "zerovec", ] @@ -860,6 +962,18 @@ dependencies = [ "unicode-ident", ] +[[package]] +name = "pxfm" +version = "0.1.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b5a041e753da8b807c9255f28de81879c78c876392ff2469cde94799b2896b9d" + +[[package]] +name = "quick-error" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a993555f31e5a609f617c12db6250dedcac1b0a85076912c436e6fc9b2c8e6a3" + [[package]] name = "quinn" version = "0.11.9" @@ -1091,9 +1205,9 @@ dependencies = [ [[package]] name = "rustc-hash" -version = "2.1.1" +version = "2.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d" +checksum = "94300abf3f1ae2e2b8ffb7b58043de3d399c73fa6f4b73826402a5c457614dbe" [[package]] name = "rustls" @@ -1118,7 +1232,7 @@ dependencies = [ "once_cell", "ring", "rustls-pki-types", - "rustls-webpki 0.103.10", + "rustls-webpki 0.103.11", "subtle", "zeroize", ] @@ -1146,9 +1260,9 @@ dependencies = [ [[package]] name = "rustls-webpki" -version = "0.103.10" +version = "0.103.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df33b2b81ac578cabaf06b89b0631153a3f416b0a886e8a7a1707fb51abbd1ef" +checksum = "20a6af516fea4b20eccceaf166e8aa666ac996208e8a644ce3ef5aa783bc7cd4" 
dependencies = [ "ring", "rustls-pki-types", @@ -1185,9 +1299,9 @@ dependencies = [ [[package]] name = "semver" -version = "1.0.27" +version = "1.0.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d767eb0aabc880b29956c35734170f26ed551a859dbd361d140cdbeca61ab1e2" +checksum = "8a7852d02fc848982e0c167ef163aaff9cd91dc640ba85e263cb1ce46fae51cd" [[package]] name = "serde" @@ -1331,9 +1445,9 @@ dependencies = [ [[package]] name = "simd-adler32" -version = "0.3.8" +version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e320a6c5ad31d271ad523dcf3ad13e2767ad8b1cb8f047f75a8aeaf8da139da2" +checksum = "703d5c7ef118737c72f1af64ad2f6f8c5e1921f818cdcb97b8fe6fc69bf66214" [[package]] name = "slab" @@ -1482,9 +1596,9 @@ dependencies = [ [[package]] name = "tinystr" -version = "0.8.2" +version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42d3e9c45c09de15d06dd8acf5f4e0e399e85927b7f00711024eb7ae10fa4869" +checksum = "c8323304221c2a851516f22236c5722a72eaa19749016521d6dff0824447d96d" dependencies = [ "displaydoc", "zerovec", @@ -1507,9 +1621,9 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.50.0" +version = "1.51.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "27ad5e34374e03cfffefc301becb44e9dc3c17584f414349ebe29ed26661822d" +checksum = "f66bf9585cda4b724d3e78ab34b73fb2bbaba9011b9bfdf69dc836382ea13b8c" dependencies = [ "bytes", "libc", @@ -1524,9 +1638,9 @@ dependencies = [ [[package]] name = "tokio-macros" -version = "2.6.1" +version = "2.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c55a2eff8b69ce66c84f85e1da1c233edc36ceb85a2058d11b0d6a3c7e7569c" +checksum = "385a6cb71ab9ab790c5fe8d67f1645e6c450a7ce006a33de03daa956cf70a496" dependencies = [ "proc-macro2", "quote", @@ -1834,9 +1948,9 @@ checksum = 
"b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" [[package]] name = "uuid" -version = "1.22.0" +version = "1.23.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a68d3c8f01c0cfa54a75291d83601161799e4a89a39e0929f4b0354d88757a37" +checksum = "5ac8b6f42ead25368cf5b098aeb3dc8a1a2c05a3eee8a9a1a68c640edbfc79d9" dependencies = [ "getrandom 0.4.2", "js-sys", @@ -1890,9 +2004,9 @@ dependencies = [ [[package]] name = "wasm-bindgen" -version = "0.2.114" +version = "0.2.118" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6532f9a5c1ece3798cb1c2cfdba640b9b3ba884f5db45973a6f442510a87d38e" +checksum = "0bf938a0bacb0469e83c1e148908bd7d5a6010354cf4fb73279b7447422e3a89" dependencies = [ "cfg-if", "once_cell", @@ -1903,23 +2017,19 @@ dependencies = [ [[package]] name = "wasm-bindgen-futures" -version = "0.4.64" +version = "0.4.68" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e9c5522b3a28661442748e09d40924dfb9ca614b21c00d3fd135720e48b67db8" +checksum = "f371d383f2fb139252e0bfac3b81b265689bf45b6874af544ffa4c975ac1ebf8" dependencies = [ - "cfg-if", - "futures-util", "js-sys", - "once_cell", "wasm-bindgen", - "web-sys", ] [[package]] name = "wasm-bindgen-macro" -version = "0.2.114" +version = "0.2.118" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "18a2d50fcf105fb33bb15f00e7a77b772945a2ee45dcf454961fd843e74c18e6" +checksum = "eeff24f84126c0ec2db7a449f0c2ec963c6a49efe0698c4242929da037ca28ed" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -1927,9 +2037,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.114" +version = "0.2.118" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "03ce4caeaac547cdf713d280eda22a730824dd11e6b8c3ca9e42247b25c631e3" +checksum = "9d08065faf983b2b80a79fd87d8254c409281cf7de75fc4b773019824196c904" dependencies = [ "bumpalo", "proc-macro2", @@ -1940,9 
+2050,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-shared" -version = "0.2.114" +version = "0.2.118" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75a326b8c223ee17883a4251907455a2431acc2791c98c26279376490c378c16" +checksum = "5fd04d9e306f1907bd13c6361b5c6bfc7b3b3c095ed3f8a9246390f8dbdee129" dependencies = [ "unicode-ident", ] @@ -1996,9 +2106,9 @@ dependencies = [ [[package]] name = "web-sys" -version = "0.3.91" +version = "0.3.95" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "854ba17bb104abfb26ba36da9729addc7ce7f06f5c0f90f3c391f8461cca21f9" +checksum = "4f2dfbb17949fa2088e5d39408c48368947b86f7834484e87b73de55bc14d97d" dependencies = [ "js-sys", "wasm-bindgen", @@ -2032,6 +2142,12 @@ dependencies = [ "rustls-pki-types", ] +[[package]] +name = "weezl" +version = "0.1.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a28ac98ddc8b9274cb41bb4d9d4d5c425b6020c50c46f25559911905610b4a88" + [[package]] name = "windows-link" version = "0.2.1" @@ -2293,15 +2409,15 @@ dependencies = [ [[package]] name = "writeable" -version = "0.6.2" +version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9edde0db4769d2dc68579893f2306b26c6ecfbe0ef499b013d731b7b9247e0b9" +checksum = "1ffae5123b2d3fc086436f8834ae3ab053a283cfac8fe0a0b8eaae044768a4c4" [[package]] name = "yoke" -version = "0.8.1" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72d6e5c6afb84d73944e5cedb052c4680d5657337201555f9f2a16b7406d4954" +checksum = "abe8c5fda708d9ca3df187cae8bfb9ceda00dd96231bed36e445a1a48e66f9ca" dependencies = [ "stable_deref_trait", "yoke-derive", @@ -2310,9 +2426,9 @@ dependencies = [ [[package]] name = "yoke-derive" -version = "0.8.1" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b659052874eb698efe5b9e8cf382204678a0086ebf46982b79d6ca3182927e5d" +checksum = 
"de844c262c8848816172cef550288e7dc6c7b7814b4ee56b3e1553f275f1858e" dependencies = [ "proc-macro2", "quote", @@ -2322,18 +2438,18 @@ dependencies = [ [[package]] name = "zerocopy" -version = "0.8.47" +version = "0.8.48" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "efbb2a062be311f2ba113ce66f697a4dc589f85e78a4aea276200804cea0ed87" +checksum = "eed437bf9d6692032087e337407a86f04cd8d6a16a37199ed57949d415bd68e9" dependencies = [ "zerocopy-derive", ] [[package]] name = "zerocopy-derive" -version = "0.8.47" +version = "0.8.48" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e8bc7269b54418e7aeeef514aa68f8690b8c0489a06b0136e5f57c4c5ccab89" +checksum = "70e3cd084b1788766f53af483dd21f93881ff30d7320490ec3ef7526d203bad4" dependencies = [ "proc-macro2", "quote", @@ -2342,18 +2458,18 @@ dependencies = [ [[package]] name = "zerofrom" -version = "0.1.6" +version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50cc42e0333e05660c3587f3bf9d0478688e15d870fab3346451ce7f8c9fbea5" +checksum = "69faa1f2a1ea75661980b013019ed6687ed0e83d069bc1114e2cc74c6c04c4df" dependencies = [ "zerofrom-derive", ] [[package]] name = "zerofrom-derive" -version = "0.1.6" +version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" +checksum = "11532158c46691caf0f2593ea8358fed6bbf68a0315e80aae9bd41fbade684a1" dependencies = [ "proc-macro2", "quote", @@ -2369,9 +2485,9 @@ checksum = "b97154e67e32c85465826e8bcc1c59429aaaf107c1e4a9e53c8d8ccd5eff88d0" [[package]] name = "zerotrie" -version = "0.2.3" +version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a59c17a5562d507e4b54960e8569ebee33bee890c70aa3fe7b97e85a9fd7851" +checksum = "0f9152d31db0792fa83f70fb2f83148effb5c1f5b8c7686c3459e361d9bc20bf" dependencies = [ "displaydoc", "yoke", @@ -2380,9 +2496,9 @@ dependencies = [ 
[[package]] name = "zerovec" -version = "0.11.5" +version = "0.11.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c28719294829477f525be0186d13efa9a3c602f7ec202ca9e353d310fb9a002" +checksum = "90f911cbc359ab6af17377d242225f4d75119aec87ea711a880987b18cd7b239" dependencies = [ "yoke", "zerofrom", @@ -2391,9 +2507,9 @@ dependencies = [ [[package]] name = "zerovec-derive" -version = "0.11.2" +version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eadce39539ca5cb3985590102671f2567e659fca9666581ad3411d59207951f3" +checksum = "625dc425cab0dca6dc3c3319506e6593dcb08a9f387ea3b284dbd52a92c40555" dependencies = [ "proc-macro2", "quote", @@ -2405,3 +2521,18 @@ name = "zmij" version = "1.0.21" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b8848ee67ecc8aedbaf3e4122217aff892639231befc6a1b58d29fff4c2cabaa" + +[[package]] +name = "zune-core" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb8a0807f7c01457d0379ba880ba6322660448ddebc890ce29bb64da71fb40f9" + +[[package]] +name = "zune-jpeg" +version = "0.5.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "27bc9d5b815bc103f142aa054f561d9187d191692ec7c2d1e2b4737f8dbd7296" +dependencies = [ + "zune-core", +] diff --git a/Cargo.toml b/Cargo.toml index c32b9e63..77b8ebe2 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] -name = "agent-broker" -version = "0.1.0" +name = "openab" +version = "0.7.1" edition = "2021" [dependencies] @@ -15,3 +15,6 @@ uuid = { version = "1", features = ["v4"] } regex = "1" anyhow = "1" rand = "0.8" +reqwest = { version = "0.12", default-features = false, features = ["rustls-tls", "multipart", "json"] } +base64 = "0.22" +image = { version = "0.25", default-features = false, features = ["jpeg", "png", "gif", "webp"] } diff --git a/Dockerfile b/Dockerfile index 08159bc5..fdb14e2a 100644 --- a/Dockerfile +++ 
b/Dockerfile @@ -2,8 +2,9 @@ FROM rust:1-bookworm AS builder WORKDIR /build COPY Cargo.toml Cargo.lock ./ +RUN mkdir src && echo 'fn main() {}' > src/main.rs && cargo build --release && rm -rf src COPY src/ src/ -RUN cargo build --release +RUN touch src/main.rs && cargo build --release # --- Runtime stage --- FROM debian:bookworm-slim @@ -13,17 +14,30 @@ RUN apt-get update && apt-get install -y --no-install-recommends ca-certificates RUN ARCH=$(dpkg --print-architecture) && \ if [ "$ARCH" = "arm64" ]; then URL="https://desktop-release.q.us-east-1.amazonaws.com/latest/kirocli-aarch64-linux.zip"; \ else URL="https://desktop-release.q.us-east-1.amazonaws.com/latest/kirocli-x86_64-linux.zip"; fi && \ - curl --proto '=https' --tlsv1.2 -sSf "$URL" -o /tmp/kirocli.zip && \ + curl --proto '=https' --tlsv1.2 -sSf --retry 3 --retry-delay 5 "$URL" -o /tmp/kirocli.zip && \ unzip /tmp/kirocli.zip -d /tmp && \ cp /tmp/kirocli/bin/* /usr/local/bin/ && \ chmod +x /usr/local/bin/kiro-cli* && \ rm -rf /tmp/kirocli /tmp/kirocli.zip -RUN mkdir -p /home/agent/.local/share/kiro-cli /home/agent/.kiro +# Install gh CLI +RUN curl -fsSL https://cli.github.com/packages/githubcli-archive-keyring.gpg \ + -o /usr/share/keyrings/githubcli-archive-keyring.gpg && \ + echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" \ + > /etc/apt/sources.list.d/github-cli.list && \ + apt-get update && apt-get install -y --no-install-recommends gh && \ + rm -rf /var/lib/apt/lists/* + +RUN useradd -m -s /bin/bash -u 1000 agent +RUN mkdir -p /home/agent/.local/share/kiro-cli /home/agent/.kiro && \ + chown -R agent:agent /home/agent ENV HOME=/home/agent WORKDIR /home/agent -COPY --from=builder /build/target/release/agent-broker /usr/local/bin/agent-broker +COPY --from=builder --chown=agent:agent /build/target/release/openab /usr/local/bin/openab -ENTRYPOINT ["agent-broker"] -CMD ["/etc/agent-broker/config.toml"] +USER 
agent +HEALTHCHECK --interval=30s --timeout=5s --retries=3 \ + CMD pgrep -x openab || exit 1 +ENTRYPOINT ["openab"] +CMD ["/etc/openab/config.toml"] diff --git a/Dockerfile.claude b/Dockerfile.claude new file mode 100644 index 00000000..2c8b90ab --- /dev/null +++ b/Dockerfile.claude @@ -0,0 +1,33 @@ +# --- Build stage --- +FROM rust:1-bookworm AS builder +WORKDIR /build +COPY Cargo.toml Cargo.lock ./ +RUN mkdir src && echo 'fn main() {}' > src/main.rs && cargo build --release && rm -rf src +COPY src/ src/ +RUN touch src/main.rs && cargo build --release + +# --- Runtime stage --- +FROM node:22-bookworm-slim +RUN apt-get update && apt-get install -y --no-install-recommends ca-certificates curl procps && rm -rf /var/lib/apt/lists/* + +# Install claude-agent-acp adapter and Claude Code CLI +RUN npm install -g @agentclientprotocol/claude-agent-acp@0.25.0 @anthropic-ai/claude-code --fetch-retries 3 + +# Install gh CLI +RUN curl -fsSL https://cli.github.com/packages/githubcli-archive-keyring.gpg \ + -o /usr/share/keyrings/githubcli-archive-keyring.gpg && \ + echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" \ + > /etc/apt/sources.list.d/github-cli.list && \ + apt-get update && apt-get install -y --no-install-recommends gh && \ + rm -rf /var/lib/apt/lists/* + +ENV HOME=/home/node +WORKDIR /home/node + +COPY --from=builder --chown=node:node /build/target/release/openab /usr/local/bin/openab + +USER node +HEALTHCHECK --interval=30s --timeout=5s --retries=3 \ + CMD pgrep -x openab || exit 1 +ENTRYPOINT ["openab"] +CMD ["/etc/openab/config.toml"] diff --git a/Dockerfile.codex b/Dockerfile.codex new file mode 100644 index 00000000..b7ab4921 --- /dev/null +++ b/Dockerfile.codex @@ -0,0 +1,33 @@ +# --- Build stage --- +FROM rust:1-bookworm AS builder +WORKDIR /build +COPY Cargo.toml Cargo.lock ./ +RUN mkdir src && echo 'fn main() {}' > src/main.rs && cargo build --release && rm -rf src +COPY
src/ src/ +RUN touch src/main.rs && cargo build --release + +# --- Runtime stage --- +FROM node:22-bookworm-slim +RUN apt-get update && apt-get install -y --no-install-recommends ca-certificates curl procps && rm -rf /var/lib/apt/lists/* + +# Pre-install codex-acp and codex CLI globally +RUN npm install -g @zed-industries/codex-acp@0.9.5 @openai/codex --fetch-retries 3 + +# Install gh CLI +RUN curl -fsSL https://cli.github.com/packages/githubcli-archive-keyring.gpg \ + -o /usr/share/keyrings/githubcli-archive-keyring.gpg && \ + echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" \ + > /etc/apt/sources.list.d/github-cli.list && \ + apt-get update && apt-get install -y --no-install-recommends gh && \ + rm -rf /var/lib/apt/lists/* + +ENV HOME=/home/node +WORKDIR /home/node + +COPY --from=builder --chown=node:node /build/target/release/openab /usr/local/bin/openab + +USER node +HEALTHCHECK --interval=30s --timeout=5s --retries=3 \ + CMD pgrep -x openab || exit 1 +ENTRYPOINT ["openab"] +CMD ["/etc/openab/config.toml"] diff --git a/Dockerfile.gemini b/Dockerfile.gemini new file mode 100644 index 00000000..a5ce9201 --- /dev/null +++ b/Dockerfile.gemini @@ -0,0 +1,33 @@ +# --- Build stage --- +FROM rust:1-bookworm AS builder +WORKDIR /build +COPY Cargo.toml Cargo.lock ./ +RUN mkdir src && echo 'fn main() {}' > src/main.rs && cargo build --release && rm -rf src +COPY src/ src/ +RUN touch src/main.rs && cargo build --release + +# --- Runtime stage --- +FROM node:22-bookworm-slim +RUN apt-get update && apt-get install -y --no-install-recommends ca-certificates curl procps && rm -rf /var/lib/apt/lists/* + +# Install Gemini CLI (native ACP support via --acp) +RUN npm install -g @google/gemini-cli --fetch-retries 3 + +# Install gh CLI +RUN curl -fsSL https://cli.github.com/packages/githubcli-archive-keyring.gpg \ + -o /usr/share/keyrings/githubcli-archive-keyring.gpg && \ + echo "deb [arch=$(dpkg
--print-architecture) signed-by=/usr/share/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" \ + > /etc/apt/sources.list.d/github-cli.list && \ + apt-get update && apt-get install -y --no-install-recommends gh && \ + rm -rf /var/lib/apt/lists/* + +ENV HOME=/home/node +WORKDIR /home/node + +COPY --from=builder --chown=node:node /build/target/release/openab /usr/local/bin/openab + +USER node +HEALTHCHECK --interval=30s --timeout=5s --retries=3 \ + CMD pgrep -x openab || exit 1 +ENTRYPOINT ["openab"] +CMD ["/etc/openab/config.toml"] diff --git a/README.md b/README.md index 829b8ba5..772f5f72 100644 --- a/README.md +++ b/README.md @@ -1,17 +1,19 @@ -# agent-broker +# OpenAB — Open Agent Broker -A Rust bridge service between Discord and any ACP-compatible coding CLI (Kiro CLI, Claude Code, Codex, Gemini, etc.) using the [Agent Client Protocol](https://github.com/anthropics/agent-protocol) over stdio JSON-RPC. +A lightweight, secure, cloud-native ACP harness that bridges Discord and any [Agent Client Protocol](https://github.com/anthropics/agent-protocol)-compatible coding CLI (Kiro CLI, Claude Code, Codex, Gemini, etc.) over stdio JSON-RPC — delivering the next-generation development experience. 
+ +🪼 **Join our community!** Come say hi on Discord — we'd love to have you: **[🪼 OpenAB — Official](https://discord.gg/YNksK9M6)** 🎉 ``` ┌──────────────┐ Gateway WS ┌──────────────┐ ACP stdio ┌──────────────┐ -│ Discord │◄─────────────►│ agent-broker │──────────────►│ coding CLI │ +│ Discord │◄─────────────►│ openab │──────────────►│ coding CLI │ │ User │ │ (Rust) │◄── JSON-RPC ──│ (acp mode) │ └──────────────┘ └──────────────┘ └──────────────┘ ``` ## Demo -![agent-broker demo](images/demo.png) +![openab demo](images/demo.png) ## Features @@ -23,6 +25,7 @@ A Rust bridge service between Discord and any ACP-compatible coding CLI (Kiro CL - **Session pool** — one CLI process per thread, auto-managed lifecycle - **ACP protocol** — JSON-RPC over stdio with tool call, thinking, and permission auto-reply support - **Kubernetes-ready** — Dockerfile + k8s manifests with PVC for auth persistence +- **Voice message STT** — auto-transcribes Discord voice messages via Groq, OpenAI, or local Whisper server ([docs/stt.md](docs/stt.md)) ## Quick Start @@ -48,6 +51,7 @@ Edit `config.toml`: [discord] bot_token = "${DISCORD_BOT_TOKEN}" allowed_channels = ["YOUR_CHANNEL_ID"] +# allowed_users = ["YOUR_USER_ID"] # optional: restrict who can use the bot [agent] command = "kiro-cli" @@ -65,7 +69,7 @@ cargo run # Production cargo build --release -./target/release/agent-broker config.toml +./target/release/openab config.toml ``` If no config path is given, it defaults to `config.toml` in the current directory. @@ -81,36 +85,78 @@ The bot creates a thread. After that, just type in the thread — no @mention ne ## Pluggable Agent Backends -> **Note:** Currently only **Kiro CLI** is supported and tested. Other ACP-compatible CLIs (Claude Code, Codex, Gemini) should work in theory but are untested. Contributions and bug reports welcome. +Supports Kiro CLI, Claude Code, Codex, Gemini, and any ACP-compatible CLI. 
+ +| Agent key | CLI | ACP Adapter | Auth | +|-----------|-----|-------------|------| +| `kiro` (default) | Kiro CLI | Native `kiro-cli acp` | `kiro-cli login --use-device-flow` | +| `codex` | Codex | [@zed-industries/codex-acp](https://github.com/zed-industries/codex-acp) | `codex login --device-auth` | +| `claude` | Claude Code | [@agentclientprotocol/claude-agent-acp](https://github.com/agentclientprotocol/claude-agent-acp) | `claude setup-token` | +| `gemini` | Gemini CLI | Native `gemini --acp` | Google OAuth or `GEMINI_API_KEY` | + +### Helm Install (recommended) + +See the **[Helm chart docs](https://openabdev.github.io/openab)** for full installation instructions, values reference, and multi-agent examples. + +```bash +helm repo add openab https://openabdev.github.io/openab +helm repo update + +# Kiro CLI only (default) +helm install openab openab/openab \ + --set agents.kiro.discord.botToken="$DISCORD_BOT_TOKEN" \ + --set-string 'agents.kiro.discord.allowedChannels[0]=YOUR_CHANNEL_ID' + +# Claude Code only (disable default kiro) +helm install openab openab/openab \ + --set agents.kiro.enabled=false \ + --set agents.claude.discord.botToken="$DISCORD_BOT_TOKEN" \ + --set-string 'agents.claude.discord.allowedChannels[0]=YOUR_CHANNEL_ID' \ + --set agents.claude.image=ghcr.io/openabdev/openab-claude:latest \ + --set agents.claude.command=claude-agent-acp \ + --set agents.claude.workingDir=/home/node + +# Multi-agent (kiro + claude in one release) +helm install openab openab/openab \ + --set agents.kiro.discord.botToken="$KIRO_BOT_TOKEN" \ + --set-string 'agents.kiro.discord.allowedChannels[0]=KIRO_CHANNEL_ID' \ + --set agents.claude.discord.botToken="$CLAUDE_BOT_TOKEN" \ + --set-string 'agents.claude.discord.allowedChannels[0]=CLAUDE_CHANNEL_ID' \ + --set agents.claude.image=ghcr.io/openabdev/openab-claude:latest \ + --set agents.claude.command=claude-agent-acp \ + --set agents.claude.workingDir=/home/node +``` + +Each agent key in `agents` map creates its own 
Deployment, ConfigMap, Secret, and PVC. Set `agents.<name>.enabled: false` to skip creating resources for an agent. -Swap the `[agent]` block to use any ACP-compatible CLI. The `env` field supports `${VAR}` expansion from the process environment. +### Manual config.toml + +For non-Helm deployments, configure the `[agent]` block per CLI: ```toml # Kiro CLI (default) [agent] command = "kiro-cli" args = ["acp", "--trust-all-tools"] -working_dir = "/tmp" +working_dir = "/home/agent" -# Claude Code +# Codex (requires codex-acp in PATH) [agent] -command = "claude" -args = ["--acp"] -working_dir = "/tmp" -env = { ANTHROPIC_API_KEY = "${ANTHROPIC_API_KEY}" } +command = "codex-acp" +args = [] +working_dir = "/home/node" -# Codex +# Claude Code (requires claude-agent-acp in PATH) [agent] -command = "codex" -args = ["--acp"] -working_dir = "/tmp" -env = { OPENAI_API_KEY = "${OPENAI_API_KEY}" } +command = "claude-agent-acp" +args = [] +working_dir = "/home/node" # Gemini [agent] command = "gemini" args = ["--acp"] -working_dir = "/tmp" +working_dir = "/home/node" env = { GEMINI_API_KEY = "${GEMINI_API_KEY}" } ``` @@ -120,6 +166,7 @@ env = { GEMINI_API_KEY = "${GEMINI_API_KEY}" } [discord] bot_token = "${DISCORD_BOT_TOKEN}" # supports env var expansion allowed_channels = ["123456789"] # channel ID allowlist +# allowed_users = ["987654321"] # user ID allowlist (empty = all users) [agent] command = "kiro-cli" # CLI command @@ -154,7 +201,7 @@ error_hold_ms = 2500 # keep error emoji for 2.5s ## Kubernetes Deployment -The Docker image bundles both `agent-broker` and `kiro-cli` in a single container (agent-broker spawns kiro-cli as a child process). +The Docker image bundles both `openab` and `kiro-cli` in a single container (openab spawns kiro-cli as a child process).
### Pod Architecture @@ -162,7 +209,7 @@ The Docker image bundles both `agent-broker` and `kiro-cli` in a single containe ┌─ Kubernetes Pod ─────────────────────────────────────────────────┐ │ │ │ ┌─────────────────────────────────────────────────────────┐ │ -│ │ agent-broker (main process, PID 1) │ │ +│ │ openab (main process, PID 1) │ │ │ │ │ │ │ │ ┌──────────────┐ ┌──────────────┐ ┌───────────┐ │ │ │ │ │ Discord │ │ Session Pool │ │ Reaction │ │ │ @@ -198,30 +245,28 @@ The Docker image bundles both `agent-broker` and `kiro-cli` in a single containe └──────────────────┘ └──────────────┘ ``` -- **Single container** — agent-broker is PID 1, spawns kiro-cli as a child process +- **Single container** — openab is PID 1, spawns kiro-cli as a child process - **stdio JSON-RPC** — ACP communication over stdin/stdout, no network ports needed - **Session pool** — one kiro-cli process per Discord thread, up to `max_sessions` - **PVC** — persists OAuth tokens and settings across pod restarts ### Install with Your Coding CLI -Use this prompt with any coding CLI (Kiro CLI, Claude Code, Codex, Gemini, etc.) on the host that has `helm` and `kubectl` access to your cluster: - -> Install agent-broker on my local k8s cluster using the Helm chart from https://thepagent.github.io/agent-broker. My Discord bot token is in the environment variable DISCORD_BOT_TOKEN and my channel ID is . After install, authenticate kiro-cli inside the pod using kiro-cli login --use-device-flow, then restart the deployment. +See the **[Helm chart docs](https://openabdev.github.io/openab)** for per-agent install commands (Kiro CLI, Claude Code, Codex, Gemini) and values reference. ### Build & Push ```bash -docker build -t agent-broker:latest . -docker tag agent-broker:latest /agent-broker:latest -docker push /agent-broker:latest +docker build -t openab:latest . 
+docker tag openab:latest <your-registry>/openab:latest +docker push <your-registry>/openab:latest ``` ### Deploy ```bash # Create the secret with your bot token -kubectl create secret generic agent-broker-secret \ +kubectl create secret generic openab-secret \ --from-literal=discord-bot-token="your-token" # Edit k8s/configmap.yaml with your channel IDs @@ -235,13 +280,13 @@ kubectl apply -f k8s/deployment.yaml kiro-cli requires a one-time OAuth login. The PVC persists the tokens across pod restarts. ```bash -kubectl exec -it deployment/agent-broker -- kiro-cli login --use-device-flow +kubectl exec -it deployment/openab-kiro -- kiro-cli login --use-device-flow ``` Follow the device code flow in your browser, then restart the pod: ```bash -kubectl rollout restart deployment agent-broker +kubectl rollout restart deployment/openab-kiro ``` ### Manifests @@ -249,7 +294,7 @@ kubectl rollout restart deployment agent-broker | File | Purpose | |------|---------| | `k8s/deployment.yaml` | Single-container pod with config + data volume mounts | -| `k8s/configmap.yaml` | `config.toml` mounted at `/etc/agent-broker/` | +| `k8s/configmap.yaml` | `config.toml` mounted at `/etc/openab/` | | `k8s/secret.yaml` | `DISCORD_BOT_TOKEN` injected as env var | | `k8s/pvc.yaml` | Persistent storage for auth + settings | diff --git a/RELEASING.md b/RELEASING.md new file mode 100644 index 00000000..8e2f35d6 --- /dev/null +++ b/RELEASING.md @@ -0,0 +1,202 @@ +# Releasing + +## Version Scheme + +Versions follow SemVer (e.g. `0.7.0`).
Version bumps are controlled via `workflow_dispatch`: + +| Method | 效果 | 範例 | +|---|---|---| +| Auto patch (default) | patch bump + beta | `0.6.0 → 0.6.1-beta.1` | +| Auto minor | minor bump + beta | `0.6.0 → 0.7.0-beta.1` | +| Auto major | major bump + beta | `0.6.0 → 1.0.0-beta.1` | +| Manual | 自行指定 | `0.8.0-beta.1` or `0.8.0` | + +## Release Flow (Tag-Driven) + +> **核心原則:測過什麼就發什麼 (what you tested is what you ship)** +> stable release 不重新 build,直接 promote pre-release 驗證過的 image。 + +##### Step 1 — 建立 Release PR + +``` + ┌─────────────────────────────────────────────────────────────────┐ + │ Maintainer 到 Actions → Release PR → Run workflow │ + │ │ + │ 選項 A: 留空 version,選 bump type → 自動算 (e.g. 0.7.0-beta.1) │ + │ 選項 B: 手動填 version (e.g. 0.8.0-beta.1 or 0.8.0) │ + │ │ + │ → release-pr.yml 觸發 │ + │ → 更新 Cargo.toml + Chart.yaml version/appVersion │ + │ → 建立 Release PR (branch: release/v0.7.0-beta.1) │ + └─────────────────────────────────────────────────────────────────┘ +``` + +##### Step 2 — Merge Release PR → 自動打 Tag → Build + +``` + ┌─────────────────────────────────────────────────────────────────┐ + │ Maintainer review & merge Release PR │ + │ │ + │ → tag-on-merge.yml 偵測 release/ branch merge │ + │ → 自動打 tag (e.g. 
v0.7.0-beta.1) │ + │ → build.yml 觸發 (is_prerelease=true) │ + │ → build-image: 4 variants × 2 platforms (amd64 + arm64) │ + │ → merge-manifests: image tags = <sha> + 0.7.0-beta.1 │ + │ → release-chart: helm chart → OCI registry │ + └─────────────────────────────────────────────────────────────────┘ + │ + ▼ + ┌─────────────────────────────────────────────────────────────────┐ + │ 部署 pre-release 進行測試: │ + │ │ + │ helm install openab \ │ + │ oci://ghcr.io/openabdev/charts/openab \ │ + │ --version 0.7.0-beta.1 │ + │ │ + │ 發現 bug?→ 修復 PR merge → 再跑一次 Release PR workflow │ + │ → 手動指定 v0.7.0-beta.2 → merge → 重新測試 │ + └─────────────────────────────────────────────────────────────────┘ +``` + +##### Step 3 — Stable Release(Promote) + +``` + ┌─────────────────────────────────────────────────────────────────┐ + │ 測試通過後,再跑一次 Release PR workflow │ + │ → 手動指定 version: 0.7.0 (不帶 beta) │ + │ → merge Release PR │ + │ → tag-on-merge.yml 打 tag v0.7.0 │ + │ │ + │ → build.yml 觸發 (is_prerelease=false) │ + │ → promote-stable: │ + │ 1. 找到最新的 pre-release tag (v0.7.0-beta.2) │ + │ 2. 驗證 pre-release image 存在 │ + │ 3.
re-tag 0.7.0-beta.2 → 0.7.0 / 0.7 / latest │ + │ ⚠️ 不 rebuild,跟 pre-release 是同一個 artifact │ + │ → release-chart: helm chart → OCI registry │ + └─────────────────────────────────────────────────────────────────┘ +``` + +##### Step 4 — Chart Release(自動) + +``` + ┌─────────────────────────────────────────────────────────────────┐ + │ release.yml 偵測到 Chart.yaml 變更 push to main │ + │ → chart-releaser 更新 GitHub Pages helm repo index │ + │ → 附加 install instructions 到 chart release notes │ + └─────────────────────────────────────────────────────────────────┘ +``` + +## 快速指令參考 + +```bash +# ── Pre-release ─────────────────────────────────────── +# 到 Actions → Release PR → Run workflow +# 留空 version,選 patch → 自動算 0.7.0-beta.1 +# 或手動填 version: 0.7.0-beta.1 +# → merge 產生的 Release PR → 自動打 tag → build + +# ── 第二輪 pre-release(beta.1 有 bug 時)───────────── +# 修 bug → PR merge to main +# 再跑 Release PR workflow,手動填 version: 0.7.0-beta.2 +# → merge → 自動打 tag → build + +# ── Stable release ──────────────────────────────────── +# 跑 Release PR workflow,手動填 version: 0.7.0 +# → merge → 自動打 tag → promote beta image (不 rebuild) + +# ── 手動重跑(build 失敗時)────────────────────────── +gh workflow run build.yml -f tag=v0.7.0-beta.1 +gh workflow run build.yml -f tag=v0.7.0 +``` + +## GitHub Releases + +| Release | Tag 格式 | 內容 | +|---|---|---| +| chart-releaser | `openab-0.7.0` | Version Info + Installation instructions | + +## Workflow 對應表 + +| Workflow | 觸發條件 | 用途 | +|---|---|---| +| `ci.yml` | pull_request (src/Cargo/Dockerfile) | cargo check + clippy + test | +| `release-pr.yml` | workflow_dispatch | 建立 Release PR(更新版本檔案) | +| `tag-on-merge.yml` | release/ PR merge to main | 自動打 tag | +| `build.yml` | tag push `v*` | pre-release: 完整 build / stable: promote | +| `release.yml` | Chart.yaml 變更 push to main | chart-releaser 更新 GitHub Pages index | + +## Version 同步 + +release-pr.yml 在 Release PR 中自動更新以下檔案的版本: + +| 檔案 | 欄位 | +|---|---| +| `Cargo.toml` | `version` | +| `charts/openab/Chart.yaml` | 
`version` | +| `charts/openab/Chart.yaml` | `appVersion` | + +三者統一為同一個 semver(e.g. `0.7.0`)。 + +## Image Variants + +每次 build 產出 4 個 multi-arch image (linux/amd64 + linux/arm64): + +``` +ghcr.io/openabdev/openab # default (kiro-cli) +ghcr.io/openabdev/openab-codex # codex +ghcr.io/openabdev/openab-claude # claude +ghcr.io/openabdev/openab-gemini # gemini +``` + +Image tags 依 release 類型不同: + +| Tag | Stable (`v0.7.0`) | Pre-release (`v0.7.0-beta.1`) | +|---|---|---| +| `` | v (from pre-release) | v | +| `0.7.0` / `0.7.0-beta.1` | v | v | +| `0.7` | v | x | +| `latest` | v | x | + +## Installation + +##### Helm Repository (GitHub Pages) + +```bash +helm repo add openab https://openabdev.github.io/openab +helm repo update +helm install openab openab/openab --version 0.7.0 +``` + +##### OCI Registry + +```bash +helm install openab oci://ghcr.io/openabdev/charts/openab --version 0.7.0 +``` + +## 手動操作 + +| 時機 | 做什麼 | +|---|---| +| 準備 release | Actions → Release PR → Run workflow | +| 需要 beta 測試 | 指定 version 如 `0.7.0-beta.1` | +| 測試通過 | 指定 stable version 如 `0.7.0` → promote | +| build 失敗或需重跑 | `gh workflow run build.yml -f tag=` | + +## GitHub App 權限 + +release-pr.yml 和 tag-on-merge.yml 使用 GitHub App token 來建立 PR 和推送 tag。App 需要以下 Repository permissions: + +| Permission | Access | +|---|---| +| Contents | Read and write | +| Metadata | Read-only (mandatory) | +| Pull requests | Read and write | + +對應的 secrets:`APP_ID`(Client ID)、`APP_PRIVATE_KEY`。 + +## 限制與注意事項 + +- **Stable release 必須先有 pre-release**:promote-stable 會查找 `v{version}-*` 的 pre-release tag,找不到就失敗 +- **promote 用 version tag 找 image**:不依賴 commit SHA,pre-release 和 stable 可以在不同 commit 上 +- **外部用戶不會裝到 pre-release**:`helm install` 預設只拿 stable 版本,pre-release 需明確指定 `--version` diff --git a/charts/agent-broker/templates/NOTES.txt b/charts/agent-broker/templates/NOTES.txt deleted file mode 100644 index 9cc1c57d..00000000 --- a/charts/agent-broker/templates/NOTES.txt +++ /dev/null @@ -1,17 +0,0 @@ -agent-broker {{ 
.Chart.AppVersion }} has been installed! - -{{- if not .Values.discord.botToken }} - -⚠️ No bot token was provided. Create the secret manually: - - kubectl create secret generic {{ include "agent-broker.fullname" . }} \ - --from-literal=discord-bot-token="YOUR_TOKEN" -{{- end }} - -Authenticate kiro-cli (first time only): - - kubectl exec -it deployment/{{ include "agent-broker.fullname" . }} -- kiro-cli login --use-device-flow - -Then restart the pod: - - kubectl rollout restart deployment/{{ include "agent-broker.fullname" . }} diff --git a/charts/agent-broker/templates/_helpers.tpl b/charts/agent-broker/templates/_helpers.tpl deleted file mode 100644 index 2ca6992f..00000000 --- a/charts/agent-broker/templates/_helpers.tpl +++ /dev/null @@ -1,34 +0,0 @@ -{{- define "agent-broker.name" -}} -{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" }} -{{- end }} - -{{- define "agent-broker.fullname" -}} -{{- if .Values.fullnameOverride }} -{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" }} -{{- else }} -{{- $name := default .Chart.Name .Values.nameOverride }} -{{- if contains $name .Release.Name }} -{{- .Release.Name | trunc 63 | trimSuffix "-" }} -{{- else }} -{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" }} -{{- end }} -{{- end }} -{{- end }} - -{{- define "agent-broker.chart" -}} -{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" }} -{{- end }} - -{{- define "agent-broker.labels" -}} -helm.sh/chart: {{ include "agent-broker.chart" . }} -{{ include "agent-broker.selectorLabels" . }} -{{- if .Chart.AppVersion }} -app.kubernetes.io/version: {{ .Chart.AppVersion | quote }} -{{- end }} -app.kubernetes.io/managed-by: {{ .Release.Service }} -{{- end }} - -{{- define "agent-broker.selectorLabels" -}} -app.kubernetes.io/name: {{ include "agent-broker.name" . 
}} -app.kubernetes.io/instance: {{ .Release.Name }} -{{- end }} diff --git a/charts/agent-broker/templates/configmap.yaml b/charts/agent-broker/templates/configmap.yaml deleted file mode 100644 index cf5af16b..00000000 --- a/charts/agent-broker/templates/configmap.yaml +++ /dev/null @@ -1,27 +0,0 @@ -apiVersion: v1 -kind: ConfigMap -metadata: - name: {{ include "agent-broker.fullname" . }} - labels: - {{- include "agent-broker.labels" . | nindent 4 }} -data: - config.toml: | - [discord] - bot_token = "${DISCORD_BOT_TOKEN}" - allowed_channels = [{{ range $i, $ch := .Values.discord.allowedChannels }}{{ if $i }}, {{ end }}"{{ $ch }}"{{ end }}] - - [agent] - command = "{{ .Values.agent.command }}" - args = [{{ range $i, $a := .Values.agent.args }}{{ if $i }}, {{ end }}"{{ $a }}"{{ end }}] - working_dir = "{{ .Values.agent.workingDir }}" - {{- if .Values.agent.env }} - env = { {{ range $k, $v := .Values.agent.env }}{{ $k }} = "{{ $v }}", {{ end }} } - {{- end }} - - [pool] - max_sessions = {{ .Values.pool.maxSessions }} - session_ttl_hours = {{ .Values.pool.sessionTtlHours }} - - [reactions] - enabled = {{ .Values.reactions.enabled }} - remove_after_reply = {{ .Values.reactions.removeAfterReply }} diff --git a/charts/agent-broker/templates/deployment.yaml b/charts/agent-broker/templates/deployment.yaml deleted file mode 100644 index 08674b8f..00000000 --- a/charts/agent-broker/templates/deployment.yaml +++ /dev/null @@ -1,79 +0,0 @@ -apiVersion: apps/v1 -kind: Deployment -metadata: - name: {{ include "agent-broker.fullname" . }} - labels: - {{- include "agent-broker.labels" . | nindent 4 }} -spec: - replicas: {{ .Values.replicas }} - {{- with .Values.strategy }} - strategy: - {{- toYaml . | nindent 4 }} - {{- end }} - selector: - matchLabels: - {{- include "agent-broker.selectorLabels" . | nindent 6 }} - template: - metadata: - annotations: - checksum/config: {{ include (print $.Template.BasePath "/configmap.yaml") . 
| sha256sum }} - labels: - {{- include "agent-broker.selectorLabels" . | nindent 8 }} - spec: - containers: - - name: agent-broker - image: "{{ .Values.image.repository }}:{{ .Values.image.tag }}" - imagePullPolicy: {{ .Values.image.pullPolicy }} - env: - - name: DISCORD_BOT_TOKEN - valueFrom: - secretKeyRef: - name: {{ include "agent-broker.fullname" . }} - key: discord-bot-token - - name: HOME - value: /home/agent - {{- range $key, $value := .Values.env }} - - name: {{ $key }} - value: {{ $value | quote }} - {{- end }} - {{- with .Values.envFrom }} - envFrom: - {{- toYaml . | nindent 12 }} - {{- end }} - {{- with .Values.resources }} - resources: - {{- toYaml . | nindent 12 }} - {{- end }} - volumeMounts: - - name: config - mountPath: /etc/agent-broker - readOnly: true - {{- if .Values.persistence.enabled }} - - name: data - mountPath: /home/agent/.kiro - subPath: dot-kiro - - name: data - mountPath: /home/agent/.local/share/kiro-cli - subPath: kiro-cli-data - {{- end }} - {{- with .Values.nodeSelector }} - nodeSelector: - {{- toYaml . | nindent 8 }} - {{- end }} - {{- with .Values.affinity }} - affinity: - {{- toYaml . | nindent 8 }} - {{- end }} - {{- with .Values.tolerations }} - tolerations: - {{- toYaml . | nindent 8 }} - {{- end }} - volumes: - - name: config - configMap: - name: {{ include "agent-broker.fullname" . }} - {{- if .Values.persistence.enabled }} - - name: data - persistentVolumeClaim: - claimName: {{ include "agent-broker.fullname" . }} - {{- end }} diff --git a/charts/agent-broker/templates/pvc.yaml b/charts/agent-broker/templates/pvc.yaml deleted file mode 100644 index cf28384b..00000000 --- a/charts/agent-broker/templates/pvc.yaml +++ /dev/null @@ -1,17 +0,0 @@ -{{- if .Values.persistence.enabled }} -apiVersion: v1 -kind: PersistentVolumeClaim -metadata: - name: {{ include "agent-broker.fullname" . }} - labels: - {{- include "agent-broker.labels" . 
| nindent 4 }} -spec: - accessModes: - - ReadWriteOnce - {{- if .Values.persistence.storageClass }} - storageClassName: {{ .Values.persistence.storageClass }} - {{- end }} - resources: - requests: - storage: {{ .Values.persistence.size }} -{{- end }} diff --git a/charts/agent-broker/templates/secret.yaml b/charts/agent-broker/templates/secret.yaml deleted file mode 100644 index 13f43be2..00000000 --- a/charts/agent-broker/templates/secret.yaml +++ /dev/null @@ -1,13 +0,0 @@ -{{- if .Values.discord.botToken }} -apiVersion: v1 -kind: Secret -metadata: - name: {{ include "agent-broker.fullname" . }} - labels: - {{- include "agent-broker.labels" . | nindent 4 }} - annotations: - "helm.sh/resource-policy": keep -type: Opaque -data: - discord-bot-token: {{ .Values.discord.botToken | b64enc | quote }} -{{- end }} diff --git a/charts/agent-broker/values.yaml b/charts/agent-broker/values.yaml deleted file mode 100644 index e28c1eb8..00000000 --- a/charts/agent-broker/values.yaml +++ /dev/null @@ -1,45 +0,0 @@ -image: - repository: ghcr.io/thepagent/agent-broker - tag: "1e66133" - pullPolicy: IfNotPresent - -replicas: 1 - -strategy: - type: Recreate - -resources: {} - -persistence: - enabled: true - storageClass: "" - size: 1Gi - -discord: - botToken: "" # set via --set or external secret - allowedChannels: - - "YOUR_CHANNEL_ID" - -agent: - command: kiro-cli - args: - - acp - - --trust-all-tools - workingDir: /tmp - env: {} - # ANTHROPIC_API_KEY: "${ANTHROPIC_API_KEY}" - -pool: - maxSessions: 10 - sessionTtlHours: 24 - -reactions: - enabled: true - removeAfterReply: false - -env: {} -envFrom: [] - -nodeSelector: {} -tolerations: [] -affinity: {} diff --git a/charts/agent-broker/Chart.yaml b/charts/openab/Chart.yaml similarity index 68% rename from charts/agent-broker/Chart.yaml rename to charts/openab/Chart.yaml index 17bf32ac..7fb38702 100644 --- a/charts/agent-broker/Chart.yaml +++ b/charts/openab/Chart.yaml @@ -1,6 +1,6 @@ apiVersion: v2 -name: agent-broker +name: openab 
description: Discord ↔ ACP coding CLI bridge (Kiro CLI, Claude Code, Codex, Gemini) type: application -version: 0.1.5 -appVersion: "1e66133" +version: 0.7.1 +appVersion: "0.7.1" diff --git a/charts/openab/templates/NOTES.txt b/charts/openab/templates/NOTES.txt new file mode 100644 index 00000000..37f1c709 --- /dev/null +++ b/charts/openab/templates/NOTES.txt @@ -0,0 +1,32 @@ +openab {{ .Chart.AppVersion }} has been installed! + +⚠️ Discord channel IDs must be set with --set-string (not --set) to avoid float64 precision loss. + +Agents deployed: +{{- range $name, $cfg := .Values.agents }} +{{- if ne (include "openab.agentEnabled" $cfg) "false" }} + • {{ $name }} ({{ $cfg.command }}) +{{- if not $cfg.discord.botToken }} + ⚠️ No bot token provided. Create the secret manually: + kubectl create secret generic {{ include "openab.agentFullname" (dict "ctx" $ "agent" $name) }} \ + --from-literal=discord-bot-token="YOUR_TOKEN" +{{- end }} + +{{- if eq $cfg.command "kiro-cli" }} + Authenticate: + kubectl exec -it deployment/{{ include "openab.agentFullname" (dict "ctx" $ "agent" $name) }} -- kiro-cli login --use-device-flow +{{- else if eq $cfg.command "codex-acp" }} + Authenticate: + kubectl exec -it deployment/{{ include "openab.agentFullname" (dict "ctx" $ "agent" $name) }} -- codex login --device-auth +{{- else if eq $cfg.command "claude-agent-acp" }} + Authenticate: + kubectl exec -it deployment/{{ include "openab.agentFullname" (dict "ctx" $ "agent" $name) }} -- claude setup-token +{{- else if eq $cfg.command "gemini" }} + Authenticate: + kubectl exec -it deployment/{{ include "openab.agentFullname" (dict "ctx" $ "agent" $name) }} -- gemini +{{- end }} + + Restart after auth: + kubectl rollout restart deployment/{{ include "openab.agentFullname" (dict "ctx" $ "agent" $name) }} +{{- end }} +{{- end }} diff --git a/charts/openab/templates/_helpers.tpl b/charts/openab/templates/_helpers.tpl new file mode 100644 index 00000000..770d557a --- /dev/null +++ 
b/charts/openab/templates/_helpers.tpl @@ -0,0 +1,67 @@ +{{- define "openab.name" -}} +{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" }} +{{- end }} + +{{- define "openab.fullname" -}} +{{- if .Values.fullnameOverride }} +{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" }} +{{- else }} +{{- $name := default .Chart.Name .Values.nameOverride }} +{{- if contains $name .Release.Name }} +{{- .Release.Name | trunc 63 | trimSuffix "-" }} +{{- else }} +{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" }} +{{- end }} +{{- end }} +{{- end }} + +{{- define "openab.chart" -}} +{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" }} +{{- end }} + +{{- define "openab.labels" -}} +helm.sh/chart: {{ include "openab.chart" .ctx }} +app.kubernetes.io/name: {{ include "openab.name" .ctx }} +app.kubernetes.io/instance: {{ .ctx.Release.Name }} +app.kubernetes.io/component: {{ .agent }} +{{- if .ctx.Chart.AppVersion }} +app.kubernetes.io/version: {{ .ctx.Chart.AppVersion | quote }} +{{- end }} +app.kubernetes.io/managed-by: {{ .ctx.Release.Service }} +{{- end }} + +{{- define "openab.selectorLabels" -}} +app.kubernetes.io/name: {{ include "openab.name" .ctx }} +app.kubernetes.io/instance: {{ .ctx.Release.Name }} +app.kubernetes.io/component: {{ .agent }} +{{- end }} + +{{/* Per-agent resource name: - */}} +{{- define "openab.agentFullname" -}} +{{- printf "%s-%s" (include "openab.fullname" .ctx) .agent | trunc 63 | trimSuffix "-" }} +{{- end }} + +{{/* Resolve image: agent-level string override → global default (repository:tag, tag defaults to appVersion) */}} +{{- define "openab.agentImage" -}} +{{- if and .cfg.image (kindIs "string" .cfg.image) (ne .cfg.image "") }} +{{- .cfg.image }} +{{- else }} +{{- $tag := default .ctx.Chart.AppVersion .ctx.Values.image.tag }} +{{- printf "%s:%s" .ctx.Values.image.repository $tag }} +{{- end }} +{{- end }} + +{{/* Resolve imagePullPolicy: global default 
(per-agent image string has no pullPolicy) */}} +{{- define "openab.agentImagePullPolicy" -}} +{{- .ctx.Values.image.pullPolicy }} +{{- end }} + +{{/* Agent enabled: default true unless explicitly set to false */}} +{{- define "openab.agentEnabled" -}} +{{- if eq (.enabled | toString) "false" }}false{{ else }}true{{ end }} +{{- end }} + +{{/* Persistence enabled: default true unless explicitly set to false */}} +{{- define "openab.persistenceEnabled" -}} +{{- if and . .persistence (eq (.persistence.enabled | toString) "false") }}false{{ else }}true{{ end }} +{{- end }} diff --git a/charts/openab/templates/configmap.yaml b/charts/openab/templates/configmap.yaml new file mode 100644 index 00000000..194d8c25 --- /dev/null +++ b/charts/openab/templates/configmap.yaml @@ -0,0 +1,59 @@ +{{- range $name, $cfg := .Values.agents }} +{{- if ne (include "openab.agentEnabled" $cfg) "false" }} +{{- $d := dict "ctx" $ "agent" $name "cfg" $cfg }} +--- +apiVersion: v1 +kind: ConfigMap +metadata: + name: {{ include "openab.agentFullname" $d }} + labels: + {{- include "openab.labels" $d | nindent 4 }} +data: + config.toml: | + [discord] + bot_token = "${DISCORD_BOT_TOKEN}" + {{- range $cfg.discord.allowedChannels }} + {{- if regexMatch "e\\+|E\\+" (toString .) }} + {{- fail (printf "discord.allowedChannels contains a mangled ID: %s — use --set-string instead of --set for channel IDs" (toString .)) }} + {{- end }} + {{- end }} + allowed_channels = {{ $cfg.discord.allowedChannels | default list | toJson }} + {{- range $cfg.discord.allowedUsers }} + {{- if regexMatch "e\\+|E\\+" (toString .) 
}} + {{- fail (printf "discord.allowedUsers contains a mangled ID: %s — use --set-string instead of --set for user IDs" (toString .)) }} + {{- end }} + {{- end }} + allowed_users = {{ $cfg.discord.allowedUsers | default list | toJson }} + + [agent] + command = "{{ $cfg.command }}" + args = {{ if $cfg.args }}{{ $cfg.args | toJson }}{{ else }}[]{{ end }} + working_dir = "{{ $cfg.workingDir | default "/home/agent" }}" + {{- if $cfg.env }} + env = { {{ $first := true }}{{ range $k, $v := $cfg.env }}{{ if not $first }}, {{ end }}{{ $k }} = "{{ $v }}"{{ $first = false }}{{ end }} } + {{- end }} + + [pool] + max_sessions = {{ ($cfg.pool).maxSessions | default 10 }} + session_ttl_hours = {{ ($cfg.pool).sessionTtlHours | default 24 }} + + [reactions] + enabled = {{ ne (($cfg.reactions).enabled | toString) "false" }} + remove_after_reply = {{ ($cfg.reactions).removeAfterReply | default false }} + {{- if ($cfg.stt).enabled }} + {{- if not ($cfg.stt).apiKey }} + {{ fail (printf "agents.%s.stt.apiKey is required when stt.enabled=true" $name) }} + {{- end }} + + [stt] + enabled = true + api_key = "${STT_API_KEY}" + model = "{{ ($cfg.stt).model | default "whisper-large-v3-turbo" }}" + base_url = "{{ ($cfg.stt).baseUrl | default "https://api.groq.com/openai/v1" }}" + {{- end }} + {{- if $cfg.agentsMd }} + AGENTS.md: | + {{- $cfg.agentsMd | nindent 4 }} + {{- end }} +{{- end }} +{{- end }} diff --git a/charts/openab/templates/deployment.yaml b/charts/openab/templates/deployment.yaml new file mode 100644 index 00000000..0d45041d --- /dev/null +++ b/charts/openab/templates/deployment.yaml @@ -0,0 +1,104 @@ +{{- range $name, $cfg := .Values.agents }} +{{- if ne (include "openab.agentEnabled" $cfg) "false" }} +{{- $d := dict "ctx" $ "agent" $name "cfg" $cfg }} +{{- $pvcEnabled := not (eq (include "openab.persistenceEnabled" $cfg) "false") }} +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + name: {{ include "openab.agentFullname" $d }} + labels: + {{- include "openab.labels" $d | nindent
4 }} +spec: + # Hardcoded for PVC-backed agents: RWO volumes can't be shared across pods, + # so rolling updates and multiple replicas are not supported. + replicas: 1 + strategy: + type: Recreate + selector: + matchLabels: + {{- include "openab.selectorLabels" $d | nindent 6 }} + template: + metadata: + annotations: + checksum/config: {{ $cfg | toJson | sha256sum }} + labels: + {{- include "openab.selectorLabels" $d | nindent 8 }} + spec: + {{- with $.Values.podSecurityContext }} + securityContext: + {{- toYaml . | nindent 8 }} + {{- end }} + containers: + - name: openab + image: {{ include "openab.agentImage" $d | quote }} + imagePullPolicy: {{ include "openab.agentImagePullPolicy" $d }} + {{- with $.Values.containerSecurityContext }} + securityContext: + {{- toYaml . | nindent 12 }} + {{- end }} + env: + {{- if $cfg.discord.botToken }} + - name: DISCORD_BOT_TOKEN + valueFrom: + secretKeyRef: + name: {{ include "openab.agentFullname" $d }} + key: discord-bot-token + {{- end }} + {{- if and ($cfg.stt).enabled ($cfg.stt).apiKey }} + - name: STT_API_KEY + valueFrom: + secretKeyRef: + name: {{ include "openab.agentFullname" $d }} + key: stt-api-key + {{- end }} + - name: HOME + value: {{ $cfg.workingDir | default "/home/agent" }} + {{- range $k, $v := $cfg.env }} + - name: {{ $k }} + value: {{ $v | quote }} + {{- end }} + {{- with $cfg.envFrom }} + envFrom: + {{- toYaml . | nindent 12 }} + {{- end }} + {{- with $cfg.resources }} + resources: + {{- toYaml . | nindent 12 }} + {{- end }} + volumeMounts: + - name: config + mountPath: /etc/openab + readOnly: true + {{- if $pvcEnabled }} + - name: data + mountPath: {{ $cfg.workingDir | default "/home/agent" }} + {{- end }} + {{- if $cfg.agentsMd }} + - name: config + mountPath: {{ $cfg.workingDir | default "/home/agent" }}/AGENTS.md + subPath: AGENTS.md + {{- end }} + {{- with $cfg.nodeSelector }} + nodeSelector: + {{- toYaml . | nindent 8 }} + {{- end }} + {{- with $cfg.affinity }} + affinity: + {{- toYaml . 
| nindent 8 }} + {{- end }} + {{- with $cfg.tolerations }} + tolerations: + {{- toYaml . | nindent 8 }} + {{- end }} + volumes: + - name: config + configMap: + name: {{ include "openab.agentFullname" $d }} + {{- if $pvcEnabled }} + - name: data + persistentVolumeClaim: + claimName: {{ include "openab.agentFullname" $d }} + {{- end }} +{{- end }} +{{- end }} diff --git a/charts/openab/templates/pvc.yaml b/charts/openab/templates/pvc.yaml new file mode 100644 index 00000000..e771e608 --- /dev/null +++ b/charts/openab/templates/pvc.yaml @@ -0,0 +1,23 @@ +{{- range $name, $cfg := .Values.agents }} +{{- if ne (include "openab.agentEnabled" $cfg) "false" }} +{{- if not (eq (include "openab.persistenceEnabled" $cfg) "false") }} +{{- $d := dict "ctx" $ "agent" $name "cfg" $cfg }} +--- +apiVersion: v1 +kind: PersistentVolumeClaim +metadata: + name: {{ include "openab.agentFullname" $d }} + labels: + {{- include "openab.labels" $d | nindent 4 }} +spec: + accessModes: + - ReadWriteOnce + {{- if and $cfg.persistence $cfg.persistence.storageClass }} + storageClassName: {{ $cfg.persistence.storageClass }} + {{- end }} + resources: + requests: + storage: {{ (and $cfg.persistence $cfg.persistence.size) | default "1Gi" }} +{{- end }} +{{- end }} +{{- end }} diff --git a/charts/openab/templates/secret.yaml b/charts/openab/templates/secret.yaml new file mode 100644 index 00000000..2cdd27c8 --- /dev/null +++ b/charts/openab/templates/secret.yaml @@ -0,0 +1,22 @@ +{{- range $name, $cfg := .Values.agents }} +{{- if ne (include "openab.agentEnabled" $cfg) "false" }} +{{- if $cfg.discord.botToken }} +{{- $d := dict "ctx" $ "agent" $name "cfg" $cfg }} +--- +apiVersion: v1 +kind: Secret +metadata: + name: {{ include "openab.agentFullname" $d }} + labels: + {{- include "openab.labels" $d | nindent 4 }} + annotations: + "helm.sh/resource-policy": keep +type: Opaque +data: + discord-bot-token: {{ $cfg.discord.botToken | b64enc | quote }} + {{- if and ($cfg.stt).enabled ($cfg.stt).apiKey }} + 
stt-api-key: {{ $cfg.stt.apiKey | b64enc | quote }} + {{- end }} +{{- end }} +{{- end }} +{{- end }} diff --git a/charts/openab/values.yaml b/charts/openab/values.yaml new file mode 100644 index 00000000..956374cb --- /dev/null +++ b/charts/openab/values.yaml @@ -0,0 +1,85 @@ +image: + repository: ghcr.io/openabdev/openab + # tag defaults to .Chart.AppVersion + tag: "" + pullPolicy: IfNotPresent + +podSecurityContext: + runAsNonRoot: true + runAsUser: 1000 + runAsGroup: 1000 + fsGroup: 1000 + +containerSecurityContext: + allowPrivilegeEscalation: false + capabilities: + drop: + - ALL + +agents: + kiro: + enabled: true # set to false to skip creating resources for this agent + # To add a second agent, uncomment and fill in the block below: + # claude: + # command: claude-agent-acp + # args: [] + # discord: + # botToken: "" + # # ⚠️ Use --set-string for channel IDs to avoid float64 precision loss + # allowedChannels: + # - "YOUR_CHANNEL_ID" + # allowedUsers: [] + # workingDir: /home/agent + # env: {} + # envFrom: [] + # pool: + # maxSessions: 10 + # sessionTtlHours: 24 + # reactions: + # enabled: true + # removeAfterReply: false + # persistence: + # enabled: true + # storageClass: "" + # size: 1Gi + # agentsMd: "" + # resources: {} + # nodeSelector: {} + # tolerations: [] + # affinity: {} + # image: "ghcr.io/openabdev/openab-claude:latest" + image: "" + command: kiro-cli + args: + - acp + - --trust-all-tools + discord: + botToken: "" + # ⚠️ Use --set-string for channel IDs to avoid float64 precision loss + allowedChannels: + - "YOUR_CHANNEL_ID" + # ⚠️ Use --set-string for user IDs to avoid float64 precision loss + allowedUsers: [] # empty = allow all users (default) + workingDir: /home/agent + env: {} + envFrom: [] + pool: + maxSessions: 10 + sessionTtlHours: 24 + reactions: + enabled: true + removeAfterReply: false + stt: + enabled: false + apiKey: "" + model: "whisper-large-v3-turbo" + baseUrl: "https://api.groq.com/openai/v1" + persistence: + enabled: true + 
storageClass: "" + size: 1Gi # defaults to 1Gi if not set + agentsMd: "" + resources: {} + nodeSelector: {} + tolerations: [] + affinity: {} diff --git a/config.toml.example b/config.toml.example index 619236a6..598c3017 100644 --- a/config.toml.example +++ b/config.toml.example @@ -1,28 +1,29 @@ [discord] bot_token = "${DISCORD_BOT_TOKEN}" allowed_channels = ["1234567890"] +# allowed_users = [""] # empty or omitted = allow all users [agent] command = "kiro-cli" args = ["acp", "--trust-all-tools"] -working_dir = "/tmp" +working_dir = "/home/agent" # [agent] # command = "claude" # args = ["--acp"] -# working_dir = "/tmp" +# working_dir = "/home/agent" # env = { ANTHROPIC_API_KEY = "${ANTHROPIC_API_KEY}" } # [agent] # command = "codex" # args = ["--acp"] -# working_dir = "/tmp" +# working_dir = "/home/agent" # env = { OPENAI_API_KEY = "${OPENAI_API_KEY}" } # [agent] # command = "gemini" # args = ["--acp"] -# working_dir = "/tmp" +# working_dir = "/home/agent" # env = { GEMINI_API_KEY = "${GEMINI_API_KEY}" } [pool] diff --git a/docs/discord-bot-howto.md b/docs/discord-bot-howto.md index c2e189cf..db1a0a06 100644 --- a/docs/discord-bot-howto.md +++ b/docs/discord-bot-howto.md @@ -1,6 +1,6 @@ # Discord Bot Setup Guide -Step-by-step guide to create and configure a Discord bot for agent-broker. +Step-by-step guide to create and configure a Discord bot for openab. ## 1. Create a Discord Application @@ -47,7 +47,14 @@ Step-by-step guide to create and configure a Discord bot for agent-broker. 3. Click **Copy Channel ID** 4. Use this ID in `allowed_channels` in your config -## 7. Configure agent-broker +## 7. Get Your User ID (optional) + +1. Make sure **Developer Mode** is enabled (see step 6) +2. Right-click your own username (in a message or the member list) +3. Click **Copy User ID** +4. Use this ID in `allowed_users` to restrict who can interact with the bot + +## 8. 
Configure openab Set the bot token and channel ID: @@ -61,15 +68,28 @@ In `config.toml`: [discord] bot_token = "${DISCORD_BOT_TOKEN}" allowed_channels = ["your-channel-id-from-step-6"] +# allowed_users = ["your-user-id-from-step-7"] # optional: restrict who can use the bot ``` +### Access control behavior + +| `allowed_channels` | `allowed_users` | Result | +|---|---|---| +| empty | empty | All users, all channels (default) | +| set | empty | Only these channels, all users | +| empty | set | All channels, only these users | +| set | set | **AND** — must be in allowed channel AND allowed user | + +- Empty `allowed_users` (default) = no user filtering, fully backward compatible +- Denied users get a 🚫 reaction and no reply + For Kubernetes: ```bash -kubectl create secret generic agent-broker-secret \ +kubectl create secret generic openab-secret \ --from-literal=discord-bot-token="your-token-from-step-3" ``` -## 8. Test +## 9. Test In the allowed channel, mention the bot: @@ -84,3 +104,4 @@ The bot should create a thread and respond. After that, just type in the thread - **Bot doesn't respond** — check that the channel ID is correct and the bot has permissions in that channel - **"Sent invalid authentication"** — the bot token is wrong or expired, reset it in the Developer Portal - **"Failed to start agent"** — kiro-cli isn't authenticated, run `kiro-cli login --use-device-flow` inside the container +- **`gh` commands fail with 401** — the agent needs GitHub CLI authentication. See [gh auth device flow guide](gh-auth-device-flow.md) for how to authenticate in a headless container diff --git a/docs/gh-auth-device-flow.md b/docs/gh-auth-device-flow.md new file mode 100644 index 00000000..83e3ad7b --- /dev/null +++ b/docs/gh-auth-device-flow.md @@ -0,0 +1,92 @@ +# GitHub CLI Authentication in Agent Environments + +How to authenticate `gh` (GitHub CLI) when the agent runs in a headless container and the user may be on mobile. 
+ +## Why `gh` auth matters + +`gh` is one of the most common tools agents use to interact with GitHub — reviewing PRs, creating issues, commenting, approving, merging, etc. Before the agent can do any of this, `gh` must be authenticated. + +## Challenges + +This isn't a typical `gh login` scenario. Three things make it tricky: + +1. **The agent runs in a K8s pod with no browser** — `gh auth login --web` can't open a browser, so device flow (code + URL) is the only option +2. **The user might be on mobile, not at a desktop** — they're chatting via Discord on their phone, so the agent must send the URL and code as a clickable message +3. **The user authorizes on their phone** — they tap the link, enter the code in mobile Safari/Chrome, and the agent's background process picks up the token automatically + +``` +┌───────────┐ "review PR #108" ┌───────────┐ gh pr view ┌───────────┐ +│ Discord │──────────────────►│ OpenAB │────────────►│ GitHub │ +│ User │ │ + Agent │◄────────────│ API │ +└───────────┘ └─────┬─────┘ 401 🚫 └───────────┘ + │ + │ needs gh auth login first! + ▼ + ┌───────────┐ device flow ┌───────────┐ + │ Agent │─────────────►│ GitHub │ + │ (nohup) │ code+URL │ /login/ │ + └─────┬─────┘◄─────────────│ device │ + │ └─────┬─────┘ + │ sends code+URL │ + ▼ │ + ┌───────────┐ authorize ┌─────▼─────┐ + │ Discord │─────────────►│ Browser │ + │ User │ enters code │ (mobile) │ + └───────────┘ └───────────┘ +``` + +## The problem with naive approaches + +`gh auth login --web` uses device flow: it prints a one-time code + URL, then polls GitHub until the user authorizes. In an agent environment the shell is synchronous — it blocks until the command finishes: + +| Approach | What happens | +|---|---| +| Run directly | Blocks forever. User never sees the code. | +| `timeout N gh auth login -w` | Code appears only after timeout kills the process — token is never saved. 
| + +## Solution: `nohup` + background + read log + +```bash +nohup gh auth login --hostname github.com --git-protocol https -p https -w > /tmp/gh-login.log 2>&1 & +sleep 3 && cat /tmp/gh-login.log +``` + +How it works: +1. `nohup ... &` runs `gh` in the background so the shell returns immediately +2. `sleep 3 && cat` reads the log after `gh` has printed the code + URL +3. The agent sends the code + URL to the user (via Discord) +4. The user opens the link (even on mobile), enters the code +5. `gh` detects the authorization and saves the token +6. Done — `gh auth status` confirms login + +## Verify + +```bash +gh auth status +``` + +## Steering / prompt snippet (Kiro CLI only) + +> **Note:** This section applies only to [Kiro CLI](https://kiro.dev) agents. Other agent backends (Claude Code, Codex, Gemini) have their own prompt/config mechanisms. + +To make your Kiro agent always handle `gh login` correctly, create `~/.kiro/steering/gh.md`: + +````bash +mkdir -p ~/.kiro/steering +cat > ~/.kiro/steering/gh.md << 'EOF' +# GitHub CLI + +## Device Flow Login + +When asked to "gh login", always use nohup + background + read log: + +```bash +nohup gh auth login --hostname github.com --git-protocol https -p https -w > /tmp/gh-login.log 2>&1 & +sleep 3 && cat /tmp/gh-login.log +``` + +Never use `timeout`. The shell tool is synchronous — it blocks until the command finishes, so stdout won't be visible until then. `nohup` runs it in the background, `sleep 3 && cat` grabs the code immediately. +EOF +```` + +Kiro CLI automatically picks up `~/.kiro/steering/*.md` files as persistent context, so the agent will remember this across all sessions. diff --git a/docs/helm-publishing.md b/docs/helm-publishing.md new file mode 100644 index 00000000..b4976360 --- /dev/null +++ b/docs/helm-publishing.md @@ -0,0 +1,73 @@ +# Helm Chart Publishing + +OpenAB publishes the Helm chart to two channels automatically via the `Release Charts` workflow (`.github/workflows/release.yml`). 
+ +## Channels + +| Channel | URL | Install command | +|---------|-----|-----------------| +| GitHub Pages | `https://openabdev.github.io/openab` | `helm repo add openab https://openabdev.github.io/openab && helm install openab openab/openab` | +| OCI (GHCR) | `oci://ghcr.io/openabdev/charts/openab` | `helm install openab oci://ghcr.io/openabdev/charts/openab` | + +## How it works + +``` +charts/openab/Chart.yaml changed on main + │ + ▼ +┌─────────────────────────────┐ +│ Release Charts workflow │ +│ .github/workflows/ │ +│ release.yml │ +│ │ +│ 1. helm package │ +│ 2. helm push → OCI (GHCR) │ +│ 3. cr upload → GH Release │ +│ 4. cr index → gh-pages │ +│ 5. Update release notes │ +└─────────────────────────────┘ + │ + ▼ + Both channels updated +``` + +### Trigger + +The workflow runs when `charts/openab/Chart.yaml` is pushed to `main`. This happens automatically when the `Build & Release` workflow merges a chart bump PR. + +### OCI Registry + +`helm push` publishes the packaged chart to `oci://ghcr.io/openabdev/charts`. The GHCR packages must be **public** (configured at org level) for unauthenticated pulls. + +### GitHub Pages + +The [`chart-releaser`](https://github.com/helm/chart-releaser) (`cr`) tool uploads the `.tgz` as a GitHub Release asset, then updates `index.yaml` on the `gh-pages` branch. GitHub Pages serves this as a standard Helm repository. + +## Version flow + +``` +PR merged to main (src/ or Dockerfile changes) + → Build & Release workflow + → Builds Docker images (all 4 variants) + → Creates chart bump PR (patch/minor/major) + → App token merges the PR + → Chart.yaml change triggers Release Charts + → Publishes to OCI + GitHub Pages +``` + +## Stable vs beta + +The `Build & Release` workflow accepts two inputs via `workflow_dispatch`: + +| Input | Description | +|-------|-------------| +| `chart_bump` | `patch`, `minor`, or `major` | +| `release` | `true` for stable (e.g. `0.5.1`), omit for beta (e.g. 
`0.5.1-beta.34`) | + +Push-triggered builds always produce beta versions. Use `workflow_dispatch` with `release=true` for stable releases. + +Note: Helm hides beta versions by default. Use `--devel` to see them: + +```bash +helm search repo openab/openab --devel --versions +``` diff --git a/docs/steering/triage.md b/docs/steering/triage.md new file mode 100644 index 00000000..2f3594ee --- /dev/null +++ b/docs/steering/triage.md @@ -0,0 +1,50 @@ +# Issue Triage Guide for openabdev/openab + +## Steps + +1. **Confirm type** — ensure one of: `bug`, `feature`, `guidance` +2. **Verify claims** — be skeptical; find source code or official docs to confirm before accepting a bug report as valid +3. **Set priority** — add exactly one: + - `p0` 🔴 Critical — drop everything + - `p1` 🟠 High — address this sprint + - `p2` 🟡 Medium — planned work + - `p3` 🟤 Low — nice to have +4. **Remove `needs-triage`** — triage complete + +## Priority Guidelines + +| Priority | Criteria | +|----------|----------| +| p0 | Security vulnerability, data loss, entire system down | +| p1 | Major feature broken for a class of users (e.g. 
all Claude Code / Cursor users) | +| p2 | Bug with workaround, or planned feature work | +| p3 | Minor improvement, cosmetic, nice to have | + +## Response Template + +- **Issue at a Glance** — always include an ASCII diagram showing the flow and where things break +- Acknowledge the issue by investigating the relevant source code or official docs +- Confirm root cause or ask clarifying questions +- Link relevant spec/doc references when available +- Invite PR or state next steps +- **Draft response for human approval before posting to the issue comment** + +## Issue at a Glance Example + +``` +Discord User ──► openab ──► Claude Code / Cursor agent + │ + ▼ + session/request_permission + (agent asks: "can I run this tool?") + │ + ▼ + openab auto-reply (WRONG shape): + ┌─────────────────────────────────┐ + │ { "optionId": "allow_always" } │ ← flat, no wrapper + └─────────────────────────────────┘ + │ + ▼ + SDK cannot find `outcome` field + → treats as REFUSAL ❌ +``` diff --git a/docs/stt.md b/docs/stt.md new file mode 100644 index 00000000..157b6f66 --- /dev/null +++ b/docs/stt.md @@ -0,0 +1,164 @@ +# Speech-to-Text (STT) for Voice Messages + +openab can automatically transcribe Discord voice message attachments and forward the transcript to your ACP agent as text. + +## Quick Start + +Add an `[stt]` section to your `config.toml`: + +```toml +[stt] +enabled = true +``` + +If `GROQ_API_KEY` is set in your environment, that's all you need — openab will auto-detect it and use Groq's free tier. 
You can also set the key explicitly: + +```toml +[stt] +enabled = true +api_key = "${GROQ_API_KEY}" +``` + +## How It Works + +``` +Discord voice message (.ogg) + │ + ▼ + openab downloads the audio file + │ + ▼ + POST /audio/transcriptions → STT provider + │ + ▼ + transcript injected as: + "[Voice message transcript]: " + │ + ▼ + ACP agent receives plain text +``` + +The transcript is prepended to the prompt as a `ContentBlock::Text`, so the downstream agent (Kiro CLI, Claude Code, etc.) sees it as regular text input. + +## Configuration Reference + +```toml +[stt] +enabled = true # default: false +api_key = "${GROQ_API_KEY}" # required for cloud providers +model = "whisper-large-v3-turbo" # default +base_url = "https://api.groq.com/openai/v1" # default +``` + +| Field | Required | Default | Description | +|---|---|---|---| +| `enabled` | no | `false` | Enable/disable STT. When disabled, audio attachments are silently skipped. | +| `api_key` | no* | — | API key for the STT provider. *Auto-detected from `GROQ_API_KEY` env var if not set. For local servers, use any non-empty string (e.g. `"not-needed"`). | +| `model` | no | `whisper-large-v3-turbo` | Whisper model name. Varies by provider. | +| `base_url` | no | `https://api.groq.com/openai/v1` | OpenAI-compatible API base URL. | + +## Deployment Options + +openab uses the standard OpenAI-compatible `/audio/transcriptions` endpoint. Any provider that implements this API works — just change `base_url`. 
+ +### Option 1: Groq Cloud (recommended, free tier) + +```toml +[stt] +enabled = true +api_key = "${GROQ_API_KEY}" +``` + +- Free tier with rate limits +- Model: `whisper-large-v3-turbo` (default) +- Sign up at https://console.groq.com + +### Option 2: OpenAI + +```toml +[stt] +enabled = true +api_key = "${OPENAI_API_KEY}" +model = "whisper-1" +base_url = "https://api.openai.com/v1" +``` + +- ~$0.006 per minute of audio +- Model: `whisper-1` + +### Option 3: Local Whisper Server + +For users running openab on a Mac Mini, home lab, or any machine with a local whisper server: + +```toml +[stt] +enabled = true +api_key = "not-needed" +model = "large-v3-turbo" +base_url = "http://localhost:8080/v1" +``` + +- Audio stays local — never leaves your machine +- No API key or cloud account needed +- Apple Silicon users get hardware acceleration + +Compatible local whisper servers: + +| Server | Install | Apple Silicon | +|---|---|---| +| [faster-whisper-server](https://github.com/fedirz/faster-whisper-server) | `pip install faster-whisper-server` | ✅ CoreML | +| [whisper.cpp server](https://github.com/ggerganov/whisper.cpp) | `brew install whisper-cpp` | ✅ Metal | +| [LocalAI](https://github.com/mudler/LocalAI) | Docker or binary | ✅ | + +### Option 4: LAN / Sidecar Server + +Point to a whisper server running on another machine in your network: + +```toml +[stt] +enabled = true +api_key = "not-needed" +base_url = "http://192.168.1.100:8080/v1" +``` + +### Not Supported + +- **Ollama** — does not expose an `/audio/transcriptions` endpoint. + +## Helm Chart (Kubernetes) + +When deploying via the openab Helm chart, STT is a first-class config block — no manual configmap patching needed: + +```bash +helm upgrade openab openab/openab \ + --set agents.kiro.stt.enabled=true \ + --set agents.kiro.stt.apiKey=gsk_xxx +``` + +The API key is stored in a K8s Secret and injected as an env var (never in plaintext in the configmap). 
You can also customize model and endpoint: + +```bash +helm upgrade openab openab/openab \ + --set agents.kiro.stt.enabled=true \ + --set agents.kiro.stt.apiKey=gsk_xxx \ + --set agents.kiro.stt.model=whisper-large-v3-turbo \ + --set agents.kiro.stt.baseUrl=https://api.groq.com/openai/v1 +``` + +## Disabling STT + +Omit the `[stt]` section entirely, or set: + +```toml +[stt] +enabled = false +``` + +When disabled, audio attachments are silently skipped with no impact on existing functionality. + +## Technical Notes + +- openab sends `response_format=json` in the transcription request to ensure the response is always parseable JSON. Some local whisper servers default to plain text output without this parameter. +- The actual MIME type from the Discord attachment is passed through to the STT API (e.g. `audio/ogg`, `audio/mp4`, `audio/wav`). +- Environment variables in config values are expanded via `${VAR}` syntax (e.g. `api_key = "${GROQ_API_KEY}"`). +- The `api_key` field is auto-detected from the `GROQ_API_KEY` environment variable when using the default Groq endpoint. If you set a custom `base_url` (e.g. local server), auto-detect is disabled to avoid leaking the Groq key to unrelated endpoints — you must set `api_key` explicitly. 
diff --git a/k8s/configmap.yaml b/k8s/configmap.yaml index 6af4f32a..79f9f791 100644 --- a/k8s/configmap.yaml +++ b/k8s/configmap.yaml @@ -1,7 +1,7 @@ apiVersion: v1 kind: ConfigMap metadata: - name: agent-broker-config + name: openab-config data: config.toml: | [discord] @@ -11,7 +11,7 @@ data: [agent] command = "kiro-cli" args = ["acp", "--trust-all-tools"] - working_dir = "/tmp" + working_dir = "/home/agent" [pool] max_sessions = 10 diff --git a/k8s/deployment.yaml b/k8s/deployment.yaml index a3574ea7..cb12c2ba 100644 --- a/k8s/deployment.yaml +++ b/k8s/deployment.yaml @@ -1,36 +1,46 @@ apiVersion: apps/v1 kind: Deployment metadata: - name: agent-broker + name: openab labels: - app: agent-broker + app: openab spec: replicas: 1 strategy: type: Recreate # PVC is ReadWriteOnce selector: matchLabels: - app: agent-broker + app: openab template: metadata: labels: - app: agent-broker + app: openab spec: + securityContext: + runAsNonRoot: true + runAsUser: 1000 + runAsGroup: 1000 + fsGroup: 1000 containers: - - name: agent-broker - image: agent-broker:latest + - name: openab + image: openab:latest imagePullPolicy: Never + securityContext: + allowPrivilegeEscalation: false + capabilities: + drop: + - ALL env: - name: DISCORD_BOT_TOKEN valueFrom: secretKeyRef: - name: agent-broker-secret + name: openab-secret key: discord-bot-token - name: HOME value: /home/agent volumeMounts: - name: config - mountPath: /etc/agent-broker + mountPath: /etc/openab readOnly: true - name: data mountPath: /home/agent/.kiro @@ -41,7 +51,7 @@ spec: volumes: - name: config configMap: - name: agent-broker-config + name: openab-config - name: data persistentVolumeClaim: - claimName: agent-broker-data + claimName: openab-data diff --git a/k8s/pvc.yaml b/k8s/pvc.yaml index 1df2a630..0f25a3c0 100644 --- a/k8s/pvc.yaml +++ b/k8s/pvc.yaml @@ -1,7 +1,7 @@ apiVersion: v1 kind: PersistentVolumeClaim metadata: - name: agent-broker-data + name: openab-data spec: accessModes: - ReadWriteOnce diff --git 
a/k8s/secret.yaml b/k8s/secret.yaml index 2fd3e91e..914116b8 100644 --- a/k8s/secret.yaml +++ b/k8s/secret.yaml @@ -1,7 +1,7 @@ apiVersion: v1 kind: Secret metadata: - name: agent-broker-secret + name: openab-secret type: Opaque stringData: discord-bot-token: "REPLACE_ME" diff --git a/src/acp/connection.rs b/src/acp/connection.rs index 991ed9f2..53770509 100644 --- a/src/acp/connection.rs +++ b/src/acp/connection.rs @@ -20,6 +20,29 @@ fn expand_env(val: &str) -> String { } use tokio::time::Instant; +/// A content block for the ACP prompt — either text or image. +#[derive(Debug, Clone)] +pub enum ContentBlock { + Text { text: String }, + Image { media_type: String, data: String }, +} + +impl ContentBlock { + pub fn to_json(&self) -> Value { + match self { + ContentBlock::Text { text } => json!({ + "type": "text", + "text": text + }), + ContentBlock::Image { media_type, data } => json!({ + "type": "image", + "data": data, + "mimeType": media_type + }), + } + } +} + pub struct AcpConnection { _proc: Child, stdin: Arc>, @@ -209,7 +232,7 @@ impl AcpConnection { Some(json!({ "protocolVersion": 1, "clientCapabilities": {}, - "clientInfo": {"name": "agent-broker", "version": "0.1.0"}, + "clientInfo": {"name": "openab", "version": "0.1.0"}, })), ) .await?; @@ -242,11 +265,12 @@ impl AcpConnection { Ok(session_id) } - /// Send a prompt and return a receiver for streaming notifications. - /// The final message on the channel will have id set (the prompt response). + /// Send a prompt with content blocks (text and/or images) and return a receiver + /// for streaming notifications. The final message on the channel will have id set + /// (the prompt response). 
pub async fn session_prompt( &mut self, - prompt: &str, + content_blocks: Vec, ) -> Result<(mpsc::UnboundedReceiver, u64)> { self.last_active = Instant::now(); @@ -259,12 +283,19 @@ impl AcpConnection { *self.notify_tx.lock().await = Some(tx); let id = self.next_id(); + + // Convert content blocks to JSON + let prompt_json: Vec = content_blocks + .iter() + .map(|b| b.to_json()) + .collect(); + let req = JsonRpcRequest::new( id, "session/prompt", Some(json!({ "sessionId": session_id, - "prompt": [{"type": "text", "text": prompt}], + "prompt": prompt_json, })), ); let data = serde_json::to_string(&req)?; diff --git a/src/acp/mod.rs b/src/acp/mod.rs index 1ae3b8be..c67cad82 100644 --- a/src/acp/mod.rs +++ b/src/acp/mod.rs @@ -4,3 +4,4 @@ pub mod protocol; pub use pool::SessionPool; pub use protocol::{classify_notification, AcpEvent}; +pub use connection::ContentBlock; diff --git a/src/acp/protocol.rs b/src/acp/protocol.rs index d3e96ed5..82f00eb8 100644 --- a/src/acp/protocol.rs +++ b/src/acp/protocol.rs @@ -60,8 +60,8 @@ impl std::fmt::Display for JsonRpcError { pub enum AcpEvent { Text(String), Thinking, - ToolStart { title: String }, - ToolDone { title: String, status: String }, + ToolStart { id: String, title: String }, + ToolDone { id: String, title: String, status: String }, Status, } @@ -70,6 +70,19 @@ pub fn classify_notification(msg: &JsonRpcMessage) -> Option { let update = params.get("update")?; let session_update = update.get("sessionUpdate")?.as_str()?; + // toolCallId is the stable identity across tool_call → tool_call_update + // events for the same tool invocation. claude-agent-acp emits the first + // event before the input fields are streamed in (so the title falls back + // to "Terminal" / "Edit" / etc.) and refines them in a later + // tool_call_update; without the id we can't tell those events belong to + // the same call and end up rendering placeholder + refined as two + // separate lines. 
+ let tool_id = update + .get("toolCallId") + .and_then(|v| v.as_str()) + .unwrap_or("") + .to_string(); + match session_update { "agent_message_chunk" => { let text = update.get("content")?.get("text")?.as_str()?; @@ -80,15 +93,15 @@ pub fn classify_notification(msg: &JsonRpcMessage) -> Option { } "tool_call" => { let title = update.get("title").and_then(|v| v.as_str()).unwrap_or("").to_string(); - Some(AcpEvent::ToolStart { title }) + Some(AcpEvent::ToolStart { id: tool_id, title }) } "tool_call_update" => { let title = update.get("title").and_then(|v| v.as_str()).unwrap_or("").to_string(); let status = update.get("status").and_then(|v| v.as_str()).unwrap_or("").to_string(); if status == "completed" || status == "failed" { - Some(AcpEvent::ToolDone { title, status }) + Some(AcpEvent::ToolDone { id: tool_id, title, status }) } else { - Some(AcpEvent::ToolStart { title }) + Some(AcpEvent::ToolStart { id: tool_id, title }) } } "plan" => Some(AcpEvent::Status), diff --git a/src/config.rs b/src/config.rs index 719feafa..c4ed3d30 100644 --- a/src/config.rs +++ b/src/config.rs @@ -11,13 +11,43 @@ pub struct Config { pub pool: PoolConfig, #[serde(default)] pub reactions: ReactionsConfig, + #[serde(default)] + pub stt: SttConfig, +} + +#[derive(Debug, Clone, Deserialize)] +pub struct SttConfig { + #[serde(default)] + pub enabled: bool, + #[serde(default)] + pub api_key: String, + #[serde(default = "default_stt_model")] + pub model: String, + #[serde(default = "default_stt_base_url")] + pub base_url: String, } +impl Default for SttConfig { + fn default() -> Self { + Self { + enabled: false, + api_key: String::new(), + model: default_stt_model(), + base_url: default_stt_base_url(), + } + } +} + +fn default_stt_model() -> String { "whisper-large-v3-turbo".into() } +fn default_stt_base_url() -> String { "https://api.groq.com/openai/v1".into() } + #[derive(Debug, Deserialize)] pub struct DiscordConfig { pub bot_token: String, #[serde(default)] pub allowed_channels: Vec, + 
#[serde(default)] + pub allowed_users: Vec, } #[derive(Debug, Deserialize)] diff --git a/src/discord.rs b/src/discord.rs index c46d60d5..e267064e 100644 --- a/src/discord.rs +++ b/src/discord.rs @@ -1,21 +1,38 @@ -use crate::acp::{classify_notification, AcpEvent, SessionPool}; -use crate::config::ReactionsConfig; +use crate::acp::{classify_notification, AcpEvent, ContentBlock, SessionPool}; +use crate::config::{ReactionsConfig, SttConfig}; +use crate::error_display::{format_coded_error, format_user_error}; use crate::format; use crate::reactions::StatusReactionController; +use base64::engine::general_purpose::STANDARD as BASE64; +use base64::Engine; +use image::ImageReader; +use std::io::Cursor; +use std::sync::LazyLock; use serenity::async_trait; -use serenity::model::channel::Message; +use serenity::model::channel::{Message, ReactionType}; use serenity::model::gateway::Ready; use serenity::model::id::{ChannelId, MessageId}; use serenity::prelude::*; use std::collections::HashSet; use std::sync::Arc; use tokio::sync::watch; -use tracing::{error, info}; +use tracing::{debug, error, info}; + +/// Reusable HTTP client for downloading Discord attachments. +/// Built once with a 30s timeout and rustls TLS (no native-tls deps). 
+static HTTP_CLIENT: LazyLock = LazyLock::new(|| { + reqwest::Client::builder() + .timeout(std::time::Duration::from_secs(30)) + .build() + .expect("static HTTP client must build") +}); pub struct Handler { pub pool: Arc, pub allowed_channels: HashSet, + pub allowed_users: HashSet, pub reactions_config: ReactionsConfig, + pub stt_config: SttConfig, } #[async_trait] @@ -37,10 +54,21 @@ impl EventHandler for Handler { let in_thread = if !in_allowed_channel { match msg.channel_id.to_channel(&ctx.http).await { - Ok(serenity::model::channel::Channel::Guild(gc)) => gc - .parent_id - .map_or(false, |pid| self.allowed_channels.contains(&pid.get())), - _ => false, + Ok(serenity::model::channel::Channel::Guild(gc)) => { + let result = gc + .parent_id + .is_some_and(|pid| self.allowed_channels.contains(&pid.get())); + tracing::debug!(channel_id = %msg.channel_id, parent_id = ?gc.parent_id, result, "thread check"); + result + } + Ok(other) => { + tracing::debug!(channel_id = %msg.channel_id, kind = ?other, "not a guild channel"); + false + } + Err(e) => { + tracing::debug!(channel_id = %msg.channel_id, error = %e, "to_channel failed"); + false + } } } else { false @@ -53,16 +81,83 @@ impl EventHandler for Handler { return; } + if !self.allowed_users.is_empty() && !self.allowed_users.contains(&msg.author.id.get()) { + tracing::info!(user_id = %msg.author.id, "denied user, ignoring"); + if let Err(e) = msg.react(&ctx.http, ReactionType::Unicode("🚫".into())).await { + tracing::warn!(error = %e, "failed to react with 🚫"); + } + return; + } + let prompt = if is_mentioned { strip_mention(&msg.content) } else { msg.content.trim().to_string() }; - if prompt.is_empty() { + + // No text and no image attachments → skip to avoid wasting session slots + if prompt.is_empty() && msg.attachments.is_empty() { return; } - tracing::debug!(prompt = %prompt, in_thread, "processing"); + // Build content blocks: text + image attachments + let mut content_blocks = vec![]; + + // Inject structured 
sender context so the downstream CLI can identify who sent the message + let display_name = msg.member.as_ref() + .and_then(|m| m.nick.as_ref()) + .unwrap_or(&msg.author.name); + let sender_ctx = serde_json::json!({ + "schema": "openab.sender.v1", + "sender_id": msg.author.id.to_string(), + "sender_name": msg.author.name, + "display_name": display_name, + "channel": "discord", + "channel_id": msg.channel_id.to_string(), + "is_bot": msg.author.bot, + }); + let prompt_with_sender = format!( + "\n{}\n\n\n{}", + serde_json::to_string(&sender_ctx).unwrap(), + prompt + ); + + // Add text block (always, even if empty, we still send for sender context) + content_blocks.push(ContentBlock::Text { + text: prompt_with_sender.clone(), + }); + + // Process attachments: route by content type (audio → STT, image → encode) + if !msg.attachments.is_empty() { + for attachment in &msg.attachments { + if is_audio_attachment(attachment) { + if self.stt_config.enabled { + if let Some(transcript) = download_and_transcribe(attachment, &self.stt_config).await { + debug!(filename = %attachment.filename, chars = transcript.len(), "voice transcript injected"); + content_blocks.insert(0, ContentBlock::Text { + text: format!("[Voice message transcript]: {transcript}"), + }); + } + } else { + debug!(filename = %attachment.filename, "skipping audio attachment (STT disabled)"); + } + } else if let Some(content_block) = download_and_encode_image(attachment).await { + debug!(url = %attachment.url, filename = %attachment.filename, "adding image attachment"); + content_blocks.push(content_block); + } + } + } + + tracing::debug!( + text_len = prompt_with_sender.len(), + num_attachments = msg.attachments.len(), + in_thread, + "processing" + ); + + // Note: image-only messages (no text) are intentionally allowed since + // prompt_with_sender always includes the non-empty sender_context XML. + // The guard above (prompt.is_empty() && no attachments) handles stickers/embeds. 
let thread_id = if in_thread { msg.channel_id.get() @@ -88,7 +183,8 @@ impl EventHandler for Handler { let thread_key = thread_id.to_string(); if let Err(e) = self.pool.get_or_create(&thread_key).await { - let _ = edit(&ctx, thread_channel, thinking_msg.id, "⚠️ Failed to start agent.").await; + let msg = format_user_error(&e.to_string()); + let _ = edit(&ctx, thread_channel, thinking_msg.id, &format!("⚠️ {}", msg)).await; error!("pool error: {e}"); return; } @@ -104,11 +200,11 @@ impl EventHandler for Handler { )); reactions.set_queued().await; - // Stream prompt with live edits + // Stream prompt with live edits (pass content blocks instead of just text) let result = stream_prompt( &self.pool, &thread_key, - &prompt, + content_blocks, &ctx, thread_channel, thinking_msg.id, @@ -145,6 +241,175 @@ impl EventHandler for Handler { } } +/// Check if an attachment is an audio file (voice messages are typically audio/ogg). +fn is_audio_attachment(attachment: &serenity::model::channel::Attachment) -> bool { + let mime = attachment.content_type.as_deref().unwrap_or(""); + mime.starts_with("audio/") +} + +/// Download an audio attachment and transcribe it via the configured STT provider. 
+async fn download_and_transcribe( + attachment: &serenity::model::channel::Attachment, + stt_config: &SttConfig, +) -> Option<String> { + const MAX_SIZE: u64 = 25 * 1024 * 1024; // 25 MB (Whisper API limit) + + if u64::from(attachment.size) > MAX_SIZE { + error!(filename = %attachment.filename, size = attachment.size, "audio exceeds 25MB limit"); + return None; + } + + let resp = HTTP_CLIENT.get(&attachment.url).send().await.ok()?; + if !resp.status().is_success() { + error!(url = %attachment.url, status = %resp.status(), "audio download failed"); + return None; + } + let bytes = resp.bytes().await.ok()?.to_vec(); + + let mime_type = attachment.content_type.as_deref().unwrap_or("audio/ogg"); + let mime_type = mime_type.split(';').next().unwrap_or(mime_type).trim(); + + crate::stt::transcribe(&HTTP_CLIENT, stt_config, bytes, attachment.filename.clone(), mime_type).await +} + +/// Maximum dimension (width or height) for resized images. +/// Matches OpenClaw's DEFAULT_IMAGE_MAX_DIMENSION_PX. +const IMAGE_MAX_DIMENSION_PX: u32 = 1200; + +/// JPEG quality for compressed output (OpenClaw uses progressive 85→35; +/// we start at 75 which is a good balance of quality vs size). +const IMAGE_JPEG_QUALITY: u8 = 75; + +/// Download a Discord image attachment, resize/compress it, then base64-encode +/// as an ACP image content block. +/// +/// Large images are resized so the longest side is at most 1200px and +/// re-encoded as JPEG at quality 75. This keeps the base64 payload well +/// under typical JSON-RPC transport limits (~200-400KB after encoding). 
+async fn download_and_encode_image(attachment: &serenity::model::channel::Attachment) -> Option<ContentBlock> { + const MAX_SIZE: u64 = 10 * 1024 * 1024; // 10 MB + + let url = &attachment.url; + if url.is_empty() { + return None; + } + + // Determine media type — prefer content-type header, fallback to extension + let media_type = attachment + .content_type + .as_deref() + .or_else(|| { + attachment + .filename + .rsplit('.') + .next() + .and_then(|ext| match ext.to_lowercase().as_str() { + "png" => Some("image/png"), + "jpg" | "jpeg" => Some("image/jpeg"), + "gif" => Some("image/gif"), + "webp" => Some("image/webp"), + _ => None, + }) + }); + + let Some(mime) = media_type else { + debug!(filename = %attachment.filename, "skipping non-image attachment"); + return None; + }; + let mime = mime.split(';').next().unwrap_or(mime).trim(); + if !mime.starts_with("image/") { + debug!(filename = %attachment.filename, mime = %mime, "skipping non-image attachment"); + return None; + } + + if u64::from(attachment.size) > MAX_SIZE { + error!(filename = %attachment.filename, size = attachment.size, "image exceeds 10MB limit"); + return None; + } + + let response = match HTTP_CLIENT.get(url).send().await { + Ok(resp) => resp, + Err(e) => { error!(url = %url, error = %e, "download failed"); return None; } + }; + if !response.status().is_success() { + error!(url = %url, status = %response.status(), "HTTP error downloading image"); + return None; + } + let bytes = match response.bytes().await { + Ok(b) => b, + Err(e) => { error!(url = %url, error = %e, "read failed"); return None; } + }; + + // Defense-in-depth: verify actual download size + if bytes.len() as u64 > MAX_SIZE { + error!(filename = %attachment.filename, size = bytes.len(), "downloaded image exceeds limit"); + return None; + } + + // Resize and compress + let (output_bytes, output_mime) = match resize_and_compress(&bytes) { + Ok(result) => result, + Err(e) => { + // Fallback: use original bytes but reject if too large for transport 
+ if bytes.len() > 1024 * 1024 { + error!(filename = %attachment.filename, error = %e, size = bytes.len(), "resize failed and original too large, skipping"); + return None; + } + debug!(filename = %attachment.filename, error = %e, "resize failed, using original"); + (bytes.to_vec(), mime.to_string()) + } + }; + + debug!( + filename = %attachment.filename, + original_size = bytes.len(), + compressed_size = output_bytes.len(), + "image processed" + ); + + let encoded = BASE64.encode(&output_bytes); + Some(ContentBlock::Image { + media_type: output_mime, + data: encoded, + }) +} + +/// Resize image so longest side ≤ IMAGE_MAX_DIMENSION_PX, then encode as JPEG. +/// Returns (compressed_bytes, mime_type). GIFs are passed through unchanged +/// to preserve animation. +fn resize_and_compress(raw: &[u8]) -> Result<(Vec, String), image::ImageError> { + let reader = ImageReader::new(Cursor::new(raw)) + .with_guessed_format()?; + + let format = reader.format(); + + // Pass through GIFs unchanged to preserve animation + if format == Some(image::ImageFormat::Gif) { + return Ok((raw.to_vec(), "image/gif".to_string())); + } + + let img = reader.decode()?; + let (w, h) = (img.width(), img.height()); + + // Resize preserving aspect ratio: scale so longest side = 1200px + let img = if w > IMAGE_MAX_DIMENSION_PX || h > IMAGE_MAX_DIMENSION_PX { + let max_side = std::cmp::max(w, h); + let ratio = f64::from(IMAGE_MAX_DIMENSION_PX) / f64::from(max_side); + let new_w = (f64::from(w) * ratio) as u32; + let new_h = (f64::from(h) * ratio) as u32; + img.resize(new_w, new_h, image::imageops::FilterType::Lanczos3) + } else { + img + }; + + // Encode as JPEG + let mut buf = Cursor::new(Vec::new()); + let encoder = image::codecs::jpeg::JpegEncoder::new_with_quality(&mut buf, IMAGE_JPEG_QUALITY); + img.write_with_encoder(encoder)?; + + Ok((buf.into_inner(), "image/jpeg".to_string())) +} + async fn edit(ctx: &Context, ch: ChannelId, msg_id: MessageId, content: &str) -> serenity::Result { 
ch.edit_message(&ctx.http, msg_id, serenity::builder::EditMessage::new().content(content)).await } @@ -152,24 +417,23 @@ async fn edit(ctx: &Context, ch: ChannelId, msg_id: MessageId, content: &str) -> async fn stream_prompt( pool: &SessionPool, thread_key: &str, - prompt: &str, + content_blocks: Vec, ctx: &Context, channel: ChannelId, msg_id: MessageId, reactions: Arc, ) -> anyhow::Result<()> { - let prompt = prompt.to_string(); let reactions = reactions.clone(); pool.with_connection(thread_key, |conn| { - let prompt = prompt.clone(); + let content_blocks = content_blocks.clone(); let ctx = ctx.clone(); let reactions = reactions.clone(); Box::pin(async move { let reset = conn.session_reset; conn.session_reset = false; - let (mut rx, _) = conn.session_prompt(&prompt).await?; + let (mut rx, _): (_, _) = conn.session_prompt(content_blocks).await?; reactions.set_thinking().await; let initial = if reset { @@ -180,38 +444,39 @@ async fn stream_prompt( let (buf_tx, buf_rx) = watch::channel(initial); let mut text_buf = String::new(); - let mut tool_lines: Vec = Vec::new(); + // Tool calls indexed by toolCallId. Vec preserves first-seen + // order. We store id + title + state separately so a ToolDone + // event that arrives without a refreshed title (claude-agent-acp's + // update events don't always re-send the title field) can still + // reuse the title we already learned from a prior + // tool_call_update — only the icon flips 🔧 → ✅ / ❌. Rendering + // happens on the fly in compose_display(). + let mut tool_lines: Vec = Vec::new(); let current_msg_id = msg_id; if reset { text_buf.push_str("⚠️ _Session expired, starting fresh..._\n\n"); } - // Spawn edit-streaming task + // Spawn edit-streaming task — only edits the single message, never sends new ones. + // Long content is truncated during streaming; final multi-message split happens after. 
let edit_handle = { let ctx = ctx.clone(); let mut buf_rx = buf_rx.clone(); tokio::spawn(async move { let mut last_content = String::new(); - let mut current_edit_msg = msg_id; loop { tokio::time::sleep(std::time::Duration::from_millis(1500)).await; if buf_rx.has_changed().unwrap_or(false) { let content = buf_rx.borrow_and_update().clone(); if content != last_content { - if content.len() > 1900 { - let chunks = format::split_message(&content, 1900); - if let Some(first) = chunks.first() { - let _ = edit(&ctx, channel, current_edit_msg, first).await; - } - for chunk in chunks.iter().skip(1) { - if let Ok(new_msg) = channel.say(&ctx.http, chunk).await { - current_edit_msg = new_msg.id; - } - } + let display = if content.chars().count() > 1900 { + let truncated = format::truncate_chars(&content, 1900); + format!("{truncated}…") } else { - let _ = edit(&ctx, channel, current_edit_msg, &content).await; - } + content.clone() + }; + let _ = edit(&ctx, channel, msg_id, &display).await; last_content = content; } } @@ -224,8 +489,13 @@ async fn stream_prompt( // Process ACP notifications let mut got_first_text = false; + let mut response_error: Option = None; while let Some(notification) = rx.recv().await { if notification.id.is_some() { + // Capture error from ACP response to display in Discord + if let Some(ref err) = notification.error { + response_error = Some(format_coded_error(err.code, &err.message)); + } break; } @@ -242,16 +512,53 @@ async fn stream_prompt( AcpEvent::Thinking => { reactions.set_thinking().await; } - AcpEvent::ToolStart { title, .. } if !title.is_empty() => { + AcpEvent::ToolStart { id, title } if !title.is_empty() => { reactions.set_tool(&title).await; - tool_lines.push(format!("🔧 `{title}`...")); + let title = sanitize_title(&title); + // Dedupe by toolCallId: replace if we've already + // seen this id, otherwise append a new entry. + // claude-agent-acp emits a placeholder title + // ("Terminal", "Edit", etc.) 
on the first event + // and refines it via tool_call_update; without + // dedup the placeholder and refined version + // appear as two separate orphaned lines. + if let Some(slot) = tool_lines.iter_mut().find(|e| e.id == id) { + slot.title = title; + slot.state = ToolState::Running; + } else { + tool_lines.push(ToolEntry { + id, + title, + state: ToolState::Running, + }); + } let _ = buf_tx.send(compose_display(&tool_lines, &text_buf)); } - AcpEvent::ToolDone { title, status, .. } => { + AcpEvent::ToolDone { id, title, status } => { reactions.set_thinking().await; - let icon = if status == "completed" { "✅" } else { "❌" }; - if let Some(line) = tool_lines.iter_mut().rev().find(|l| l.contains(&title)) { - *line = format!("{icon} `{title}`"); + let new_state = if status == "completed" { + ToolState::Completed + } else { + ToolState::Failed + }; + // Find by id (the title is unreliable — substring + // match against the placeholder "Terminal" would + // never find the refined entry). Preserve the + // existing title if the Done event omits it. + if let Some(slot) = tool_lines.iter_mut().find(|e| e.id == id) { + if !title.is_empty() { + slot.title = sanitize_title(&title); + } + slot.state = new_state; + } else if !title.is_empty() { + // Done arrived without a prior Start (rare + // race) — record it so we still show + // something. + tool_lines.push(ToolEntry { + id, + title: sanitize_title(&title), + state: new_state, + }); } let _ = buf_tx.send(compose_display(&tool_lines, &text_buf)); } @@ -266,8 +573,18 @@ async fn stream_prompt( // Final edit let final_content = compose_display(&tool_lines, &text_buf); + // If ACP returned both an error and partial text, show both. + // This can happen when the agent started producing content before hitting an error + // (e.g. context length limit, rate limit mid-stream). Showing both gives users + // full context rather than hiding the partial response. 
/// Flatten a tool-call title onto a single line that renders safely
/// inside a Discord inline-code span. Discord shows single-backtick code
/// on one line only, so multi-line shell commands (heredocs, `&&` chains
/// split across lines) would appear truncated; carriage returns are
/// dropped, newlines become " ; ", and embedded backticks become single
/// quotes so they cannot terminate the surrounding span.
fn sanitize_title(title: &str) -> String {
    let mut out = String::with_capacity(title.len());
    for ch in title.chars() {
        match ch {
            '\r' => {}                   // strip carriage returns entirely
            '\n' => out.push_str(" ; "), // keep multi-line commands on one line
            '`' => out.push('\''),       // don't break the inline-code span
            other => out.push(other),
        }
    }
    out
}
} else { "" }; + format!("{icon} `{}`{}", self.title, suffix) + } +} + +fn compose_display(tool_lines: &[ToolEntry], text: &str) -> String { let mut out = String::new(); if !tool_lines.is_empty() { - for line in tool_lines { - out.push_str(line); + for entry in tool_lines { + out.push_str(&entry.render()); out.push('\n'); } out.push('\n'); @@ -300,9 +653,12 @@ fn compose_display(tool_lines: &[String], text: &str) -> String { out } +static MENTION_RE: LazyLock = LazyLock::new(|| { + regex::Regex::new(r"<@[!&]?\d+>").unwrap() +}); + fn strip_mention(content: &str) -> String { - let re = regex::Regex::new(r"<@[!&]?\d+>").unwrap(); - re.replace_all(content, "").trim().to_string() + MENTION_RE.replace_all(content, "").trim().to_string() } fn shorten_thread_name(prompt: &str) -> String { @@ -339,3 +695,88 @@ async fn get_or_create_thread(ctx: &Context, msg: &Message, prompt: &str) -> any Ok(thread.id.get()) } + + +#[cfg(test)] +mod tests { + use super::*; + + fn make_png(width: u32, height: u32) -> Vec { + let img = image::RgbImage::new(width, height); + let mut buf = Cursor::new(Vec::new()); + img.write_to(&mut buf, image::ImageFormat::Png).unwrap(); + buf.into_inner() + } + + #[test] + fn large_image_resized_to_max_dimension() { + let png = make_png(3000, 2000); + let (compressed, mime) = resize_and_compress(&png).unwrap(); + + assert_eq!(mime, "image/jpeg"); + let result = image::load_from_memory(&compressed).unwrap(); + assert!(result.width() <= IMAGE_MAX_DIMENSION_PX); + assert!(result.height() <= IMAGE_MAX_DIMENSION_PX); + } + + #[test] + fn small_image_keeps_original_dimensions() { + let png = make_png(800, 600); + let (compressed, mime) = resize_and_compress(&png).unwrap(); + + assert_eq!(mime, "image/jpeg"); + let result = image::load_from_memory(&compressed).unwrap(); + assert_eq!(result.width(), 800); + assert_eq!(result.height(), 600); + } + + #[test] + fn landscape_image_respects_aspect_ratio() { + let png = make_png(4000, 2000); + let (compressed, _) = 
resize_and_compress(&png).unwrap(); + + let result = image::load_from_memory(&compressed).unwrap(); + assert_eq!(result.width(), 1200); + assert_eq!(result.height(), 600); + } + + #[test] + fn portrait_image_respects_aspect_ratio() { + let png = make_png(2000, 4000); + let (compressed, _) = resize_and_compress(&png).unwrap(); + + let result = image::load_from_memory(&compressed).unwrap(); + assert_eq!(result.width(), 600); + assert_eq!(result.height(), 1200); + } + + #[test] + fn compressed_output_is_smaller_than_original() { + let png = make_png(3000, 2000); + let (compressed, _) = resize_and_compress(&png).unwrap(); + + assert!(compressed.len() < png.len(), "compressed {} should be < original {}", compressed.len(), png.len()); + } + + #[test] + fn gif_passes_through_unchanged() { + // Minimal valid GIF89a (1x1 pixel) + let gif: Vec = vec![ + 0x47, 0x49, 0x46, 0x38, 0x39, 0x61, // GIF89a + 0x01, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, // logical screen descriptor + 0x2C, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x01, 0x00, 0x00, // image descriptor + 0x02, 0x02, 0x44, 0x01, 0x00, // image data + 0x3B, // trailer + ]; + let (output, mime) = resize_and_compress(&gif).unwrap(); + + assert_eq!(mime, "image/gif"); + assert_eq!(output, gif); + } + + #[test] + fn invalid_data_returns_error() { + let garbage = vec![0x00, 0x01, 0x02, 0x03]; + assert!(resize_and_compress(&garbage).is_err()); + } +} diff --git a/src/error_display.rs b/src/error_display.rs new file mode 100644 index 00000000..40f1479a --- /dev/null +++ b/src/error_display.rs @@ -0,0 +1,212 @@ +/// Format any error for user display in Discord. +/// +/// Handles two error categories: +/// - **Coded errors** (code != 0): JSON-RPC or HTTP status codes from upstream agent. +/// - **Startup/connection errors** (code == 0): Errors from pool.rs or connection.rs +/// where only the message string is available. +/// +/// Provider-agnostic: no provider-specific strings, message text passed through verbatim. 
/// Format any error for user display in Discord.
///
/// Handles startup/connection errors (code == 0 from `anyhow` in pool.rs
/// or connection.rs) where only the message string is available. Matching
/// is case-insensitive; unknown messages are passed through verbatim
/// under a generic **Error** heading.
///
/// Provider-agnostic: no provider-specific strings, message text passed
/// through verbatim.
pub fn format_user_error(message: &str) -> String {
    // ASCII lowercasing is guaranteed to preserve byte length and char
    // boundaries, so byte offsets found in `msg_lower` are valid indices
    // into `message`. (`to_lowercase()` is NOT length-preserving for all
    // of Unicode — e.g. 'İ' expands from 2 to 3 bytes — which could shift
    // the offset or panic on a non-boundary slice.) All matched keywords
    // below are ASCII, so no case-insensitivity is lost.
    let msg_lower = message.to_ascii_lowercase();

    // Startup / connection errors (code == 0 from anyhow)
    if msg_lower.contains("timeout waiting for") {
        if let Some(start) = msg_lower.find("timeout waiting for ") {
            // Extract the method name (e.g. "session/new") that follows
            // the marker, preserving its original casing from `message`.
            let rest = &message[start + "timeout waiting for ".len()..];
            let method = rest.split_whitespace().next().unwrap_or("request");
            return format!("**Request Timeout**\nTimeout waiting for {}, please try again.", method);
        }
        return "**Request Timeout**\nTimeout waiting for a response, please try again.".to_string();
    }
    if msg_lower.contains("connection closed") || msg_lower.contains("channel closed") {
        return "**Connection Lost**\nThe connection to the agent was lost, please try again.".to_string();
    }
    if msg_lower.contains("failed to spawn") || msg_lower.contains("no such file") {
        return "**Agent Not Found**\nCould not start the agent — please check your configuration.".to_string();
    }
    if msg_lower.contains("pool exhausted") {
        return "**Service Busy**\nAll agent sessions are in use, please try again shortly.".to_string();
    }
    if msg_lower.contains("invalid api key") || msg_lower.contains("unauthorized") {
        return "**Unauthorized**\nPlease check your API key configuration.".to_string();
    }

    // Unknown error — pass through as-is
    if message.is_empty() {
        "**Error**\nAn unknown error occurred.".to_string()
    } else {
        format!("**Error**\n{}", message)
    }
}
/// Render an ACP agent error that carries a JSON-RPC or HTTP status code
/// as a Discord-friendly string: a bold category heading, the numeric
/// code in parentheses, and — when non-empty — the upstream message on a
/// second line. Public for reuse by other adapters (e.g. Slack).
pub fn format_coded_error(code: i64, message: &str) -> String {
    // Map the code to a human-readable heading: HTTP statuses first, then
    // the JSON-RPC reserved codes; -32099..=-32000 is the spec's
    // implementation-defined server-error band.
    let heading = match code {
        400 => "**Bad Request**",
        401 => "**Unauthorized**",
        403 => "**Forbidden**",
        404 => "**Not Found**",
        408 => "**Request Timeout**",
        429 => "**Rate Limited**",
        500 => "**Internal Server Error**",
        502 => "**Bad Gateway**",
        503 => "**Service Unavailable**",
        504 => "**Gateway Timeout**",
        -32600 => "**Invalid Request**",
        -32601 => "**Method Not Found**",
        -32602 => "**Invalid Params**",
        -32603 => "**Internal Error**",
        -32099..=-32000 => "**Server Error**",
        _ => "**Error**",
    };
    // Build the output incrementally: the message line is appended only
    // when there is a message, so code-only errors stay on one line.
    let mut out = format!("{} (code: {})", heading, code);
    if !message.is_empty() {
        out.push('\n');
        out.push_str(message);
    }
    out
}
assert!(result.contains("Service Busy")); + } + + #[test] + fn test_format_user_error_invalid_api_key() { + let result = format_user_error("invalid api key"); + assert!(result.contains("Unauthorized")); + } + + #[test] + fn test_format_user_error_unauthorized() { + let result = format_user_error("unauthorized: token rejected"); + assert!(result.contains("Unauthorized")); + } + + #[test] + fn test_format_user_error_unknown() { + let result = format_user_error("something went wrong"); + assert!(result.contains("Error")); + assert!(result.contains("something went wrong")); + } + + #[test] + fn test_format_user_error_empty() { + let result = format_user_error(""); + assert!(result.contains("Error")); + assert!(result.contains("unknown")); + } + + #[test] + fn test_format_user_error_case_insensitive() { + assert!(format_user_error("TIMEOUT WAITING FOR foo").contains("Timeout")); + assert!(format_user_error("CONNECTION CLOSED").contains("Connection")); + assert!(format_user_error("POOL EXHAUSTED").contains("Busy")); + } + + #[test] + fn test_format_user_error_mixed_case_timeout() { + // Case-insensitive matching should still extract method correctly + let result = format_user_error("Timeout Waiting For custom/method"); + assert!(result.contains("Request Timeout")); + assert!(result.contains("custom/method")); + } + + // ─── format_coded_error tests ─────────────────────────────────────────── + + #[test] + fn test_format_coded_error_401() { + let result = format_coded_error(401, "invalid token"); + assert!(result.contains("Unauthorized")); + assert!(result.contains("401")); + assert!(result.contains("invalid token")); + } + + #[test] + fn test_format_coded_error_429() { + let result = format_coded_error(429, ""); + assert!(result.contains("Rate Limited")); + assert!(result.contains("429")); + assert!(!result.contains("\n")); // no message, no newline + } + + #[test] + fn test_format_coded_error_503() { + let result = format_coded_error(503, "service unavailable"); + 
assert!(result.contains("Service Unavailable")); + assert!(result.contains("503")); + assert!(result.contains("service unavailable")); + } + + #[test] + fn test_format_coded_error_json_rpc() { + let result = format_coded_error(-32602, "missing required parameter"); + assert!(result.contains("Invalid Params")); + assert!(result.contains("-32602")); + } + + #[test] + fn test_format_coded_error_server_error_range() { + let result = format_coded_error(-32050, "internal failure"); + assert!(result.contains("Server Error")); + assert!(result.contains("-32050")); + } + + #[test] + fn test_format_coded_error_connection_error() { + let result = format_coded_error(-32000, "connection refused"); + assert!(result.contains("Server Error")); // -32000 falls in -32099..=-32000 range + assert!(result.contains("-32000")); + } + + #[test] + fn test_format_coded_error_unknown_code() { + let result = format_coded_error(999, "something happened"); + assert!(result.contains("Error")); + assert!(result.contains("999")); + assert!(result.contains("something happened")); + } +} diff --git a/src/format.rs b/src/format.rs index a0026ebb..841cf559 100644 --- a/src/format.rs +++ b/src/format.rs @@ -1,31 +1,40 @@ -/// Split text into chunks at line boundaries, each <= limit chars. +/// Split text into chunks at line boundaries, each <= limit Unicode characters (UTF-8 safe). +/// Discord's message limit counts Unicode characters, not bytes. 
/// Split text into chunks at line boundaries, each at most `limit`
/// Unicode characters. Discord's message limit counts characters, not
/// bytes, so all accounting here is in chars; a single line longer than
/// `limit` is hard-split on char boundaries (never mid-codepoint).
pub fn split_message(text: &str, limit: usize) -> Vec<String> {
    if text.chars().count() <= limit {
        return vec![text.to_string()];
    }

    let mut out: Vec<String> = Vec::new();
    let mut chunk = String::new();
    // Length of `chunk` in chars, tracked incrementally to avoid
    // re-counting the whole buffer on every line.
    let mut chunk_len: usize = 0;

    for line in text.split('\n') {
        let line_len = line.chars().count();

        // Flush the current chunk if appending this line (plus its
        // separating newline) would exceed the limit.
        if !chunk.is_empty() && chunk_len + line_len + 1 > limit {
            out.push(std::mem::take(&mut chunk));
            chunk_len = 0;
        }
        if !chunk.is_empty() {
            chunk.push('\n');
            chunk_len += 1;
        }

        if line_len <= limit {
            chunk.push_str(line);
            chunk_len += line_len;
        } else {
            // Oversized line: emit it character by character, flushing
            // whenever the chunk fills up. (`chunk_len` never exceeds
            // `limit`, so `== limit` is the full condition.)
            for ch in line.chars() {
                if chunk_len == limit {
                    out.push(std::mem::take(&mut chunk));
                    chunk_len = 0;
                }
                chunk.push(ch);
                chunk_len += 1;
            }
        }
    }
    if !chunk.is_empty() {
        out.push(chunk);
    }
    out
}
/// Return a prefix of `s` containing at most `limit` Unicode characters.
/// The cut always lands on a char boundary, so the slice is valid UTF-8.
/// Discord's message limit counts characters, not bytes.
pub fn truncate_chars(s: &str, limit: usize) -> &str {
    // `nth(limit)` yields the byte index of the (limit+1)-th char, i.e.
    // the first char that must be dropped; if the string is shorter than
    // `limit` chars there is nothing to cut.
    s.char_indices()
        .nth(limit)
        .map_or(s, |(byte_idx, _)| &s[..byte_idx])
}
GROQ_API_KEY from environment"); + cfg.stt.api_key = key; + } + } + } + if cfg.stt.api_key.is_empty() { + anyhow::bail!("stt.enabled = true but no API key found — set stt.api_key in config or export GROQ_API_KEY"); + } + info!(model = %cfg.stt.model, base_url = %cfg.stt.base_url, "STT enabled"); + } let handler = discord::Handler { pool: pool.clone(), allowed_channels, + allowed_users, reactions_config: cfg.reactions, + stt_config: cfg.stt.clone(), }; let intents = GatewayIntents::GUILD_MESSAGES @@ -81,6 +99,23 @@ async fn main() -> anyhow::Result<()> { // Cleanup cleanup_handle.abort(); shutdown_pool.shutdown().await; - info!("agent-broker shut down"); + info!("openab shut down"); Ok(()) } + +fn parse_id_set(raw: &[String], label: &str) -> anyhow::Result> { + let set: HashSet = raw + .iter() + .filter_map(|s| match s.parse() { + Ok(id) => Some(id), + Err(_) => { + tracing::warn!(value = %s, label = label, "ignoring invalid entry"); + None + } + }) + .collect(); + if !raw.is_empty() && set.is_empty() { + anyhow::bail!("all {label} entries failed to parse — refusing to start with an empty allowlist"); + } + Ok(set) +} diff --git a/src/stt.rs b/src/stt.rs new file mode 100644 index 00000000..122db9b6 --- /dev/null +++ b/src/stt.rs @@ -0,0 +1,61 @@ +use crate::config::SttConfig; +use reqwest::multipart; +use tracing::{debug, error}; + +/// Transcribe audio bytes via an OpenAI-compatible `/audio/transcriptions` endpoint. 
+pub async fn transcribe( + client: &reqwest::Client, + cfg: &SttConfig, + audio_bytes: Vec, + filename: String, + mime_type: &str, +) -> Option { + let url = format!("{}/audio/transcriptions", cfg.base_url.trim_end_matches('/')); + + let file_part = multipart::Part::bytes(audio_bytes) + .file_name(filename) + .mime_str(mime_type) + .ok()?; + + let form = multipart::Form::new() + .part("file", file_part) + .text("model", cfg.model.clone()) + .text("response_format", "json"); + + let resp = match client + .post(&url) + .bearer_auth(&cfg.api_key) + .multipart(form) + .send() + .await + { + Ok(r) => r, + Err(e) => { + error!(error = %e, "STT request failed"); + return None; + } + }; + + if !resp.status().is_success() { + let status = resp.status(); + let body = resp.text().await.unwrap_or_default(); + error!(status = %status, body = %body, "STT API error"); + return None; + } + + let json: serde_json::Value = match resp.json().await { + Ok(v) => v, + Err(e) => { + error!(error = %e, "STT response parse failed"); + return None; + } + }; + + let text = json.get("text")?.as_str()?.trim().to_string(); + if text.is_empty() { + return None; + } + + debug!(chars = text.len(), "STT transcription complete"); + Some(text) +}