diff --git a/.codecov.yml b/.codecov.yml new file mode 100644 index 00000000..2f0a040f --- /dev/null +++ b/.codecov.yml @@ -0,0 +1,8 @@ +coverage: + status: + project: + default: + informational: true + patch: + default: + target: 60% diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md new file mode 100644 index 00000000..2ba1ecad --- /dev/null +++ b/.github/pull_request_template.md @@ -0,0 +1,16 @@ +## Summary + + +## Changes + +- Change 1 +- Change 2 + +## Test Plan + +- [ ] Unit tests pass +- [ ] Manual local verification confirms the `lark xxx` command works as expected + +## Related Issues + +- None diff --git a/.github/workflows/coverage.yml b/.github/workflows/coverage.yml index 5aba7b0d..351cf3d9 100644 --- a/.github/workflows/coverage.yml +++ b/.github/workflows/coverage.yml @@ -2,22 +2,32 @@ name: Coverage on: push: - branches: [ main ] + branches: [main] + paths: + - "**.go" + - go.mod + - go.sum + - .github/workflows/coverage.yml pull_request: - branches: [ main ] + branches: [main] + paths: + - "**.go" + - go.mod + - go.sum + - .github/workflows/coverage.yml permissions: contents: read jobs: - codecov: - runs-on: ubuntu-22.04 + coverage: + runs-on: ubuntu-latest steps: - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 - uses: actions/setup-go@40f1582b2485089dde7abd97c1529aa768e1baff # v5 with: - go-version: '1.23' + go-version-file: go.mod - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5 with: @@ -27,10 +37,18 @@ jobs: run: python3 scripts/fetch_meta.py - name: Run tests with coverage - run: go test -coverprofile=coverage.txt -covermode=atomic ./... - - - name: Upload coverage to Codecov - uses: codecov/codecov-action@75cd11691c0faa626561e295848008c8a7dddffe # v5 - with: - files: coverage.txt - token: ${{ secrets.CODECOV_TOKEN }} + run: go test -race -coverprofile=coverage.txt -covermode=atomic ./... 
+ + - name: Generate coverage report + run: | + total=$(go tool cover -func=coverage.txt | grep total | awk '{print $3}') + echo "## Coverage Report" >> $GITHUB_STEP_SUMMARY + echo "" >> $GITHUB_STEP_SUMMARY + echo "**Total coverage: ${total}**" >> $GITHUB_STEP_SUMMARY + echo "" >> $GITHUB_STEP_SUMMARY + echo "
Details" >> $GITHUB_STEP_SUMMARY + echo "" >> $GITHUB_STEP_SUMMARY + echo '```' >> $GITHUB_STEP_SUMMARY + go tool cover -func=coverage.txt >> $GITHUB_STEP_SUMMARY + echo '```' >> $GITHUB_STEP_SUMMARY + echo "
" >> $GITHUB_STEP_SUMMARY diff --git a/.github/workflows/gitleaks.yml b/.github/workflows/gitleaks.yml new file mode 100644 index 00000000..1506b56e --- /dev/null +++ b/.github/workflows/gitleaks.yml @@ -0,0 +1,28 @@ +name: Gitleaks + +on: + pull_request: + branches: [main] + push: + branches: [main] + workflow_dispatch: + +permissions: + contents: read + +jobs: + gitleaks: + # Forked pull_request runs do not receive repository/org secrets except GITHUB_TOKEN. + if: ${{ github.event_name != 'pull_request' || !github.event.pull_request.head.repo.fork }} + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + with: + fetch-depth: 0 + + - uses: gitleaks/gitleaks-action@ff98106e4c7b2bc287b24eaf42907196329070c7 # v2.3.9 + env: + # GITHUB_TOKEN is provided automatically by GitHub Actions. + # GITLEAKS_KEY must be configured as a repository secret. + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + GITLEAKS_LICENSE: ${{ secrets.GITLEAKS_KEY }} diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 2d0da6b6..cec20a8b 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -2,43 +2,36 @@ name: Lint on: push: - branches: [ main ] + branches: [main] + paths: + - "**.go" + - go.mod + - go.sum + - .golangci.yml + - .github/workflows/lint.yml pull_request: - branches: [ main ] + branches: [main] + paths: + - "**.go" + - go.mod + - go.sum + - .golangci.yml + - .github/workflows/lint.yml permissions: contents: read jobs: - staticcheck: - runs-on: ubuntu-22.04 - steps: - - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 - - - uses: actions/setup-go@40f1582b2485089dde7abd97c1529aa768e1baff # v5 - with: - go-version: '1.23' - - - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5 - with: - python-version: '3.x' - - - name: Fetch meta_data.json - run: python3 scripts/fetch_meta.py - - - name: Run staticcheck - uses: 
dominikh/staticcheck-action@9716614d4101e79b4340dd97b10e54d68234e431 # v1 - with: - install-go: false - golangci-lint: - runs-on: ubuntu-22.04 + runs-on: ubuntu-latest steps: - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + with: + fetch-depth: 0 - uses: actions/setup-go@40f1582b2485089dde7abd97c1529aa768e1baff # v5 with: - go-version: '1.23' + go-version-file: go.mod - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5 with: @@ -47,26 +40,21 @@ jobs: - name: Fetch meta_data.json run: python3 scripts/fetch_meta.py - - name: Run golangci-lint - uses: golangci/golangci-lint-action@55c2c1448f86e01eaae002a5a3a9624417608d84 # v6 - with: - version: latest - - vet: - runs-on: ubuntu-22.04 - steps: - - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + - name: Ensure go.mod and go.sum are tidy + run: | + go mod tidy + if ! git diff --quiet go.mod go.sum; then + echo "::error::go.mod or go.sum is not tidy. Run 'go mod tidy' and commit the changes." + git diff go.mod go.sum + exit 1 + fi - - uses: actions/setup-go@40f1582b2485089dde7abd97c1529aa768e1baff # v5 - with: - go-version: '1.23' - - - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5 - with: - python-version: '3.x' + - name: Run golangci-lint + run: go run github.com/golangci/golangci-lint/v2/cmd/golangci-lint@v2.1.6 run --new-from-rev=origin/main - - name: Fetch meta_data.json - run: python3 scripts/fetch_meta.py + - name: Run govulncheck + continue-on-error: true # informational until Go version is upgraded + run: go run golang.org/x/vuln/cmd/govulncheck@v1.1.4 ./... - - name: Run go vet - run: go vet ./... + - name: Check dependency licenses + run: go run github.com/google/go-licenses/v2@v2.0.1 check ./... 
--disallowed_types=forbidden,restricted,reciprocal,unknown diff --git a/.github/workflows/pkg-pr-new-comment.yml b/.github/workflows/pkg-pr-new-comment.yml new file mode 100644 index 00000000..56725fc6 --- /dev/null +++ b/.github/workflows/pkg-pr-new-comment.yml @@ -0,0 +1,149 @@ +name: PR Preview Package Comment + +on: + workflow_run: + workflows: ["PR Preview Package"] + types: [completed] + +permissions: + actions: read + contents: read + issues: write + pull-requests: write + +jobs: + comment: + if: github.event.workflow_run.conclusion == 'success' && github.event.workflow_run.event == 'pull_request' + runs-on: ubuntu-latest + + steps: + - name: Check comment payload artifact + id: payload + uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1 + with: + script: | + const runId = context.payload.workflow_run?.id; + const { data } = await github.rest.actions.listWorkflowRunArtifacts({ + owner: context.repo.owner, + repo: context.repo.repo, + run_id: runId, + per_page: 100, + }); + const found = Boolean( + data.artifacts?.some((artifact) => artifact.name === "pkg-pr-new-comment-payload") + ); + core.setOutput("found", found ? 
"true" : "false"); + if (!found) { + core.notice("No comment payload artifact found for this run; skipping comment."); + } + + - name: Download comment payload + if: steps.payload.outputs.found == 'true' + uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8 + with: + name: pkg-pr-new-comment-payload + repository: ${{ github.repository }} + run-id: ${{ github.event.workflow_run.id }} + github-token: ${{ github.token }} + + - name: Comment install command + if: steps.payload.outputs.found == 'true' + uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1 + with: + script: | + const fs = require("fs"); + const payload = JSON.parse(fs.readFileSync("pkg-pr-new-comment-payload.json", "utf8")); + const url = payload?.url; + const payloadPr = payload?.pr; + const sourceRepo = payload?.sourceRepo; + const sourceBranch = payload?.sourceBranch; + if (!Number.isInteger(payloadPr)) { + throw new Error(`Invalid PR number in artifact payload: ${payloadPr}`); + } + if (payloadPr <= 0) { + throw new Error(`Invalid PR number in artifact payload: ${payloadPr}`); + } + const issueNumber = payloadPr; + const runPrNumber = context.payload.workflow_run?.pull_requests?.[0]?.number; + if (Number.isInteger(runPrNumber) && runPrNumber !== issueNumber) { + throw new Error( + `PR number mismatch between workflow_run (${runPrNumber}) and artifact payload (${issueNumber})`, + ); + } + + if (typeof url !== "string" || url.trim() !== url || /[\u0000-\u001F\u007F]/.test(url)) { + throw new Error(`Invalid package URL in payload: ${url}`); + } + let parsedUrl; + try { + parsedUrl = new URL(url); + } catch { + throw new Error(`Invalid package URL in payload: ${url}`); + } + if (parsedUrl.protocol !== "https:" || parsedUrl.hostname !== "pkg.pr.new") { + throw new Error(`Invalid package URL in payload: ${url}`); + } + + const safeRepoPattern = /^[A-Za-z0-9_.-]+\/[A-Za-z0-9_.-]+$/; + const safeBranchPattern = /^[A-Za-z0-9._\/-]+$/; + const hasSkillSource = 
+ typeof sourceRepo === "string" && + typeof sourceBranch === "string" && + safeRepoPattern.test(sourceRepo) && + safeBranchPattern.test(sourceBranch); + const skillSection = hasSkillSource + ? [ + "", + "### 🧩 Skill update", + "", + "```bash", + `npx skills add ${sourceRepo}#${sourceBranch} -y -g`, + "```", + ] + : [ + "", + "### 🧩 Skill update", + "", + "_Unavailable for this PR because source repo/branch metadata is missing._", + ]; + + const body = [ + "", + "## 🚀 PR Preview Install Guide", + "", + "### 🧰 CLI update", + "", + "```bash", + `npm i -g ${url}`, + "```", + ...skillSection, + ].join("\n"); + + const comments = await github.paginate(github.rest.issues.listComments, { + owner: context.repo.owner, + repo: context.repo.repo, + issue_number: issueNumber, + per_page: 100, + }); + + const existing = comments.find((comment) => + comment.user?.login === "github-actions[bot]" && + typeof comment.body === "string" && + comment.body.includes("") + ); + + if (existing) { + await github.rest.issues.updateComment({ + owner: context.repo.owner, + repo: context.repo.repo, + comment_id: existing.id, + body, + }); + } else { + await github.rest.issues.createComment({ + owner: context.repo.owner, + repo: context.repo.repo, + issue_number: issueNumber, + body, + }); + } diff --git a/.github/workflows/pkg-pr-new.yml b/.github/workflows/pkg-pr-new.yml new file mode 100644 index 00000000..a6582fa5 --- /dev/null +++ b/.github/workflows/pkg-pr-new.yml @@ -0,0 +1,71 @@ +name: PR Preview Package + +on: + pull_request: + types: [opened, synchronize, reopened, ready_for_review] + branches: [main] + +permissions: + contents: read + +jobs: + publish: + if: github.event.pull_request.draft == false + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + + - uses: actions/setup-go@40f1582b2485089dde7abd97c1529aa768e1baff # v5 + with: + go-version-file: go.mod + + - uses: actions/setup-node@60edb5dd545a775178f52524783378180af0d1f8 
# v4 + with: + node-version: lts/* + + - name: Build preview package + run: ./scripts/build-pkg-pr-new.sh + + - name: Publish to pkg.pr.new + run: npx pkg-pr-new publish --no-compact --json output.json --comment=off ./.pkg-pr-new + + - name: Build comment payload + env: + PR_NUMBER: ${{ github.event.pull_request.number }} + SOURCE_REPO: ${{ github.event.pull_request.head.repo.full_name }} + SOURCE_BRANCH: ${{ github.event.pull_request.head.ref }} + run: | + node <<'NODE' + const fs = require("fs"); + + const output = JSON.parse(fs.readFileSync("output.json", "utf8")); + const url = output?.packages?.[0]?.url; + if (!url) throw new Error("No package URL found in output.json"); + if (!url.startsWith("https://pkg.pr.new/")) { + throw new Error(`Unexpected package URL: ${url}`); + } + + const pr = Number(process.env.PR_NUMBER); + if (!Number.isInteger(pr) || pr <= 0) { + throw new Error(`Invalid PR_NUMBER: ${process.env.PR_NUMBER}`); + } + + const payload = { + pr, + url, + sourceRepo: process.env.SOURCE_REPO || "", + sourceBranch: process.env.SOURCE_BRANCH || "", + }; + + fs.writeFileSync( + "pkg-pr-new-comment-payload.json", + JSON.stringify(payload), + ); + NODE + + - name: Upload comment payload + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4 + with: + name: pkg-pr-new-comment-payload + path: pkg-pr-new-comment-payload.json diff --git a/.github/workflows/pr-labels-test.yml b/.github/workflows/pr-labels-test.yml new file mode 100644 index 00000000..df8dd891 --- /dev/null +++ b/.github/workflows/pr-labels-test.yml @@ -0,0 +1,31 @@ +name: Test PR Label Logic + +on: + push: + branches: [main] + paths: + - "scripts/pr-labels/**" + - ".github/workflows/pr-labels-test.yml" + pull_request: + branches: [main] + paths: + - "scripts/pr-labels/**" + - ".github/workflows/pr-labels-test.yml" + +permissions: + contents: read + +jobs: + test-pr-labels: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - uses: actions/setup-node@v4 + 
with: + node-version: '20' + + - name: Run PR label regression tests + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: node scripts/pr-labels/test.js diff --git a/.github/workflows/pr-labels.yml b/.github/workflows/pr-labels.yml new file mode 100644 index 00000000..096b4125 --- /dev/null +++ b/.github/workflows/pr-labels.yml @@ -0,0 +1,43 @@ +name: PR Labels + +on: + pull_request_target: + # NOTE: This event runs with base-branch code and write permissions. + # Do NOT add `ref: github.event.pull_request.head.sha` to the checkout step, + # as that would execute untrusted PR code with elevated access. + types: + - opened + - edited + - reopened + - synchronize + - ready_for_review + +permissions: + contents: read + pull-requests: write + issues: write + +jobs: + sync-pr-labels: + if: ${{ github.event.pull_request.state == 'open' }} + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - uses: actions/setup-node@v4 + with: + node-version: '20' + + - name: Sync managed PR labels + id: sync_pr_labels + # Labeling is best-effort and must not block PR merges. + continue-on-error: true + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: node scripts/pr-labels/index.js + + - name: Warn when label sync fails + if: ${{ always() && steps.sync_pr_labels.outcome == 'failure' }} + run: | + echo "::warning::PR label sync failed; labels may be stale." + echo "⚠️ PR label sync failed; labels may be stale." 
>> "$GITHUB_STEP_SUMMARY" diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 94d6b56a..1a971b5a 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -33,3 +33,19 @@ jobs: args: release --clean env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + publish-npm: + needs: goreleaser + runs-on: ubuntu-22.04 + steps: + - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + + - uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4 + with: + node-version: '20' + registry-url: 'https://registry.npmjs.org' + + - name: Publish to npm + env: + NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} + run: npm publish --access public diff --git a/.github/workflows/skill-format-check.yml b/.github/workflows/skill-format-check.yml new file mode 100644 index 00000000..7c8b8fc5 --- /dev/null +++ b/.github/workflows/skill-format-check.yml @@ -0,0 +1,32 @@ +name: Skill Format Check + +on: + push: + branches: [main] + paths: + - "skills/**" + - "scripts/skill-format-check/**" + - ".github/workflows/skill-format-check.yml" + pull_request: + branches: [main] + paths: + - "skills/**" + - "scripts/skill-format-check/**" + - ".github/workflows/skill-format-check.yml" + +permissions: + contents: read + +jobs: + check-format: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - name: Setup Node.js + uses: actions/setup-node@v4 + with: + node-version: '20' + + - name: Run Skill Format Check + run: node scripts/skill-format-check/index.js diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 11136dcf..58351696 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -2,22 +2,32 @@ name: Tests on: push: - branches: [ main ] + branches: [main] + paths: + - "**.go" + - go.mod + - go.sum + - .github/workflows/tests.yml pull_request: - branches: [ main ] + branches: [main] + paths: + - "**.go" + - go.mod + - go.sum + - .github/workflows/tests.yml permissions: 
contents: read jobs: unit-test: - runs-on: ubuntu-22.04 + runs-on: ubuntu-latest steps: - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 - uses: actions/setup-go@40f1582b2485089dde7abd97c1529aa768e1baff # v5 with: - go-version: '1.23' + go-version-file: go.mod - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5 with: @@ -27,4 +37,7 @@ jobs: run: python3 scripts/fetch_meta.py - name: Run tests - run: go test -v -race -count=1 -timeout=30s ./cmd/... ./internal/... ./shortcuts/... + run: go test -v -race -count=1 -timeout=5m ./cmd/... ./internal/... ./shortcuts/... + + - name: Build + run: go build -v ./... diff --git a/.gitleaks.toml b/.gitleaks.toml new file mode 100644 index 00000000..597b3395 --- /dev/null +++ b/.gitleaks.toml @@ -0,0 +1,16 @@ +title = "lark-cli gitleaks config" + +[extend] +useDefault = true + +[[rules]] +id = "lark-bot-app-id" +description = "Detect Lark bot app ids" +regex = '''\bcli_[a-z0-9]{16}\b''' +keywords = ["cli_"] + +[[rules]] +id = "lark-session-token" +description = "Detect Lark session tokens" +regex = '''\bXN0YXJ0-[A-Za-z0-9_-]+-WVuZA\b''' +keywords = ["XN0YXJ0-", "-WVuZA"] diff --git a/.golangci.yml b/.golangci.yml new file mode 100644 index 00000000..4690fe93 --- /dev/null +++ b/.golangci.yml @@ -0,0 +1,66 @@ +version: "2" + +run: + timeout: 5m + +linters: + default: none + enable: + - asasalint # checks for pass []any as any in variadic func(...any) + - asciicheck # checks that code does not contain non-ASCII identifiers + - bidichk # checks for dangerous unicode character sequences + - bodyclose # checks whether HTTP response body is closed successfully + - copyloopvar # detects places where loop variables are copied + - durationcheck # checks for two durations multiplied together + - exptostd # detects functions from golang.org/x/exp/ replaceable by std + - fatcontext # detects nested contexts in loops + - gocheckcompilerdirectives # validates go compiler directive comments (//go:) + 
- gochecksumtype # checks exhaustiveness on Go "sum types" + - gocritic # diagnostics for bugs, performance and style + - gomoddirectives # checks for replace, retract, and exclude in go.mod + - goprintffuncname # checks that printf-like functions end with f + - govet # reports suspicious constructs + - ineffassign # detects ineffective assignments + - nilerr # finds code that returns nil even if error is not nil + - nolintlint # reports ill-formed nolint directives + - nosprintfhostport # checks for misuse of Sprintf to construct host:port + - reassign # checks that package variables are not reassigned + - unconvert # removes unnecessary type conversions + - unused # checks for unused constants, variables, functions and types + + # To enable later after fixing existing issues: + # - errcheck # checks for unchecked errors + # - errname # checks that error types are named XxxError + # - errorlint # checks error wrapping best practices + # - gosec # security-oriented linter + # - misspell # finds commonly misspelled English words + # - staticcheck # comprehensive static analysis + + exclusions: + paths: + - generated + rules: + - path: _test\.go$ + linters: + - bodyclose + - gocritic + + settings: + gocritic: + disabled-checks: + - appendAssign + - hugeParam + disabled-tags: + - style + govet: + enable: + - httpresponse + +formatters: + enable: + - gofmt + - goimports + +issues: + max-issues-per-linter: 0 + max-same-issues: 0 diff --git a/AGENTS.md b/AGENTS.md new file mode 100644 index 00000000..e594a81c --- /dev/null +++ b/AGENTS.md @@ -0,0 +1,33 @@ +# AGENTS.md +Concise maintainer/developer guide for building, testing, and opening high-quality PRs in this repo. + +## Goal (pick one per PR) +- Make CLI better: improve UX, error messages, help text, flags, and output clarity. +- Improve reliability: fix bugs, edge cases, and regressions with tests. +- Improve developer velocity: simplify code paths, reduce complexity, keep behavior explicit. 
+- Improve quality gates: strengthen tests/lint/checks without adding heavy process. + +## Fast Dev Loop +1. `make build` (runs `python3 scripts/fetch_meta.py` first) +2. `make unit-test` (required before PR) +3. Run changed command(s) manually via `./lark-cli ...` + +## Pre-PR Checks (match CI gates) +1. `make unit-test` +2. `go mod tidy` (must not change `go.mod`/`go.sum`) +3. `go run github.com/golangci/golangci-lint/v2/cmd/golangci-lint@v2.1.6 run --new-from-rev=origin/main` +4. If dependencies changed: `go run github.com/google/go-licenses/v2@v2.0.1 check ./... --disallowed_types=forbidden,restricted,reciprocal,unknown` +5. Optional full local suite: `make test` (vet + unit + integration) + +## Test/Check Commands +- Unit: `make unit-test` +- Integration: `make integration-test` +- Full: `make test` +- Vet only: `make vet` +- Coverage (local): `go test -race -coverprofile=coverage.txt -covermode=atomic ./...` + +## Commit/PR Rules +- Use Conventional Commits in English: `feat: ...`, `fix: ...`, `docs: ...`, `ci: ...`, `test: ...`, `chore: ...`, `refactor: ...` +- Keep PR title in the same Conventional Commit format (squash merge keeps it). +- Before opening a real PR, draft/fill description from `.github/pull_request_template.md` and ensure Summary/Changes/Test Plan are complete. +- Never commit secrets/tokens/internal sensitive data. diff --git a/ByteDance_Corporate_Contributor_License_Agreement_v1.1.pdf b/ByteDance_Corporate_Contributor_License_Agreement_v1.1.pdf deleted file mode 100644 index 043c0aa0..00000000 Binary files a/ByteDance_Corporate_Contributor_License_Agreement_v1.1.pdf and /dev/null differ diff --git a/CHANGELOG.md b/CHANGELOG.md index cad474a1..a414b8ba 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,62 @@ All notable changes to this project will be documented in this file. 
+## [v1.0.2] - 2026-04-01 + +### Features + +- Improve OS keychain/DPAPI access error handling for sandbox environments (#173) +- **mail**: Auto-resolve local image paths in draft body HTML (#139) + +### Bug Fixes + +- Correct URL formatting in login `--no-wait` output (#169) + +### Documentation + +- Add concise AGENTS development guide (#178) + +### CI + +- Refine PR business area labels and introduce skill format check (#148) + +### Chore + +- Add pull request template (#176) + +## [v1.0.1] - 2026-03-31 + +### Features + +- Add automatic CLI update detection and notification (#144) +- Add npm publish job to release workflow (#145) +- Support auto extension for downloads (#16) +- Remove useless files (#131) +- Normalize markdown message send/reply output (#28) +- Add auto-pagination to messages search and update lark-im docs (#30) + +### Bug Fixes + +- **base**: Use base history read scope for record history list (#96) +- Remove sensitive send scope from reply and forward shortcuts (#92) +- Resolve silent failure in `lark-cli api` error output (#85) + +### Documentation + +- **base**: Clarify field description usage in json (#90) +- Update Base description to include all capabilities (#61) +- Add official badge to distinguish from third-party Lark CLI tools (#103) +- Rename user-facing Bitable references to Base (#11) +- Add star history chart to readmes (#12) +- Simplify installation steps by merging CLI and Skills into one section (#26) +- Add npm version badge and improve AI agent tip wording (#23) +- Emphasize Skills installation as required for AI Agents (#19) +- Clarify install methods as alternatives and add source build steps + +### CI + +- Improve CI workflows and add golangci-lint config (#71) + ## [v1.0.0] - 2026-03-28 ### Initial Release @@ -27,7 +83,7 @@ Built-in shortcuts for commonly used Lark APIs, enabling concise commands like ` - **Drive** — Upload, download, and manage cloud documents. - **Docs** — Work with Lark documents. 
- **Sheets** — Interact with spreadsheets. -- **Base (Bitable)** — Manage multi-dimensional tables. +- **Base** — Manage multi-dimensional tables. - **Calendar** — Create and manage calendar events. - **Mail** — Send and manage emails. - **Contact** — Look up users and departments. @@ -54,4 +110,6 @@ Bundled AI agent skills for intelligent assistance: - Bilingual documentation (English & Chinese). - CI/CD pipelines: linting, testing, coverage reporting, and automated releases. +[v1.0.2]: https://github.com/larksuite/cli/releases/tag/v1.0.2 +[v1.0.1]: https://github.com/larksuite/cli/releases/tag/v1.0.1 [v1.0.0]: https://github.com/larksuite/cli/releases/tag/v1.0.0 diff --git a/CLA.md b/CLA.md deleted file mode 100644 index 4f186c6b..00000000 --- a/CLA.md +++ /dev/null @@ -1,28 +0,0 @@ -> Thank you for your interest in open source projects hosted or managed by ByteDance Ltd. and/or its Affiliates ("**ByteDance**") . In order to clarify the intellectual property license granted with Contributions from any person or entity, ByteDance must have a Contributor License Agreement ("**CLA**") on file that has been signed by each Contributor, indicating agreement to the license terms below. This license is for your protection as a Contributor as well as the protection of ByteDance and its users; it does not change your rights to use your own Contributions for any other purpose. -> If you are an individual making a submission on your own behalf, you should accept the Individual Contributor License Agreement. If you are making a submission on behalf of a legal entity (the “**Corporation**”), you should sign the separation Corporate Contributor License Agreement. - -**ByteDance Individual Contributor License Agreement v1.** **1** -By clicking “Accept” on this page, You accept and agree to the following terms and conditions for Your present and future Contributions submitted to ByteDance. 
Except for the license granted herein to ByteDance and recipients of software distributed by ByteDance, You reserve all right, title, and interest in and to Your Contributions. -1.Definitions. -"Affiliate" shall mean an entity that Controls, is Controlled by, or is under common Control with You or ByteDance, respectively (but only as long as such Control exists). -"Control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. -"Contribution" shall mean any original work of authorship, including any modifications or additions to an existing work, that is intentionally submitted by You to ByteDance for inclusion in, or documentation of, any of the products owned or managed by ByteDance (the "Work"). For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to ByteDance or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, ByteDance for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by You as "Not a Contribution." -"You" (or "Your") shall mean the copyright owner or legal entity authorized by the copyright owner that is making this Agreement with ByteDance. For the avoidance of doubt, the Corporation making a Contribution and all of its Affiliates are considered to be a single Contributor and this CLA shall apply to Contributions Submitted by the Corporation or any of its Affiliates. -2.Grant of Copyright License. 
Subject to the terms and conditions of this Agreement, You hereby grant to ByteDance and to recipients of software distributed by ByteDance a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare derivative works of, publicly display, publicly perform, sublicense, and distribute Your Contributions and such derivative works. -3.Grant of Patent License. Subject to the terms and conditions of this Agreement, You hereby grant to ByteDance and to recipients of software distributed by ByteDance a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by You that are necessarily infringed by Your Contribution(s) alone or by combination of Your Contribution(s) with the Work to which such Contribution(s) was submitted. If any entity institutes patent litigation against You or any other entity (including a cross-claim or counterclaim in a lawsuit) alleging that your Contribution, or the Work to which you have contributed, constitutes direct or contributory patent infringement, then any patent licenses granted to that entity under this Agreement for that Contribution or Work shall terminate as of the date such litigation is filed. -4.You represent that you are legally entitled to grant the above license. If your employer(s) has rights to intellectual property that you create that includes your Contributions, you represent that you have received permission to make Contributions on behalf of that employer, that your employer has waived such rights for your Contributions to ByteDance, or that your employer has executed a separate Corporate CLA with ByteDance. -5.You represent that each of Your Contributions is Your original creation (see section 7 for submissions on behalf of others). 
You represent that Your Contribution submissions include complete details of any third-party license or other restriction (including, but not limited to, related patents and trademarks) of which you are personally aware and which are associated with any part of Your Contributions. -6.You are not expected to provide support for Your Contributions, except to the extent You desire to provide support. You may provide support for free, for a fee, or not at all. Unless required by applicable law or agreed to in writing, You provide Your Contributions on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON- INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. -7.Should You wish to submit work that is not Your original creation, You may submit it to ByteDance separately from any Contribution, identifying the complete details of its source and of any license or other restriction (including, but not limited to, related patents, trademarks, and license agreements) of which you are personally aware, and conspicuously marking the work as "Submitted on behalf of a third-party: [named here]". -8.You agree to notify ByteDance of any facts or circumstances of which you become aware that would make these representations inaccurate in any respect. -9.You agree that contributions to Projects and information about contributions may be maintained indefinitely and disclosed publicly, including Your name and other information that You submit with your submission. -10.This Agreement is the entire agreement and understanding between the parties, and supersedes any and all prior agreements, understandings or communications, written or oral, between the parties relating to the subject matter hereof. This Agreement may be assigned by ByteDance. 
- -[ByteDance Corporate Contributor License Agreement v1.1](./ByteDance_Corporate_Contributor_License_Agreement_v1.1.pdf) - -This version of the Contributor License Agreement allows a legal entity (the “Corporation”) to submit Contributions to the applicable project. -ByteDance Corporate Contributor License Agreement v1.1.pdf -A person authorized to sign legal documents on behalf of your employer (usually a VP or higher) must sign the Contributor Agreement on behalf of the employer. -If you have not already signed this agreement, please complete and sign, then scan and email a pdf file of this Agreement to opensource-cla@bytedance.com. Please read this document carefully before signing and keep a copy for your records. - -If you need to update your CLA, please email  from the email address associated with your individual or corporate information. \ No newline at end of file diff --git a/README.md b/README.md index 4a5b1a8e..aacf745c 100644 --- a/README.md +++ b/README.md @@ -2,10 +2,11 @@ [![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT) [![Go Version](https://img.shields.io/badge/go-%3E%3D1.23-blue.svg)](https://go.dev/) +[![npm version](https://img.shields.io/npm/v/@larksuite/cli.svg)](https://www.npmjs.com/package/@larksuite/cli) [中文版](./README.zh.md) | [English](./README.md) -A command-line tool for [Lark/Feishu](https://www.larksuite.com/) Open Platform — built for humans and AI Agents. Covers core business domains including Messenger, Docs, Base, Sheets, Calendar, Mail, Tasks, Meetings, and more, with 200+ commands and 19 AI Agent [Skills](./skills/). +The official [Lark/Feishu](https://www.larksuite.com/) CLI tool, maintained by the [larksuite](https://github.com/larksuite) team — built for humans and AI Agents. Covers core business domains including Messenger, Docs, Base, Sheets, Calendar, Mail, Tasks, Meetings, and more, with 200+ commands and 19 AI Agent [Skills](./skills/). 
[Install](#installation--quick-start) · [AI Agent Skills](#agent-skills) · [Auth](#authentication) · [Commands](#three-layer-command-system) · [Advanced](#advanced-usage) · [Security](#security--risk-warnings-read-before-use) · [Contributing](#contributing) @@ -27,7 +28,7 @@ A command-line tool for [Lark/Feishu](https://www.larksuite.com/) Open Platform | 💬 Messenger | Send/reply messages, create and manage group chats, view chat history & threads, search messages, download media | | 📄 Docs | Create, read, update, and search documents, read/write media & whiteboards | | 📁 Drive | Upload and download files, search docs & wiki, manage comments | -| 📊 Base | Create and manage tables, fields, records, views, dashboards, data aggregation & analytics | +| 📊 Base | Create and manage tables, fields, records, views, dashboards, workflows, forms, roles & permissions, data aggregation & analytics | | 📈 Sheets | Create, read, write, append, find, and export spreadsheet data | | ✅ Tasks | Create, query, update, and complete tasks; manage task lists, subtasks, comments & reminders | | 📚 Wiki | Create and manage knowledge spaces, nodes, and documents | @@ -46,16 +47,20 @@ Before you start, make sure you have: ### Quick Start (Human Users) -> **Tip:** If you have an AI Agent, you can hand this README to it and let the AI handle installation and setup — jump to [Quick Start (AI Agent)](#quick-start-ai-agent). +> **Note for AI assistants:** If you are an AI Agent helping the user with installation, jump directly to [Quick Start (AI Agent)](#quick-start-ai-agent), which contains all the steps you need to complete. -#### Install CLI +#### Install Choose **one** of the following methods: **Option 1 — From npm (recommended):** ```bash +# Install CLI npm install -g @larksuite/cli + +# Install CLI SKILL (required) +npx skills add larksuite/cli -y -g ``` **Option 2 — From source:** @@ -66,17 +71,8 @@ Requires Go `v1.23`+ and Python 3. 
git clone https://github.com/larksuite/cli.git cd cli make install -``` - -#### Install AI Agent Skills - -[Skills](./skills/) are structured instruction documents that enable AI Agents to use this CLI: - -```bash -# Install all skills to current directory -npx skills add larksuite/cli -y -# Install all skills globally +# Install CLI SKILL (required) npx skills add larksuite/cli -y -g ``` @@ -97,21 +93,17 @@ lark-cli calendar +agenda > The following steps are for AI Agents. Some steps require the user to complete actions in a browser. -**Step 1 — Install CLI** +**Step 1 — Install** ```bash +# Install CLI npm install -g @larksuite/cli -``` - -**Step 2 — Install Skills (core step, do not skip)** -> Skills are the instruction files that tell the AI Agent what commands are available and how to call them. This step is essential for the Agent to work with lark-cli. - -```bash -npx skills add larksuite/cli --all -y +# Install CLI SKILL (required) +npx skills add larksuite/cli -y -g ``` -**Step 3 — Configure app credentials** +**Step 2 — Configure app credentials** > Run this command in the background. It will output an authorization URL — extract it and send it to the user. The command exits automatically after the user completes the setup in the browser. @@ -119,7 +111,7 @@ npx skills add larksuite/cli --all -y lark-cli config init --new ``` -**Step 4 — Login** +**Step 3 — Login** > Same as above: run in the background, extract the authorization URL and send it to the user. @@ -127,7 +119,7 @@ lark-cli config init --new lark-cli auth login --recommend ``` -**Step 5 — Verify** +**Step 4 — Verify** ```bash lark-cli auth status @@ -273,6 +265,10 @@ We recommend using the Lark/Feishu bot integrated with this tool as a private co Please fully understand all usage risks. By using this tool, you are deemed to voluntarily assume all related responsibilities. 
+## Star History + +[![Star History Chart](https://api.star-history.com/svg?repos=larksuite/cli&type=Date)](https://star-history.com/#larksuite/cli&Date) + ## Contributing Community contributions are welcome! If you find a bug or have feature suggestions, please submit an [Issue](https://github.com/larksuite/cli/issues) or [Pull Request](https://github.com/larksuite/cli/pulls). diff --git a/README.zh.md b/README.zh.md index 921177b7..e23fb846 100644 --- a/README.zh.md +++ b/README.zh.md @@ -2,10 +2,11 @@ [![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT) [![Go Version](https://img.shields.io/badge/go-%3E%3D1.23-blue.svg)](https://go.dev/) +[![npm version](https://img.shields.io/npm/v/@larksuite/cli.svg)](https://www.npmjs.com/package/@larksuite/cli) [中文版](./README.zh.md) | [English](./README.md) -飞书/Lark 开放平台命令行工具 — 让人类和 AI Agent 都能在终端中操作飞书。覆盖消息、文档、多维表格、电子表格、日历、邮箱、任务、会议等核心业务域,提供 200+ 命令及 19 个 AI Agent [Skills](./skills/)。 +飞书官方 CLI 工具,由 [larksuite](https://github.com/larksuite) 团队维护 — 让人类和 AI Agent 都能在终端中操作飞书。覆盖消息、文档、多维表格、电子表格、日历、邮箱、任务、会议等核心业务域,提供 200+ 命令及 19 个 AI Agent [Skills](./skills/)。 [安装](#安装与快速开始) · [AI Agent Skills](#agent-skills) · [认证](#认证) · [命令](#三层命令调用) · [进阶用法](#进阶用法) · [安全](#安全与风险提示使用前必读) · [贡献](#贡献) @@ -27,7 +28,7 @@ | 💬 即时通讯 | 发送/回复消息、创建和管理群聊、查看聊天记录与话题、搜索消息、下载媒体文件 | | 📄 云文档 | 创建、读取、更新文档、搜索文档、读写素材与画板 | | 📁 云空间 | 上传和下载文件、搜索文档与知识库、管理评论 | -| 📊 多维表格 | 创建和管理多维表格、字段、记录、视图、仪表盘,数据聚合分析 | +| 📊 多维表格 | 创建和管理数据表、字段、记录、视图、仪表盘、自动化流程、表单、角色权限,数据聚合分析 | | 📈 电子表格 | 创建、读取、写入、追加、查找和导出表格数据 | | ✅ 任务 | 创建、查询、更新和完成任务;管理任务清单、子任务、评论与提醒 | | 📚 知识库 | 创建和管理知识空间、节点和文档 | @@ -46,16 +47,20 @@ ### 快速开始(人类用户) -> **Tip:** 如果你拥有 AI Agent,可以直接把本 README 丢给它,让 AI 帮你完成安装和配置 — 跳转到[快速开始(AI Agent)](#快速开始ai-agent)查看。 +> **Note for AI assistants:** 如果你是 AI Agent 且正在协助用户执行安装,请直接跳转到[快速开始(AI Agent)](#快速开始ai-agent)执行,那里包含你需要完成的所有步骤。 -#### 安装 CLI +#### 安装 以下两种方式**任选其一**: **方式一 — 从 npm 安装(推荐):** ```bash +# 安装 CLI npm install -g @larksuite/cli + 
+# 安装 CLI SKILL(必需) +npx skills add larksuite/cli -y -g ``` **方式二 — 从源码安装:** @@ -66,17 +71,8 @@ npm install -g @larksuite/cli git clone https://github.com/larksuite/cli.git cd cli make install -``` - -#### 安装 AI Agent Skills - -[Skills](./skills/) 是结构化的指令文档,使 AI Agent 能够使用本 CLI: - -```bash -# 安装所有 skills 到当前目录 -npx skills add larksuite/cli -y -# 安装所有 skills 到全局 +# 安装 CLI SKILL(必需) npx skills add larksuite/cli -y -g ``` @@ -97,21 +93,17 @@ lark-cli calendar +agenda > 以下步骤面向 AI Agent,部分步骤需要用户在浏览器中配合完成。 -**第 1 步 — 安装 CLI** +**第 1 步 — 安装** ```bash +# 安装 CLI npm install -g @larksuite/cli -``` - -**第 2 步 — 安装 Skills(核心步骤,请勿跳过)** -> Skills 是告诉 AI Agent 有哪些命令可用及如何调用的指令文件,是 Agent 使用 lark-cli 的前提。 - -```bash -npx skills add larksuite/cli --all -y +# 安装 CLI SKILL(必需) +npx skills add larksuite/cli -y -g ``` -**第 3 步 — 配置应用凭证** +**第 2 步 — 配置应用凭证** > 在后台运行此命令,命令会输出一个授权链接,提取该链接并发送给用户,用户在浏览器中完成配置后命令会自动退出。 @@ -119,7 +111,7 @@ npx skills add larksuite/cli --all -y lark-cli config init --new ``` -**第 4 步 — 登录** +**第 3 步 — 登录** > 同上,后台运行,提取授权链接发给用户。 @@ -127,7 +119,7 @@ lark-cli config init --new lark-cli auth login --recommend ``` -**第 5 步 — 验证** +**第 4 步 — 验证** ```bash lark-cli auth status @@ -274,6 +266,10 @@ lark-cli schema im.messages.delete 请您充分知悉全部使用风险,使用本工具即视为您自愿承担相关所有责任。 +## Star History + +[![Star History Chart](https://api.star-history.com/svg?repos=larksuite/cli&type=Date)](https://star-history.com/#larksuite/cli&Date) + ## 贡献 欢迎社区贡献!如果你发现 bug 或有功能建议,请提交 [Issue](https://github.com/larksuite/cli/issues) 或 [Pull Request](https://github.com/larksuite/cli/pulls)。 diff --git a/cmd/api/api.go b/cmd/api/api.go index d2ec7098..587d4527 100644 --- a/cmd/api/api.go +++ b/cmd/api/api.go @@ -198,15 +198,12 @@ func apiRun(opts *APIOptions) error { Out: out, ErrOut: f.IOStreams.ErrOut, }) - // MarkRaw tells root error handler that the API response was already written - // to stdout, so it should skip the stderr error envelope. 
Only apply when - // HandleResponse actually wrote output (i.e. returned a business/API error - // after printing JSON to stdout). Non-JSON HTTP errors (e.g. 404 text/plain) - // produce no stdout output and need the envelope. - if err != nil && client.IsJSONContentType(resp.Header.Get("Content-Type")) { + // MarkRaw tells root error handler to skip enrichPermissionError, + // preserving the original API error detail (log_id, troubleshooter, etc.). + if err != nil { return output.MarkRaw(err) } - return err + return nil } func apiDryRun(f *cmdutil.Factory, request client.RawApiRequest, config *core.CliConfig, format string) error { diff --git a/cmd/api/api_test.go b/cmd/api/api_test.go index 362a6c02..730aae0b 100644 --- a/cmd/api/api_test.go +++ b/cmd/api/api_test.go @@ -446,10 +446,9 @@ func TestApiCmd_APIError_IsRaw(t *testing.T) { t.Error("expected API error from api command to be marked Raw") } - // stderr should NOT contain an error envelope (identity line is OK) - if strings.Contains(stderr.String(), `"ok"`) { - t.Error("expected no JSON error envelope on stderr for Raw API error") - } + // Note: stderr envelope output is tested at the root level (TestHandleRootError_*) + // since WriteErrorEnvelope is called by handleRootError, not by cobra's Execute. + _ = stderr } func TestApiCmd_APIError_PreservesOriginalMessage(t *testing.T) { diff --git a/cmd/auth/login.go b/cmd/auth/login.go index 572965d3..ebd744f2 100644 --- a/cmd/auth/login.go +++ b/cmd/auth/login.go @@ -90,6 +90,7 @@ func completeDomain(toComplete string) []string { return completions } +// authLoginRun executes the login command logic. 
func authLoginRun(opts *LoginOptions) error { f := opts.Factory @@ -225,26 +226,34 @@ func authLoginRun(opts *LoginOptions) error { // --no-wait: return immediately with device code and URL if opts.NoWait { - b, _ := json.Marshal(map[string]interface{}{ + data := map[string]interface{}{ "verification_url": authResp.VerificationUriComplete, "device_code": authResp.DeviceCode, "expires_in": authResp.ExpiresIn, "hint": fmt.Sprintf("Show verification_url to user, then immediately execute: lark-cli auth login --device-code %s (blocks until authorized or timeout). Do not instruct the user to run this command themselves.", authResp.DeviceCode), - }) - fmt.Fprintln(f.IOStreams.Out, string(b)) + } + encoder := json.NewEncoder(f.IOStreams.Out) + encoder.SetEscapeHTML(false) + if err := encoder.Encode(data); err != nil { + fmt.Fprintf(f.IOStreams.ErrOut, "error: failed to write JSON output: %v\n", err) + } return nil } // Step 2: Show user code and verification URL if opts.JSON { - b, _ := json.Marshal(map[string]interface{}{ + data := map[string]interface{}{ "event": "device_authorization", "verification_uri": authResp.VerificationUri, "verification_uri_complete": authResp.VerificationUriComplete, "user_code": authResp.UserCode, "expires_in": authResp.ExpiresIn, - }) - fmt.Fprintln(f.IOStreams.Out, string(b)) + } + encoder := json.NewEncoder(f.IOStreams.Out) + encoder.SetEscapeHTML(false) + if err := encoder.Encode(data); err != nil { + fmt.Fprintf(f.IOStreams.ErrOut, "error: failed to write JSON output: %v\n", err) + } } else { fmt.Fprintf(f.IOStreams.ErrOut, msg.OpenURL) fmt.Fprintf(f.IOStreams.ErrOut, " %s\n\n", authResp.VerificationUriComplete) diff --git a/cmd/doctor/doctor.go b/cmd/doctor/doctor.go index 6edde0a5..67d95989 100644 --- a/cmd/doctor/doctor.go +++ b/cmd/doctor/doctor.go @@ -14,9 +14,11 @@ import ( "github.com/spf13/cobra" larkauth "github.com/larksuite/cli/internal/auth" + "github.com/larksuite/cli/internal/build" 
"github.com/larksuite/cli/internal/cmdutil" "github.com/larksuite/cli/internal/core" "github.com/larksuite/cli/internal/output" + "github.com/larksuite/cli/internal/update" ) // DoctorOptions holds inputs for the doctor command. @@ -60,6 +62,10 @@ func fail(name, msg, hint string) checkResult { return checkResult{Name: name, Status: "fail", Message: msg, Hint: hint} } +func warn(name, msg, hint string) checkResult { + return checkResult{Name: name, Status: "warn", Message: msg, Hint: hint} +} + func skip(name, msg string) checkResult { return checkResult{Name: name, Status: "skip", Message: msg} } @@ -68,6 +74,12 @@ func doctorRun(opts *DoctorOptions) error { f := opts.Factory var checks []checkResult + // ── 0. CLI version & update check ── + checks = append(checks, pass("cli_version", build.Version)) + if !opts.Offline { + checks = append(checks, checkCLIUpdate()...) + } + // ── 1. Config file ── _, err := core.LoadMultiAppConfig() if err != nil { @@ -214,6 +226,23 @@ func mustHTTPClient(f *cmdutil.Factory) *http.Client { return c } +// checkCLIUpdate actively queries the npm registry for the latest version. +// Unlike the root-level async check, this does a synchronous fetch with timeout +// and works regardless of build version (dev builds included). 
+func checkCLIUpdate() []checkResult { + latest, err := update.FetchLatest() + if err != nil { + return []checkResult{warn("cli_update", "check failed: "+err.Error(), "")} + } + current := build.Version + if update.IsNewer(latest, current) { + return []checkResult{warn("cli_update", + fmt.Sprintf("%s → %s available", current, latest), + "run: npm update -g @larksuite/cli")} + } + return []checkResult{pass("cli_update", latest+" (up to date)")} +} + func finishDoctor(f *cmdutil.Factory, checks []checkResult) error { allOK := true for _, c := range checks { diff --git a/cmd/root.go b/cmd/root.go index 6cf9f624..6168dbde 100644 --- a/cmd/root.go +++ b/cmd/root.go @@ -4,11 +4,13 @@ package cmd import ( + "bytes" "encoding/json" "errors" "fmt" "io" "net/url" + "os" "strconv" "github.com/larksuite/cli/cmd/api" @@ -24,6 +26,7 @@ import ( "github.com/larksuite/cli/internal/core" "github.com/larksuite/cli/internal/output" "github.com/larksuite/cli/internal/registry" + "github.com/larksuite/cli/internal/update" "github.com/larksuite/cli/shortcuts" "github.com/spf13/cobra" ) @@ -65,7 +68,7 @@ AI AGENT SKILLS: teach the agent Lark API patterns, best practices, and workflows. Install all skills: - npx skills add larksuite/cli --all -y + npx skills add larksuite/cli -g -y Or pick specific domains: npx skills add larksuite/cli -s lark-calendar -y @@ -105,12 +108,68 @@ func Execute() int { service.RegisterServiceCommands(rootCmd, f) shortcuts.RegisterShortcuts(rootCmd, f) + // --- Update check (non-blocking) --- + if !isCompletionCommand(os.Args) { + setupUpdateNotice() + } + if err := rootCmd.Execute(); err != nil { return handleRootError(f, err) } return 0 } +// setupUpdateNotice starts an async update check and wires the output decorator. +func setupUpdateNotice() { + // Sync: check cache immediately (no network, fast). + if info := update.CheckCached(build.Version); info != nil { + update.SetPending(info) + } + + // Async: refresh cache for this run (and future runs). 
+ go func() { + defer func() { + if r := recover(); r != nil { + fmt.Fprintf(os.Stderr, "update check panic: %v\n", r) + } + }() + update.RefreshCache(build.Version) + // If cache was just populated for the first time, set pending now. + if update.GetPending() == nil { + if info := update.CheckCached(build.Version); info != nil { + update.SetPending(info) + } + } + }() + + // Wire the output decorator so JSON envelopes include "_notice". + output.PendingNotice = func() map[string]interface{} { + info := update.GetPending() + if info == nil { + return nil + } + return map[string]interface{}{ + "update": map[string]interface{}{ + "current": info.Current, + "latest": info.Latest, + "message": info.Message(), + }, + } + } +} + +// isCompletionCommand returns true if args indicate a shell completion request. +// Update notifications must be suppressed for these to avoid corrupting +// machine-parseable completion output. +func isCompletionCommand(args []string) bool { + for _, arg := range args { + if arg == "completion" || arg == "__complete" { + return true + } + } + return false +} + // handleRootError dispatches a command error to the appropriate handler // and returns the process exit code. func handleRootError(f *cmdutil.Factory, err error) int { @@ -126,12 +185,11 @@ func handleRootError(f *cmdutil.Factory, err error) int { // All other structured errors normalize to ExitError. if exitErr := asExitError(err); exitErr != nil { - if exitErr.Raw { - // Raw errors (e.g. from `api` command) already printed the full API - // response to stdout; skip enrichment and duplicate stderr envelope. - return exitErr.Code + if !exitErr.Raw { + // Raw errors (e.g. from `api` command) preserve the original API + // error detail; skip enrichment which would clear it. 
+ enrichPermissionError(f, exitErr) } - enrichPermissionError(f, exitErr) output.WriteErrorEnvelope(errOut, exitErr, string(f.ResolvedIdentity)) return exitErr.Code } @@ -184,12 +242,18 @@ func writeSecurityPolicyError(w io.Writer, spErr *internalauth.SecurityPolicyErr } env := map[string]interface{}{"ok": false, "error": errData} - b, err := json.MarshalIndent(env, "", " ") + + buffer := &bytes.Buffer{} + encoder := json.NewEncoder(buffer) + encoder.SetEscapeHTML(false) + encoder.SetIndent("", " ") + err := encoder.Encode(env) + if err != nil { fmt.Fprintln(w, `{"ok":false,"error":{"type":"internal_error","code":"marshal_error","message":"failed to marshal error"}}`) return } - fmt.Fprintln(w, string(b)) + fmt.Fprint(w, buffer.String()) } // installTipsHelpFunc wraps the default help function to append a TIPS section diff --git a/cmd/root_e2e_test.go b/cmd/root_e2e_test.go new file mode 100644 index 00000000..afdae1b4 --- /dev/null +++ b/cmd/root_e2e_test.go @@ -0,0 +1,279 @@ +// Copyright (c) 2026 Lark Technologies Pte. Ltd. +// SPDX-License-Identifier: MIT + +package cmd + +import ( + "bytes" + "encoding/json" + "reflect" + "testing" + + "github.com/larksuite/cli/cmd/api" + "github.com/larksuite/cli/cmd/service" + "github.com/larksuite/cli/internal/cmdutil" + "github.com/larksuite/cli/internal/core" + "github.com/larksuite/cli/internal/httpmock" + "github.com/larksuite/cli/internal/output" + "github.com/larksuite/cli/shortcuts" + "github.com/spf13/cobra" +) + +// buildTestRootCmd creates a root command with api, service, and shortcut +// subcommands wired to a test factory, simulating the real CLI command tree. 
+func buildTestRootCmd(t *testing.T, f *cmdutil.Factory) *cobra.Command { + t.Helper() + rootCmd := &cobra.Command{Use: "lark-cli"} + rootCmd.SilenceErrors = true + rootCmd.PersistentPreRun = func(cmd *cobra.Command, args []string) { + cmd.SilenceUsage = true + } + rootCmd.AddCommand(api.NewCmdApi(f, nil)) + service.RegisterServiceCommands(rootCmd, f) + shortcuts.RegisterShortcuts(rootCmd, f) + return rootCmd +} + +// executeE2E runs a command through the full command tree and handleRootError, +// returning exit code — matching real CLI behavior. +func executeE2E(t *testing.T, f *cmdutil.Factory, rootCmd *cobra.Command, args []string) int { + t.Helper() + rootCmd.SetArgs(args) + if err := rootCmd.Execute(); err != nil { + return handleRootError(f, err) + } + return 0 +} + +// registerTokenStub registers a tenant_access_token stub so bot auth succeeds. +func registerTokenStub(reg *httpmock.Registry) { + reg.Register(&httpmock.Stub{ + URL: "/open-apis/auth/v3/tenant_access_token/internal", + Body: map[string]interface{}{ + "code": 0, "msg": "ok", + "tenant_access_token": "t-e2e-token", "expire": 7200, + }, + }) +} + +// parseEnvelope parses stderr bytes into an ErrorEnvelope. +func parseEnvelope(t *testing.T, stderr *bytes.Buffer) output.ErrorEnvelope { + t.Helper() + if stderr.Len() == 0 { + t.Fatal("expected non-empty stderr, got empty") + } + var env output.ErrorEnvelope + if err := json.Unmarshal(stderr.Bytes(), &env); err != nil { + t.Fatalf("failed to parse stderr as ErrorEnvelope: %v\nstderr: %s", err, stderr.String()) + } + return env +} + +// assertEnvelope verifies exit code, stdout is empty, and stderr matches the +// expected ErrorEnvelope exactly via reflect.DeepEqual. 
+func assertEnvelope(t *testing.T, code int, wantCode int, stdout *bytes.Buffer, stderr *bytes.Buffer, want output.ErrorEnvelope) { + t.Helper() + if code != wantCode { + t.Errorf("exit code: got %d, want %d", code, wantCode) + } + if stdout.Len() != 0 { + t.Errorf("expected empty stdout, got:\n%s", stdout.String()) + } + got := parseEnvelope(t, stderr) + if !reflect.DeepEqual(got, want) { + gotJSON, _ := json.MarshalIndent(got, "", " ") + wantJSON, _ := json.MarshalIndent(want, "", " ") + t.Errorf("stderr envelope mismatch:\ngot:\n%s\nwant:\n%s", gotJSON, wantJSON) + } +} + +// --- api command --- + +func TestE2E_Api_BusinessError_OutputsEnvelope(t *testing.T) { + f, stdout, stderr, reg := cmdutil.TestFactory(t, &core.CliConfig{ + AppID: "e2e-api-err", AppSecret: "secret", Brand: core.BrandFeishu, + }) + registerTokenStub(reg) + reg.Register(&httpmock.Stub{ + URL: "/open-apis/im/v1/messages", + Body: map[string]interface{}{ + "code": 230002, + "msg": "Bot/User can NOT be out of the chat.", + "error": map[string]interface{}{ + "log_id": "test-log-id-001", + }, + }, + }) + + rootCmd := buildTestRootCmd(t, f) + code := executeE2E(t, f, rootCmd, []string{ + "api", "--as", "bot", "POST", "/open-apis/im/v1/messages", + "--params", `{"receive_id_type":"chat_id"}`, + "--data", `{"receive_id":"oc_xxx","msg_type":"text","content":"{\"text\":\"test\"}"}`, + }) + + // api uses MarkRaw: detail preserved, no enrichment + assertEnvelope(t, code, output.ExitAPI, stdout, stderr, output.ErrorEnvelope{ + OK: false, + Identity: "bot", + Error: &output.ErrDetail{ + Type: "api_error", + Code: 230002, + Message: "API error: [230002] Bot/User can NOT be out of the chat.", + Detail: map[string]interface{}{ + "log_id": "test-log-id-001", + }, + }, + }) +} + +func TestE2E_Api_PermissionError_NotEnriched(t *testing.T) { + f, stdout, stderr, reg := cmdutil.TestFactory(t, &core.CliConfig{ + AppID: "e2e-api-perm", AppSecret: "secret", Brand: core.BrandFeishu, + }) + registerTokenStub(reg) + 
reg.Register(&httpmock.Stub{ + URL: "/open-apis/test/perm", + Body: map[string]interface{}{ + "code": 99991672, + "msg": "scope not enabled for this app", + "error": map[string]interface{}{ + "permission_violations": []interface{}{ + map[string]interface{}{"subject": "calendar:calendar:readonly"}, + }, + "log_id": "test-log-id-perm", + }, + }, + }) + + rootCmd := buildTestRootCmd(t, f) + code := executeE2E(t, f, rootCmd, []string{ + "api", "--as", "bot", "GET", "/open-apis/test/perm", + }) + + // api uses MarkRaw: enrichment skipped, detail preserved, no console_url + assertEnvelope(t, code, output.ExitAPI, stdout, stderr, output.ErrorEnvelope{ + OK: false, + Identity: "bot", + Error: &output.ErrDetail{ + Type: "permission", + Code: 99991672, + Message: "Permission denied [99991672]", + Hint: "check app permissions or re-authorize: lark-cli auth login", + Detail: map[string]interface{}{ + "permission_violations": []interface{}{ + map[string]interface{}{"subject": "calendar:calendar:readonly"}, + }, + "log_id": "test-log-id-perm", + }, + }, + }) +} + +// --- service command --- + +func TestE2E_Service_BusinessError_OutputsEnvelope(t *testing.T) { + f, stdout, stderr, reg := cmdutil.TestFactory(t, &core.CliConfig{ + AppID: "e2e-svc-err", AppSecret: "secret", Brand: core.BrandFeishu, + }) + registerTokenStub(reg) + reg.Register(&httpmock.Stub{ + URL: "/open-apis/im/v1/chats/oc_fake", + Body: map[string]interface{}{ + "code": 99992356, + "msg": "id not exist", + "error": map[string]interface{}{ + "log_id": "test-log-id-svc", + }, + }, + }) + + rootCmd := buildTestRootCmd(t, f) + code := executeE2E(t, f, rootCmd, []string{ + "im", "chats", "get", "--params", `{"chat_id":"oc_fake"}`, "--as", "bot", + }) + + // service: no MarkRaw, non-permission error — detail preserved + assertEnvelope(t, code, output.ExitAPI, stdout, stderr, output.ErrorEnvelope{ + OK: false, + Identity: "bot", + Error: &output.ErrDetail{ + Type: "api_error", + Code: 99992356, + Message: "API error: 
[99992356] id not exist", + Detail: map[string]interface{}{ + "log_id": "test-log-id-svc", + }, + }, + }) +} + +func TestE2E_Service_PermissionError_Enriched(t *testing.T) { + f, stdout, stderr, reg := cmdutil.TestFactory(t, &core.CliConfig{ + AppID: "e2e-svc-perm", AppSecret: "secret", Brand: core.BrandFeishu, + }) + registerTokenStub(reg) + reg.Register(&httpmock.Stub{ + URL: "/open-apis/im/v1/chats/oc_test", + Body: map[string]interface{}{ + "code": 99991672, + "msg": "scope not enabled", + "error": map[string]interface{}{ + "permission_violations": []interface{}{ + map[string]interface{}{"subject": "im:chat:readonly"}, + }, + }, + }, + }) + + rootCmd := buildTestRootCmd(t, f) + code := executeE2E(t, f, rootCmd, []string{ + "im", "chats", "get", "--params", `{"chat_id":"oc_test"}`, "--as", "bot", + }) + + // service: no MarkRaw — enrichment applied, detail cleared, console_url set + assertEnvelope(t, code, output.ExitAPI, stdout, stderr, output.ErrorEnvelope{ + OK: false, + Identity: "bot", + Error: &output.ErrDetail{ + Type: "permission", + Code: 99991672, + Message: "App scope not enabled: required scope im:chat:readonly [99991672]", + Hint: "enable the scope in developer console (see console_url)", + ConsoleURL: "https://open.feishu.cn/page/scope-apply?clientID=e2e-svc-perm&scopes=im%3Achat%3Areadonly", + }, + }) +} + +// --- shortcut command --- + +func TestE2E_Shortcut_BusinessError_OutputsEnvelope(t *testing.T) { + f, stdout, stderr, reg := cmdutil.TestFactory(t, &core.CliConfig{ + AppID: "e2e-sc-err", AppSecret: "secret", Brand: core.BrandFeishu, + }) + registerTokenStub(reg) + reg.Register(&httpmock.Stub{ + URL: "/open-apis/im/v1/messages", + Status: 400, + Body: map[string]interface{}{ + "code": 230002, + "msg": "Bot/User can NOT be out of the chat.", + }, + }) + + rootCmd := buildTestRootCmd(t, f) + code := executeE2E(t, f, rootCmd, []string{ + "im", "+messages-send", "--as", "bot", "--chat-id", "oc_xxx", "--text", "test", + }) + + // shortcut: no 
MarkRaw, no HandleResponse — error via DoAPIJSON path + assertEnvelope(t, code, output.ExitAPI, stdout, stderr, output.ErrorEnvelope{ + OK: false, + Identity: "bot", + Error: &output.ErrDetail{ + Type: "api_error", + Code: 230002, + Message: "HTTP 400: Bot/User can NOT be out of the chat.", + }, + }) +} diff --git a/cmd/root_test.go b/cmd/root_test.go index f5668d5e..940270b1 100644 --- a/cmd/root_test.go +++ b/cmd/root_test.go @@ -65,7 +65,7 @@ func TestPersistentPreRunE_ConfigSubcommands(t *testing.T) { } } -func TestHandleRootError_RawError_SkipsEnrichmentAndEnvelope(t *testing.T) { +func TestHandleRootError_RawError_SkipsEnrichmentButWritesEnvelope(t *testing.T) { f, _, stderr, _ := cmdutil.TestFactory(t, &core.CliConfig{ AppID: "test-app", AppSecret: "test-secret", Brand: core.BrandFeishu, }) @@ -82,9 +82,9 @@ func TestHandleRootError_RawError_SkipsEnrichmentAndEnvelope(t *testing.T) { if code != output.ExitAPI { t.Errorf("expected exit code %d, got %d", output.ExitAPI, code) } - // stderr should be empty — no envelope written - if stderr.Len() != 0 { - t.Errorf("expected empty stderr for Raw error, got: %s", stderr.String()) + // stderr should contain the error envelope + if stderr.Len() == 0 { + t.Error("expected non-empty stderr for Raw error — WriteErrorEnvelope should always run") } // The message should NOT have been enriched by enrichPermissionError // (ErrAPI sets "Permission denied [code]" but enrichment would replace it with "App scope not enabled: ...") diff --git a/internal/auth/token_store.go b/internal/auth/token_store.go index 80883a64..7e52f670 100644 --- a/internal/auth/token_store.go +++ b/internal/auth/token_store.go @@ -39,8 +39,8 @@ func MaskToken(token string) string { // GetStoredToken reads the stored UAT for a given (appId, userOpenId) pair. 
func GetStoredToken(appId, userOpenId string) *StoredUAToken { - jsonStr := keychain.Get(keychain.LarkCliService, accountKey(appId, userOpenId)) - if jsonStr == "" { + jsonStr, err := keychain.Get(keychain.LarkCliService, accountKey(appId, userOpenId)) + if err != nil || jsonStr == "" { return nil } var token StoredUAToken diff --git a/internal/core/config.go b/internal/core/config.go index ced1e27b..18a2aa4e 100644 --- a/internal/core/config.go +++ b/internal/core/config.go @@ -5,11 +5,13 @@ package core import ( "encoding/json" + "errors" "fmt" "os" "path/filepath" "github.com/larksuite/cli/internal/keychain" + "github.com/larksuite/cli/internal/output" "github.com/larksuite/cli/internal/validate" ) @@ -113,6 +115,12 @@ func RequireConfig(kc keychain.KeychainAccess) (*CliConfig, error) { app := raw.Apps[0] secret, err := ResolveSecretInput(app.AppSecret, kc) if err != nil { + // If the error comes from the keychain, it will already be wrapped as an ExitError. + // For other errors (e.g. file read errors, unknown sources), wrap them as ConfigError. + var exitErr *output.ExitError + if errors.As(err, &exitErr) { + return nil, exitErr + } return nil, &ConfigError{Code: 2, Type: "config", Message: err.Error()} } cfg := &CliConfig{ diff --git a/internal/keychain/default.go b/internal/keychain/default.go index 5d9e3d10..59d99ebf 100644 --- a/internal/keychain/default.go +++ b/internal/keychain/default.go @@ -3,17 +3,12 @@ package keychain -import "fmt" - -// defaultKeychain implements KeychainAccess using the real platform keychain. +// defaultKeychain is the default implementation of KeychainAccess +// that uses the package-level functions. 
type defaultKeychain struct{} func (d *defaultKeychain) Get(service, account string) (string, error) { - val := Get(service, account) - if val == "" { - return "", fmt.Errorf("keychain entry not found: %s/%s", service, account) - } - return val, nil + return Get(service, account) } func (d *defaultKeychain) Set(service, account, value string) error { diff --git a/internal/keychain/keychain.go b/internal/keychain/keychain.go index c225db8b..a5dc74b5 100644 --- a/internal/keychain/keychain.go +++ b/internal/keychain/keychain.go @@ -5,6 +5,15 @@ // macOS uses the system Keychain; Linux uses AES-256-GCM encrypted files; Windows uses DPAPI + registry. package keychain +import ( + "errors" + "fmt" + + "github.com/larksuite/cli/internal/output" +) + +var errNotInitialized = errors.New("keychain not initialized") + const ( // LarkCliService is the unified keychain service name for all secrets // (both AppSecret and UAT). Entries are distinguished by account key format: @@ -13,6 +22,22 @@ const ( LarkCliService = "lark-cli" ) +// wrapError is a helper to wrap underlying errors into output.ExitError. +// It formats the error message and provides a hint for troubleshooting keychain access issues. +func wrapError(op string, err error) error { + if err == nil { + return nil + } + msg := fmt.Sprintf("keychain %s failed: %v", op, err) + hint := "Check if the OS keychain/credential manager is locked or accessible. If running inside a sandbox or CI environment, please ensure the process has the necessary permissions to access the keychain." + + if errors.Is(err, errNotInitialized) { + hint = "The keychain master key may have been cleaned up or deleted. Please reconfigure the CLI by running `lark-cli config init`." + } + + return output.ErrWithHint(output.ExitAPI, "config", msg, hint) +} + // KeychainAccess abstracts keychain Get/Set/Remove for dependency injection. // Used by AppSecret operations (ForStorage, ResolveSecretInput, RemoveSecretStore). 
// UAT operations in token_store.go use the package-level Get/Set/Remove directly. @@ -24,16 +49,17 @@ type KeychainAccess interface { // Get retrieves a value from the keychain. // Returns empty string if the entry does not exist. -func Get(service, account string) string { - return platformGet(service, account) +func Get(service, account string) (string, error) { + val, err := platformGet(service, account) + return val, wrapError("Get", err) } // Set stores a value in the keychain, overwriting any existing entry. func Set(service, account, data string) error { - return platformSet(service, account, data) + return wrapError("Set", platformSet(service, account, data)) } // Remove deletes an entry from the keychain. No error if not found. func Remove(service, account string) error { - return platformRemove(service, account) + return wrapError("Remove", platformRemove(service, account)) } diff --git a/internal/keychain/keychain_darwin.go b/internal/keychain/keychain_darwin.go index fe71583d..a49633f7 100644 --- a/internal/keychain/keychain_darwin.go +++ b/internal/keychain/keychain_darwin.go @@ -11,6 +11,7 @@ import ( "crypto/cipher" "crypto/rand" "encoding/base64" + "errors" "os" "path/filepath" "regexp" @@ -36,11 +37,14 @@ func StorageDir(service string) string { var safeFileNameRe = regexp.MustCompile(`[^a-zA-Z0-9._-]`) +// safeFileName sanitizes an account name to be used as a safe file name. func safeFileName(account string) string { return safeFileNameRe.ReplaceAllString(account, "_") + ".enc" } -func getMasterKey(service string) ([]byte, error) { +// getMasterKey retrieves the master key from the system keychain. +// If allowCreate is true, it generates and stores a new master key if one doesn't exist. 
+func getMasterKey(service string, allowCreate bool) ([]byte, error) { ctx, cancel := context.WithTimeout(context.Background(), keychainTimeout) defer cancel() @@ -59,28 +63,48 @@ func getMasterKey(service string) ([]byte, error) { resCh <- result{key: key, err: nil} return } + // Key is found but invalid or corrupted + resCh <- result{key: nil, err: errors.New("keychain is corrupted")} + return + } else if !errors.Is(err, keyring.ErrNotFound) { + // Not ErrNotFound, which means access was denied or blocked by the system + resCh <- result{key: nil, err: errors.New("keychain access blocked")} + return + } + + // If ErrNotFound, check if we are allowed to create a new key + if !allowCreate { + // Creation not allowed (e.g., during Get operation), return error + resCh <- result{key: nil, err: errNotInitialized} + return } - // Generate new master key if not found or invalid + // It's the first time and creation is allowed (Set operation), generate a new key key := make([]byte, masterKeyBytes) if _, randErr := rand.Read(key); randErr != nil { resCh <- result{key: nil, err: randErr} return } - encodedKey = base64.StdEncoding.EncodeToString(key) - setErr := keyring.Set(service, "master.key", encodedKey) - resCh <- result{key: key, err: setErr} + encodedKeyStr := base64.StdEncoding.EncodeToString(key) + setErr := keyring.Set(service, "master.key", encodedKeyStr) + if setErr != nil { + resCh <- result{key: nil, err: setErr} + return + } + resCh <- result{key: key, err: nil} }() select { case res := <-resCh: return res.key, res.err case <-ctx.Done(): - return nil, ctx.Err() + // Timeout is usually caused by ignored/blocked permission prompts + return nil, errors.New("keychain access blocked") } } +// encryptData encrypts data using AES-GCM. 
func encryptData(plaintext string, key []byte) ([]byte, error) { block, err := aes.NewCipher(key) if err != nil { @@ -103,6 +127,7 @@ func encryptData(plaintext string, key []byte) ([]byte, error) { return result, nil } +// decryptData decrypts data using AES-GCM. func decryptData(data []byte, key []byte) (string, error) { if len(data) < ivBytes+tagBytes { return "", os.ErrInvalid @@ -125,24 +150,30 @@ func decryptData(data []byte, key []byte) (string, error) { return string(plaintext), nil } -func platformGet(service, account string) string { - key, err := getMasterKey(service) +// platformGet retrieves a value from the macOS keychain. +func platformGet(service, account string) (string, error) { + path := filepath.Join(StorageDir(service), safeFileName(account)) + data, err := os.ReadFile(path) + if errors.Is(err, os.ErrNotExist) { + return "", nil + } if err != nil { - return "" + return "", err } - data, err := os.ReadFile(filepath.Join(StorageDir(service), safeFileName(account))) + key, err := getMasterKey(service, false) if err != nil { - return "" + return "", err } plaintext, err := decryptData(data, key) if err != nil { - return "" + return "", err } - return plaintext + return plaintext, nil } +// platformSet stores a value in the macOS keychain. func platformSet(service, account, data string) error { - key, err := getMasterKey(service) + key, err := getMasterKey(service, true) if err != nil { return err } @@ -170,6 +201,7 @@ func platformSet(service, account, data string) error { return nil } +// platformRemove deletes a value from the macOS keychain. 
func platformRemove(service, account string) error { err := os.Remove(filepath.Join(StorageDir(service), safeFileName(account))) if err != nil && !os.IsNotExist(err) { diff --git a/internal/keychain/keychain_other.go b/internal/keychain/keychain_other.go index 631a9fb0..55192d46 100644 --- a/internal/keychain/keychain_other.go +++ b/internal/keychain/keychain_other.go @@ -9,6 +9,7 @@ import ( "crypto/aes" "crypto/cipher" "crypto/rand" + "errors" "fmt" "os" "path/filepath" @@ -21,8 +22,7 @@ const masterKeyBytes = 32 const ivBytes = 12 const tagBytes = 16 -// StorageDir returns the storage directory for a given service name. -// Each service gets its own directory for physical isolation. +// StorageDir returns the directory where encrypted files are stored. func StorageDir(service string) string { home, err := os.UserHomeDir() if err != nil || home == "" { @@ -36,11 +36,14 @@ func StorageDir(service string) string { var safeFileNameRe = regexp.MustCompile(`[^a-zA-Z0-9._-]`) +// safeFileName sanitizes an account name to be used as a safe file name. func safeFileName(account string) string { return safeFileNameRe.ReplaceAllString(account, "_") + ".enc" } -func getMasterKey(service string) ([]byte, error) { +// getMasterKey retrieves the master key from the file system. +// If allowCreate is true, it generates and stores a new master key if one doesn't exist. +func getMasterKey(service string, allowCreate bool) ([]byte, error) { dir := StorageDir(service) keyPath := filepath.Join(dir, "master.key") @@ -48,6 +51,18 @@ func getMasterKey(service string) ([]byte, error) { if err == nil && len(key) == masterKeyBytes { return key, nil } + if err == nil && len(key) != masterKeyBytes { + // Key file exists but is corrupted + return nil, errors.New("keychain is corrupted") + } + if err != nil && !errors.Is(err, os.ErrNotExist) { + // Real I/O error (permission denied, etc.) 
- propagate it + return nil, err + } + + if !allowCreate { + return nil, errNotInitialized + } if err := os.MkdirAll(dir, 0700); err != nil { return nil, err @@ -78,6 +93,7 @@ func getMasterKey(service string) ([]byte, error) { return key, nil } +// encryptData encrypts data using AES-GCM. func encryptData(plaintext string, key []byte) ([]byte, error) { block, err := aes.NewCipher(key) if err != nil { @@ -100,6 +116,7 @@ func encryptData(plaintext string, key []byte) ([]byte, error) { return result, nil } +// decryptData decrypts data using AES-GCM. func decryptData(data []byte, key []byte) (string, error) { if len(data) < ivBytes+tagBytes { return "", os.ErrInvalid @@ -122,24 +139,30 @@ func decryptData(data []byte, key []byte) (string, error) { return string(plaintext), nil } -func platformGet(service, account string) string { - key, err := getMasterKey(service) +// platformGet retrieves a value from the file system. +func platformGet(service, account string) (string, error) { + path := filepath.Join(StorageDir(service), safeFileName(account)) + data, err := os.ReadFile(path) + if errors.Is(err, os.ErrNotExist) { + return "", nil + } if err != nil { - return "" + return "", err } - data, err := os.ReadFile(filepath.Join(StorageDir(service), safeFileName(account))) + key, err := getMasterKey(service, false) if err != nil { - return "" + return "", err } plaintext, err := decryptData(data, key) if err != nil { - return "" + return "", err } - return plaintext + return plaintext, nil } +// platformSet stores a value in the file system. func platformSet(service, account, data string) error { - key, err := getMasterKey(service) + key, err := getMasterKey(service, true) if err != nil { return err } @@ -167,6 +190,7 @@ func platformSet(service, account, data string) error { return nil } +// platformRemove deletes a value from the file system. 
func platformRemove(service, account string) error { err := os.Remove(filepath.Join(StorageDir(service), safeFileName(account))) if err != nil && !os.IsNotExist(err) { diff --git a/internal/keychain/keychain_windows.go b/internal/keychain/keychain_windows.go index 8830e8ac..f0e3f9f8 100644 --- a/internal/keychain/keychain_windows.go +++ b/internal/keychain/keychain_windows.go @@ -22,12 +22,14 @@ import ( const regRootPath = `Software\LarkCli\keychain` +// registryPathForService returns the registry path for a given service. func registryPathForService(service string) string { return regRootPath + `\` + safeRegistryComponent(service) } var safeRegRe = regexp.MustCompile(`[^a-zA-Z0-9._-]`) +// safeRegistryComponent sanitizes a string to be used as a registry key component. func safeRegistryComponent(s string) string { // Registry key path uses '\\' separators; avoid accidental nesting and odd chars. s = strings.ReplaceAll(s, "\\", "_") @@ -39,6 +41,7 @@ func valueNameForAccount(account string) string { return base64.RawURLEncoding.EncodeToString([]byte(account)) } +// dpapiEntropy generates entropy for DPAPI encryption based on the service and account names. func dpapiEntropy(service, account string) *windows.DataBlob { // Bind ciphertext to (service, account) to reduce swap/replay risks. // Note: empty entropy is allowed, but we intentionally use deterministic entropy. @@ -49,6 +52,7 @@ func dpapiEntropy(service, account string) *windows.DataBlob { return &windows.DataBlob{Size: uint32(len(data)), Data: &data[0]} } +// dpapiProtect encrypts data using Windows DPAPI. func dpapiProtect(plaintext []byte, entropy *windows.DataBlob) ([]byte, error) { var in windows.DataBlob if len(plaintext) > 0 { @@ -70,6 +74,7 @@ func dpapiProtect(plaintext []byte, entropy *windows.DataBlob) ([]byte, error) { return res, nil } +// dpapiUnprotect decrypts data using Windows DPAPI. 
func dpapiUnprotect(ciphertext []byte, entropy *windows.DataBlob) ([]byte, error) { var in windows.DataBlob if len(ciphertext) > 0 { @@ -91,6 +96,7 @@ func dpapiUnprotect(ciphertext []byte, entropy *windows.DataBlob) ([]byte, error return res, nil } +// freeDataBlob frees the memory allocated for a DataBlob. func freeDataBlob(b *windows.DataBlob) { if b == nil || b.Data == nil { return @@ -101,11 +107,16 @@ func freeDataBlob(b *windows.DataBlob) { b.Size = 0 } -func platformGet(service, account string) string { - v, _ := registryGet(service, account) - return v +// platformGet retrieves a value from the Windows registry. +func platformGet(service, account string) (string, error) { + v, ok := registryGet(service, account) + if !ok { + return "", nil + } + return v, nil } +// platformSet stores a value in the Windows registry. func platformSet(service, account, data string) error { entropy := dpapiEntropy(service, account) protected, err := dpapiProtect([]byte(data), entropy) @@ -115,10 +126,12 @@ func platformSet(service, account, data string) error { return registrySet(service, account, protected) } +// platformRemove deletes a value from the Windows registry. func platformRemove(service, account string) error { return registryRemove(service, account) } +// registryGet retrieves a string value from the registry under the given service and account. func registryGet(service, account string) (string, bool) { keyPath := registryPathForService(service) k, err := registry.OpenKey(registry.CURRENT_USER, keyPath, registry.QUERY_VALUE) @@ -143,6 +156,7 @@ func registryGet(service, account string) (string, bool) { return string(plain), true } +// registrySet stores a string value in the registry under the given service and account. 
func registrySet(service, account string, protected []byte) error { keyPath := registryPathForService(service) k, _, err := registry.CreateKey(registry.CURRENT_USER, keyPath, registry.SET_VALUE) @@ -158,6 +172,7 @@ func registrySet(service, account string, protected []byte) error { return nil } +// registryRemove deletes a value from the registry under the given service and account. func registryRemove(service, account string) error { keyPath := registryPathForService(service) k, err := registry.OpenKey(registry.CURRENT_USER, keyPath, registry.SET_VALUE) diff --git a/internal/output/envelope.go b/internal/output/envelope.go index 21caefab..e76b6d5c 100644 --- a/internal/output/envelope.go +++ b/internal/output/envelope.go @@ -5,18 +5,20 @@ package output // Envelope is the standard success response wrapper. type Envelope struct { - OK bool `json:"ok"` - Identity string `json:"identity,omitempty"` - Data interface{} `json:"data,omitempty"` - Meta *Meta `json:"meta,omitempty"` + OK bool `json:"ok"` + Identity string `json:"identity,omitempty"` + Data interface{} `json:"data,omitempty"` + Meta *Meta `json:"meta,omitempty"` + Notice map[string]interface{} `json:"_notice,omitempty"` } // ErrorEnvelope is the standard error response wrapper. type ErrorEnvelope struct { - OK bool `json:"ok"` - Identity string `json:"identity,omitempty"` - Error *ErrDetail `json:"error"` - Meta *Meta `json:"meta,omitempty"` + OK bool `json:"ok"` + Identity string `json:"identity,omitempty"` + Error *ErrDetail `json:"error"` + Meta *Meta `json:"meta,omitempty"` + Notice map[string]interface{} `json:"_notice,omitempty"` } // ErrDetail describes a structured error. @@ -34,3 +36,17 @@ type Meta struct { Count int `json:"count,omitempty"` Rollback string `json:"rollback,omitempty"` } + +// PendingNotice, if set, returns system-level notices to inject as the +// "_notice" field in JSON output envelopes. Set by cmd/root.go. +// Returns nil when there is nothing to report. 
+var PendingNotice func() map[string]interface{} + +// GetNotice returns the current pending notice for struct-based callers. +// Returns nil when there is nothing to report. +func GetNotice() map[string]interface{} { + if PendingNotice == nil { + return nil + } + return PendingNotice() +} diff --git a/internal/output/errors.go b/internal/output/errors.go index e61c9b27..0a909923 100644 --- a/internal/output/errors.go +++ b/internal/output/errors.go @@ -40,10 +40,11 @@ func WriteErrorEnvelope(w io.Writer, err *ExitError, identity string) { if err.Detail == nil { return } - env := ErrorEnvelope{ + env := &ErrorEnvelope{ OK: false, Identity: identity, Error: err.Detail, + Notice: GetNotice(), } var buf bytes.Buffer enc := json.NewEncoder(&buf) diff --git a/internal/output/errors_test.go b/internal/output/errors_test.go index 2cc3d1f2..30662dd0 100644 --- a/internal/output/errors_test.go +++ b/internal/output/errors_test.go @@ -4,6 +4,8 @@ package output import ( + "bytes" + "encoding/json" "fmt" "testing" ) @@ -37,3 +39,112 @@ func TestMarkRaw_Nil(t *testing.T) { t.Error("expected MarkRaw(nil) to return nil") } } + +func TestWriteErrorEnvelope_WithNotice(t *testing.T) { + // Set up PendingNotice + origNotice := PendingNotice + PendingNotice = func() map[string]interface{} { + return map[string]interface{}{ + "update": map[string]interface{}{ + "current": "1.0.0", + "latest": "2.0.0", + }, + } + } + defer func() { PendingNotice = origNotice }() + + exitErr := &ExitError{ + Code: 1, + Detail: &ErrDetail{Type: "api_error", Message: "something failed"}, + } + + var buf bytes.Buffer + WriteErrorEnvelope(&buf, exitErr, "user") + + var env map[string]interface{} + if err := json.Unmarshal(buf.Bytes(), &env); err != nil { + t.Fatalf("failed to parse output: %v", err) + } + + // Verify _notice is present + notice, ok := env["_notice"].(map[string]interface{}) + if !ok { + t.Fatal("expected _notice field in output") + } + update, ok := notice["update"].(map[string]interface{}) 
+ if !ok { + t.Fatal("expected _notice.update field") + } + if update["latest"] != "2.0.0" { + t.Errorf("expected latest=2.0.0, got %v", update["latest"]) + } + + // Verify standard fields + if env["ok"] != false { + t.Error("expected ok=false") + } + if env["identity"] != "user" { + t.Errorf("expected identity=user, got %v", env["identity"]) + } +} + +func TestWriteErrorEnvelope_WithoutNotice(t *testing.T) { + // Ensure PendingNotice is nil + origNotice := PendingNotice + PendingNotice = nil + defer func() { PendingNotice = origNotice }() + + exitErr := &ExitError{ + Code: 1, + Detail: &ErrDetail{Type: "api_error", Message: "something failed"}, + } + + var buf bytes.Buffer + WriteErrorEnvelope(&buf, exitErr, "bot") + + var env map[string]interface{} + if err := json.Unmarshal(buf.Bytes(), &env); err != nil { + t.Fatalf("failed to parse output: %v", err) + } + + if _, ok := env["_notice"]; ok { + t.Error("expected no _notice field when PendingNotice is nil") + } +} + +func TestWriteErrorEnvelope_NilDetail(t *testing.T) { + exitErr := &ExitError{Code: 1} + + var buf bytes.Buffer + WriteErrorEnvelope(&buf, exitErr, "user") + + if buf.Len() != 0 { + t.Errorf("expected no output for nil Detail, got: %s", buf.String()) + } +} + +func TestGetNotice(t *testing.T) { + // Nil PendingNotice → nil + origNotice := PendingNotice + PendingNotice = nil + if got := GetNotice(); got != nil { + t.Errorf("expected nil, got %v", got) + } + + // With PendingNotice → returns value + PendingNotice = func() map[string]interface{} { + return map[string]interface{}{"update": "test"} + } + got := GetNotice() + if got == nil || got["update"] != "test" { + t.Errorf("expected {update: test}, got %v", got) + } + + // PendingNotice returns nil → nil + PendingNotice = func() map[string]interface{} { return nil } + if got := GetNotice(); got != nil { + t.Errorf("expected nil, got %v", got) + } + + PendingNotice = origNotice +} diff --git a/internal/output/print.go b/internal/output/print.go index 
e26e5117..c26c2edb 100644 --- a/internal/output/print.go +++ b/internal/output/print.go @@ -14,6 +14,7 @@ import ( // PrintJson prints data as formatted JSON to w. func PrintJson(w io.Writer, data interface{}) { + injectNotice(data) b, err := json.MarshalIndent(data, "", " ") if err != nil { fmt.Fprintf(os.Stderr, "json marshal error: %v\n", err) @@ -22,6 +23,31 @@ func PrintJson(w io.Writer, data interface{}) { fmt.Fprintln(w, string(b)) } +// injectNotice adds a "_notice" field into CLI envelope maps. +// Only modifies map[string]interface{} values that have an "ok" key +// (e.g. doctor, auth, config commands that build map envelopes directly). +// +// Struct-based envelopes (Envelope, ErrorEnvelope) are NOT handled here — +// callers must set the Notice field explicitly via GetNotice(). +// See: shortcuts/common/runner.go Out(), output/errors.go WriteErrorEnvelope(). +func injectNotice(data interface{}) { + if PendingNotice == nil { + return + } + m, ok := data.(map[string]interface{}) + if !ok { + return + } + if _, isEnvelope := m["ok"]; !isEnvelope { + return + } + notice := PendingNotice() + if notice == nil { + return + } + m["_notice"] = notice +} + // PrintNdjson prints data as NDJSON (Newline Delimited JSON) to w. func PrintNdjson(w io.Writer, data interface{}) { emit := func(item interface{}) { diff --git a/internal/output/print_test.go b/internal/output/print_test.go new file mode 100644 index 00000000..46c13f93 --- /dev/null +++ b/internal/output/print_test.go @@ -0,0 +1,101 @@ +// Copyright (c) 2026 Lark Technologies Pte. Ltd. 
+// SPDX-License-Identifier: MIT + +package output + +import ( + "bytes" + "encoding/json" + "testing" +) + +func TestPrintJson_InjectNotice_Map(t *testing.T) { + origNotice := PendingNotice + PendingNotice = func() map[string]interface{} { + return map[string]interface{}{"update": "available"} + } + defer func() { PendingNotice = origNotice }() + + data := map[string]interface{}{"ok": true, "data": "test"} + var buf bytes.Buffer + PrintJson(&buf, data) + + var got map[string]interface{} + if err := json.Unmarshal(buf.Bytes(), &got); err != nil { + t.Fatalf("failed to parse: %v", err) + } + notice, ok := got["_notice"].(map[string]interface{}) + if !ok { + t.Fatal("expected _notice in map-based envelope") + } + if notice["update"] != "available" { + t.Errorf("expected update=available, got %v", notice["update"]) + } +} + +func TestPrintJson_InjectNotice_SkipsNonEnvelope(t *testing.T) { + origNotice := PendingNotice + PendingNotice = func() map[string]interface{} { + return map[string]interface{}{"update": "available"} + } + defer func() { PendingNotice = origNotice }() + + // Map without "ok" key should not get _notice + data := map[string]interface{}{"name": "test"} + var buf bytes.Buffer + PrintJson(&buf, data) + + var got map[string]interface{} + if err := json.Unmarshal(buf.Bytes(), &got); err != nil { + t.Fatalf("failed to parse: %v", err) + } + if _, ok := got["_notice"]; ok { + t.Error("expected no _notice for non-envelope map") + } +} + +func TestPrintJson_Struct_PreservesNotice(t *testing.T) { + origNotice := PendingNotice + PendingNotice = nil // no global notice + defer func() { PendingNotice = origNotice }() + + // Struct with Notice already set should preserve it + env := &Envelope{ + OK: true, + Identity: "user", + Data: "hello", + Notice: map[string]interface{}{"update": "set-by-caller"}, + } + var buf bytes.Buffer + PrintJson(&buf, env) + + var got map[string]interface{} + if err := json.Unmarshal(buf.Bytes(), &got); err != nil { + t.Fatalf("failed 
to parse: %v", err) + } + notice, ok := got["_notice"].(map[string]interface{}) + if !ok { + t.Fatal("expected _notice from struct field") + } + if notice["update"] != "set-by-caller" { + t.Errorf("expected update=set-by-caller, got %v", notice["update"]) + } +} + +func TestPrintJson_NoNotice(t *testing.T) { + origNotice := PendingNotice + PendingNotice = nil + defer func() { PendingNotice = origNotice }() + + data := map[string]interface{}{"ok": true, "data": "test"} + var buf bytes.Buffer + PrintJson(&buf, data) + + var got map[string]interface{} + if err := json.Unmarshal(buf.Bytes(), &got); err != nil { + t.Fatalf("failed to parse: %v", err) + } + if _, ok := got["_notice"]; ok { + t.Error("expected no _notice when PendingNotice is nil") + } +} diff --git a/internal/registry/service_descriptions.json b/internal/registry/service_descriptions.json index 2ac898b8..fe290798 100644 --- a/internal/registry/service_descriptions.json +++ b/internal/registry/service_descriptions.json @@ -1,7 +1,7 @@ { "base": { - "en": { "title": "Base", "description": "Table, field, record, and view management" }, - "zh": { "title": "多维表格", "description": "数据表、字段、记录、视图" } + "en": { "title": "Base", "description": "Table, field, record, view, dashboard, workflow, form, role & permission management" }, + "zh": { "title": "多维表格", "description": "数据表、字段、记录、视图、仪表盘、自动化流程、表单、角色权限管理" } }, "calendar": { "en": { "title": "Calendar", "description": "Calendar, event, and attendee management" }, diff --git a/internal/update/update.go b/internal/update/update.go new file mode 100644 index 00000000..68e0a265 --- /dev/null +++ b/internal/update/update.go @@ -0,0 +1,255 @@ +// Copyright (c) 2026 Lark Technologies Pte. Ltd. 
+// SPDX-License-Identifier: MIT + +package update + +import ( + "encoding/json" + "fmt" + "io" + "net/http" + "os" + "path/filepath" + "regexp" + "strconv" + "strings" + "sync/atomic" + "time" + + "github.com/larksuite/cli/internal/core" + "github.com/larksuite/cli/internal/validate" +) + +const ( + registryURL = "https://registry.npmjs.org/@larksuite/cli/latest" + cacheTTL = 24 * time.Hour + fetchTimeout = 5 * time.Second + stateFile = "update-state.json" + maxBody = 256 << 10 // 256 KB + +) + +// UpdateInfo holds version update information. +type UpdateInfo struct { + Current string `json:"current"` + Latest string `json:"latest"` +} + +// Message returns a concise update notification. +func (u *UpdateInfo) Message() string { + return fmt.Sprintf("lark-cli %s available, current %s", u.Latest, u.Current) +} + +// pending stores the latest update info for the current process. +var pending atomic.Pointer[UpdateInfo] + +// SetPending stores the update info for consumption by output decorators. +func SetPending(info *UpdateInfo) { pending.Store(info) } + +// GetPending returns the pending update info, or nil. +func GetPending() *UpdateInfo { return pending.Load() } + +// DefaultClient is the HTTP client used for npm registry requests. +// Override in tests with an httptest server client. +var DefaultClient *http.Client + +func httpClient() *http.Client { + if DefaultClient != nil { + return DefaultClient + } + return &http.Client{Timeout: fetchTimeout} +} + +// updateState is persisted to disk for caching. +type updateState struct { + LatestVersion string `json:"latest_version"` + CheckedAt int64 `json:"checked_at"` +} + +// CheckCached checks the local cache only (no network). Always fast. 
+func CheckCached(currentVersion string) *UpdateInfo { + if shouldSkip(currentVersion) { + return nil + } + state, _ := loadState() + if state == nil || state.LatestVersion == "" { + return nil + } + if !IsNewer(state.LatestVersion, currentVersion) { + return nil + } + return &UpdateInfo{Current: currentVersion, Latest: state.LatestVersion} +} + +// RefreshCache fetches the latest version from npm and updates the local cache. +// No-op if the cache is still fresh (< 24h). Safe to call from a goroutine. +func RefreshCache(currentVersion string) { + if shouldSkip(currentVersion) { + return + } + state, _ := loadState() + if state != nil && time.Since(time.Unix(state.CheckedAt, 0)) < cacheTTL { + return // cache is fresh + } + latest, err := fetchLatestVersion() + if err != nil { + return + } + _ = saveState(&updateState{ + LatestVersion: latest, + CheckedAt: time.Now().Unix(), + }) +} + +func shouldSkip(version string) bool { + if os.Getenv("LARKSUITE_CLI_NO_UPDATE_NOTIFIER") != "" { + return true + } + // Suppress in CI environments. + for _, key := range []string{"CI", "BUILD_NUMBER", "RUN_ID"} { + if os.Getenv(key) != "" { + return true + } + } + // No version info at all — can't compare. + if version == "DEV" || version == "dev" || version == "" { + return true + } + // Skip local dev builds (e.g. v1.0.0-12-g9b933f1-dirty from git describe). + // Only released versions (clean X.Y.Z) should check for updates. + if !isRelease(version) { + return true + } + return false +} + +// isRelease returns true for published versions: clean semver (1.0.0) +// and npm prerelease (1.0.0-beta.1, 1.0.0-rc.1). +// Returns false for git describe dev builds (v1.0.0-12-g9b933f1-dirty). 
+var gitDescribePattern = regexp.MustCompile(`-\d+-g[0-9a-f]{7,}`) + +func isRelease(version string) bool { + v := strings.TrimPrefix(version, "v") + if ParseVersion(v) == nil { + return false + } + return !gitDescribePattern.MatchString(v) +} + +// --- state file I/O --- + +func statePath() string { + return filepath.Join(core.GetConfigDir(), stateFile) +} + +func loadState() (*updateState, error) { + data, err := os.ReadFile(statePath()) + if err != nil { + return nil, err + } + var s updateState + if err := json.Unmarshal(data, &s); err != nil { + return nil, err + } + return &s, nil +} + +func saveState(s *updateState) error { + dir := core.GetConfigDir() + if err := os.MkdirAll(dir, 0700); err != nil { + return err + } + data, err := json.Marshal(s) + if err != nil { + return err + } + return validate.AtomicWrite(statePath(), data, 0644) +} + +// FetchLatest queries the npm registry and returns the latest published version. +// This is a synchronous call with timeout, intended for diagnostic commands (doctor). +func FetchLatest() (string, error) { + return fetchLatestVersion() +} + +// --- npm registry --- + +type npmLatestResponse struct { + Version string `json:"version"` +} + +func fetchLatestVersion() (string, error) { + resp, err := httpClient().Get(registryURL) + if err != nil { + return "", err + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + return "", fmt.Errorf("npm registry: HTTP %d", resp.StatusCode) + } + + body, err := io.ReadAll(io.LimitReader(resp.Body, maxBody)) + if err != nil { + return "", err + } + + var result npmLatestResponse + if err := json.Unmarshal(body, &result); err != nil { + return "", err + } + if result.Version == "" { + return "", fmt.Errorf("npm registry: empty version") + } + return result.Version, nil +} + +// --- semver helpers --- + +// IsNewer returns true if version a should be considered an update over b. +// +// When both parse as semver, standard comparison applies. 
+// When b cannot be parsed (e.g. bare commit hash "9b933f1"), any valid a +// is considered newer — an unparseable local version is assumed outdated. +// When a cannot be parsed, returns false (can't confirm it's newer). +func IsNewer(a, b string) bool { + ap := ParseVersion(a) + bp := ParseVersion(b) + if ap == nil { + return false // can't confirm remote is newer + } + if bp == nil { + return true // local version unparseable → assume outdated + } + for i := 0; i < 3; i++ { + if ap[i] > bp[i] { + return true + } + if ap[i] < bp[i] { + return false + } + } + return false +} + +// ParseVersion parses "X.Y.Z" (with optional "v" prefix and pre-release suffix) +// into [major, minor, patch]. Returns nil on invalid input. +func ParseVersion(v string) []int { + v = strings.TrimPrefix(v, "v") + parts := strings.SplitN(v, ".", 3) + if len(parts) != 3 { + return nil + } + nums := make([]int, 3) + for i, p := range parts { + if idx := strings.IndexAny(p, "-+"); idx >= 0 { + p = p[:idx] + } + n, err := strconv.Atoi(p) + if err != nil { + return nil + } + nums[i] = n + } + return nums +} diff --git a/internal/update/update_test.go b/internal/update/update_test.go new file mode 100644 index 00000000..a56a9967 --- /dev/null +++ b/internal/update/update_test.go @@ -0,0 +1,253 @@ +// Copyright (c) 2026 Lark Technologies Pte. Ltd. +// SPDX-License-Identifier: MIT + +package update + +import ( + "encoding/json" + "net/http" + "net/http/httptest" + "net/url" + "os" + "path/filepath" + "strings" + "testing" + "time" +) + +// roundTripFunc adapts a function to http.RoundTripper. +type roundTripFunc func(*http.Request) (*http.Response, error) + +func (f roundTripFunc) RoundTrip(req *http.Request) (*http.Response, error) { return f(req) } + +// clearSkipEnv unsets all env vars that shouldSkip checks, +// preventing the host environment (e.g. CI=true) from polluting test results. 
+func clearSkipEnv(t *testing.T) { + t.Helper() + for _, key := range []string{"LARKSUITE_CLI_NO_UPDATE_NOTIFIER", "CI", "BUILD_NUMBER", "RUN_ID"} { + t.Setenv(key, "") + os.Unsetenv(key) + } +} + +func mustParseURL(raw string) *url.URL { + u, err := url.Parse(raw) + if err != nil { + panic(err) + } + return u +} + +func TestIsNewer(t *testing.T) { + tests := []struct { + a, b string + want bool + }{ + {"1.1.0", "1.0.0", true}, + {"1.0.0", "1.0.0", false}, + {"1.0.0", "1.1.0", false}, + {"2.0.0", "1.9.9", true}, + {"1.0.1", "1.0.0", true}, + {"v1.1.0", "1.0.0", true}, + {"1.1.0", "v1.0.0", true}, + {"0.0.1", "0.0.0", true}, + {"DEV", "1.0.0", false}, // unparseable remote → false + {"1.0.0", "DEV", true}, // unparseable local → assume outdated + {"1.0.0", "9b933f1", true}, // bare commit hash → assume outdated + {"", "1.0.0", false}, // empty remote → false + {"1.1.0", "v1.0.0-12-g9b933f1-dirty", true}, // git describe: 1.1.0 > 1.0.0 + } + for _, tt := range tests { + got := IsNewer(tt.a, tt.b) + if got != tt.want { + t.Errorf("IsNewer(%q, %q) = %v, want %v", tt.a, tt.b, got, tt.want) + } + } +} + +func TestParseVersion(t *testing.T) { + tests := []struct { + input string + want []int + }{ + {"1.2.3", []int{1, 2, 3}}, + {"v1.2.3", []int{1, 2, 3}}, + {"0.0.1", []int{0, 0, 1}}, + {"1.0.0-beta.1", []int{1, 0, 0}}, + {"DEV", nil}, + {"", nil}, + {"1.2", nil}, + } + for _, tt := range tests { + got := ParseVersion(tt.input) + if tt.want == nil { + if got != nil { + t.Errorf("ParseVersion(%q) = %v, want nil", tt.input, got) + } + continue + } + if got == nil || got[0] != tt.want[0] || got[1] != tt.want[1] || got[2] != tt.want[2] { + t.Errorf("ParseVersion(%q) = %v, want %v", tt.input, got, tt.want) + } + } +} + +func TestShouldSkip(t *testing.T) { + tests := []struct { + name string + version string + env map[string]string + want bool + }{ + {"DEV", "DEV", nil, true}, + {"dev_lower", "dev", nil, true}, + {"empty", "", nil, true}, + {"CI", "1.0.0", map[string]string{"CI": 
"true"}, true}, + {"BUILD_NUMBER", "1.0.0", map[string]string{"BUILD_NUMBER": "42"}, true}, + {"RUN_ID", "1.0.0", map[string]string{"RUN_ID": "123"}, true}, + {"notifier_off", "1.0.0", map[string]string{"LARKSUITE_CLI_NO_UPDATE_NOTIFIER": "1"}, true}, + {"git_describe", "v1.0.0-12-g9b933f1", nil, true}, + {"git_dirty", "v1.0.0-12-g9b933f1-dirty", nil, true}, + {"commit_hash", "9b933f1", nil, true}, + {"clean_semver", "1.0.0", nil, false}, + {"clean_semver_v", "v1.0.0", nil, false}, + {"prerelease_beta", "1.0.0-beta.1", nil, false}, + {"prerelease_rc", "2.0.0-rc.1", nil, false}, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + clearSkipEnv(t) + for k, v := range tt.env { + t.Setenv(k, v) + } + got := shouldSkip(tt.version) + if got != tt.want { + t.Errorf("shouldSkip(%q) = %v, want %v", tt.version, got, tt.want) + } + }) + } +} + +func TestIsRelease(t *testing.T) { + tests := []struct { + version string + want bool + }{ + {"1.0.0", true}, + {"v1.0.0", true}, + {"0.1.0", true}, + {"1.0.0-beta.1", true}, + {"1.0.0-rc.1", true}, + {"2.0.0-alpha.0", true}, + {"v1.0.0-12-g9b933f1", false}, // git describe + {"v1.0.0-12-g9b933f1-dirty", false}, // git describe dirty + {"v2.1.0-3-gabcdef0", false}, // git describe short + {"9b933f1", false}, // bare commit hash + {"DEV", false}, // dev marker + {"", false}, // empty + {"1.0", false}, // incomplete semver + } + for _, tt := range tests { + t.Run(tt.version, func(t *testing.T) { + got := isRelease(tt.version) + if got != tt.want { + t.Errorf("isRelease(%q) = %v, want %v", tt.version, got, tt.want) + } + }) + } +} + +func TestUpdateInfoMethods(t *testing.T) { + info := &UpdateInfo{Current: "1.0.0", Latest: "2.0.0"} + + msg := info.Message() + if !strings.Contains(msg, "2.0.0") { + t.Errorf("Message() missing latest version: %s", msg) + } + if !strings.Contains(msg, "1.0.0") { + t.Errorf("Message() missing current version: %s", msg) + } +} + +func TestCheckCached(t *testing.T) { + clearSkipEnv(t) + tmp 
:= t.TempDir() + t.Setenv("LARKSUITE_CLI_CONFIG_DIR", tmp) + + // No cache → nil + info := CheckCached("1.0.0") + if info != nil { + t.Errorf("expected nil with no cache, got %+v", info) + } + + // Write cache with newer version + state := &updateState{LatestVersion: "2.0.0", CheckedAt: time.Now().Unix()} + data, _ := json.Marshal(state) + os.WriteFile(filepath.Join(tmp, stateFile), data, 0644) + + info = CheckCached("1.0.0") + if info == nil { + t.Fatal("expected update info, got nil") + } + if info.Latest != "2.0.0" || info.Current != "1.0.0" { + t.Errorf("unexpected info: %+v", info) + } + + // Same version → nil + info = CheckCached("2.0.0") + if info != nil { + t.Errorf("expected nil when versions match, got %+v", info) + } +} + +func TestRefreshCache(t *testing.T) { + clearSkipEnv(t) + tmp := t.TempDir() + t.Setenv("LARKSUITE_CLI_CONFIG_DIR", tmp) + + // Set up mock npm registry via DefaultClient + srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + json.NewEncoder(w).Encode(npmLatestResponse{Version: "3.0.0"}) + })) + defer srv.Close() + + // Redirect all requests to the mock server. + DefaultClient = srv.Client() + DefaultClient.Transport = roundTripFunc(func(req *http.Request) (*http.Response, error) { + req.URL = mustParseURL(srv.URL + req.URL.Path) + return http.DefaultTransport.RoundTrip(req) + }) + defer func() { DefaultClient = nil }() + + RefreshCache("1.0.0") + + // Verify cache was written + info := CheckCached("1.0.0") + if info == nil { + t.Fatal("expected update info after refresh, got nil") + } + if info.Latest != "3.0.0" { + t.Errorf("expected latest 3.0.0, got %s", info.Latest) + } + + // Second refresh should be no-op (cache is fresh) — won't hit network. 
+ RefreshCache("1.0.0") +} + +func TestPendingAtomicAccess(t *testing.T) { + // Initially nil + if got := GetPending(); got != nil { + t.Errorf("expected nil, got %+v", got) + } + + info := &UpdateInfo{Current: "1.0.0", Latest: "2.0.0"} + SetPending(info) + + got := GetPending() + if got == nil || got.Current != "1.0.0" || got.Latest != "2.0.0" { + t.Errorf("unexpected pending: %+v", got) + } + + // Clean up for other tests + SetPending(nil) +} diff --git a/internal/validate/url.go b/internal/validate/url.go index 6d6ab0c4..e25df97b 100644 --- a/internal/validate/url.go +++ b/internal/validate/url.go @@ -181,6 +181,25 @@ func cloneDownloadTransport(base http.RoundTripper) *http.Transport { return cloned } +// DialContextFunc is the signature for DialContext / DialTLSContext. +type DialContextFunc func(ctx context.Context, network, addr string) (net.Conn, error) + +// WrapDialContextWithIPCheck wraps a DialContext function to validate the +// remote IP after connection, rejecting local/internal addresses (SSRF protection). 
+func WrapDialContextWithIPCheck(origDial DialContextFunc) DialContextFunc { + return func(ctx context.Context, network, addr string) (net.Conn, error) { + conn, err := dialConn(ctx, origDial, network, addr) + if err != nil { + return nil, err + } + if err := validateConnRemoteIP(conn); err != nil { + conn.Close() + return nil, err + } + return conn, nil + } +} + func dialConn(ctx context.Context, dialFn func(context.Context, string, string) (net.Conn, error), network, addr string) (net.Conn, error) { if dialFn != nil { return dialFn(ctx, network, addr) diff --git a/package.json b/package.json index 13e68149..7cdae730 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@larksuite/cli", - "version": "1.0.0", + "version": "1.0.2", "description": "The official CLI for Lark/Feishu open platform", "bin": { "lark-cli": "scripts/run.js" diff --git a/scripts/build-pkg-pr-new.sh b/scripts/build-pkg-pr-new.sh new file mode 100755 index 00000000..0a9ea4c3 --- /dev/null +++ b/scripts/build-pkg-pr-new.sh @@ -0,0 +1,105 @@ +#!/usr/bin/env bash +set -euo pipefail + +ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." 
&& pwd)" +OUT_DIR="$ROOT_DIR/.pkg-pr-new" + +cd "$ROOT_DIR" + +python3 scripts/fetch_meta.py + +rm -rf "$OUT_DIR" +mkdir -p "$OUT_DIR/bin" "$OUT_DIR/scripts" + +VERSION="$(node -p "require('./package.json').version")" +DATE="$(date -u +%Y-%m-%dT%H:%M:%SZ)" +SHA="$(git rev-parse --short HEAD)" +LDFLAGS="-s -w -X github.com/larksuite/cli/internal/build.Version=${VERSION}-${SHA} -X github.com/larksuite/cli/internal/build.Date=${DATE}" + +build_target() { + local goos="$1" + local goarch="$2" + local ext="" + if [[ "$goos" == "windows" ]]; then + ext=".exe" + fi + + local output="$OUT_DIR/bin/lark-cli-${goos}-${goarch}${ext}" + echo "Building ${goos}/${goarch} -> ${output}" + CGO_ENABLED=0 GOOS="$goos" GOARCH="$goarch" go build -trimpath -ldflags "$LDFLAGS" -o "$output" ./main.go +} + +build_target darwin arm64 +build_target linux amd64 +build_target darwin amd64 +build_target linux arm64 +build_target windows amd64 +build_target windows arm64 + +cat > "$OUT_DIR/scripts/run.js" <<'RUNJS' +#!/usr/bin/env node +const path = require("path"); +const { execFileSync } = require("child_process"); + +const isWindows = process.platform === "win32"; + +const platformMap = { + darwin: "darwin", + linux: "linux", + win32: "windows", +}; + +// TODO: Keep broad platform mapping for now; with pkg.pr.new 20MB limit we only ship a subset of binaries. +// Track upstream progress before tightening runtime handling: https://github.com/stackblitz-labs/pkg.pr.new/pull/484 + +const archMap = { + x64: "amd64", + arm64: "arm64", +}; + +const platform = platformMap[process.platform]; +const arch = archMap[process.arch]; + +if (!platform || !arch) { + console.error(`Unsupported platform: ${process.platform}-${process.arch}`); + process.exit(1); +} + +const ext = isWindows ? 
".exe" : ""; +const binary = path.join(__dirname, "..", "bin", `lark-cli-${platform}-${arch}${ext}`); + +try { + execFileSync(binary, process.argv.slice(2), { stdio: "inherit" }); +} catch (err) { + process.exit(err.status || 1); +} +RUNJS + +chmod +x "$OUT_DIR/scripts/run.js" + +cat > "$OUT_DIR/package.json" <= 300 lines), cross-domain changes, or any changes touching core architecture paths (like `cmd/`). +- **`size/XL`**: Architectural overhauls, extremely large PRs (>1200 lines), or sensitive refactors. + +### Domain Tags (`domain/*`) +The script also identifies which business domains a PR touches to give reviewers an immediate sense of the impact scope. Currently tracked domains include: +- `domain/im` +- `domain/vc` +- `domain/ccm` +- `domain/base` +- `domain/mail` +- `domain/calendar` +- `domain/task` +- `domain/contact` + +Minor modules like docs and tests are omitted to keep PR tags clean and focused on structural changes. + +## Usage + +### In GitHub Actions +This script is designed to run in CI workflows. It automatically reads the `GITHUB_EVENT_PATH` payload to get the PR context. + +```bash +node scripts/pr-labels/index.js +``` + +### Local Dry Run +You can test the labeling logic against an existing GitHub PR without actually applying labels by using the `--dry-run` flag. + +```bash +# Requires GITHUB_TOKEN environment variable or passing --token +node scripts/pr-labels/index.js --dry-run --repo larksuite/cli --pr-number 123 +``` + +## Testing + +A regression test suite is available in `test.js` which verifies the output of the classification logic against historical PRs configured in `samples.json`. + +```bash +# Requires GITHUB_TOKEN environment variable to avoid rate limits +GITHUB_TOKEN=$(gh auth token) node scripts/pr-labels/test.js +``` + +This test suite also runs automatically in CI via `.github/workflows/pr-labels-test.yml` when changes are made to this directory. 
\ No newline at end of file diff --git a/scripts/pr-labels/index.js b/scripts/pr-labels/index.js new file mode 100755 index 00000000..5897d0c2 --- /dev/null +++ b/scripts/pr-labels/index.js @@ -0,0 +1,747 @@ +#!/usr/bin/env node +// Copyright (c) 2026 Lark Technologies Pte. Ltd. +// SPDX-License-Identifier: MIT + +const fs = require("node:fs/promises"); +const path = require("node:path"); + +// ============================================================================ +// Constants & Configuration +// ============================================================================ + +const API_BASE = "https://api.github.com"; +const SCRIPT_DIR = __dirname; +const ROOT = path.join(SCRIPT_DIR, "..", ".."); + +const THRESHOLD_L = 300; +const THRESHOLD_XL = 1200; + +const LABEL_DEFINITIONS = { + "size/S": { color: "77bb00", description: "Low-risk docs, CI, test, or chore only changes" }, + "size/M": { color: "eebb00", description: "Single-domain feat or fix with limited business impact" }, + "size/L": { color: "ff8800", description: "Large or sensitive change across domains or core paths" }, + "size/XL": { color: "ee0000", description: "Architecture-level or global-impact change" }, +}; + +const MANAGED_LABELS = new Set(Object.keys(LABEL_DEFINITIONS)); + +// File path matching configurations +const DOC_SUFFIXES = [".md", ".mdx", ".txt", ".rst"]; +const LOW_RISK_PREFIXES = [".github/", "docs/", ".changeset/", "testdata/", "tests/", "skill-template/"]; +const LOW_RISK_FILENAMES = new Set(["readme.md", "readme.zh.md", "changelog.md", "license", "cla.md"]); +const LOW_RISK_TEST_SUFFIXES = ["_test.go", ".snap"]; + +const CORE_PREFIXES = ["internal/auth/", "internal/engine/", "internal/config/", "cmd/"]; +const HEAD_BUSINESS_DOMAINS = new Set(["im", "contact", "ccm", "base", "docx"]); +const LOW_RISK_TYPES = new Set(["docs", "ci", "test", "chore"]); + +// CODEOWNERS-based path to domain label mapping +// Maps shortcuts and skills paths to business domain labels +const 
PATH_TO_DOMAIN_MAP = { + // shortcuts + "shortcuts/im/": "im", + "shortcuts/vc/": "vc", + "shortcuts/calendar/": "calendar", + "shortcuts/doc/": "ccm", + "shortcuts/sheets/": "ccm", + "shortcuts/drive/": "ccm", + "shortcuts/base/": "base", + "shortcuts/mail/": "mail", + "shortcuts/task/": "task", + "shortcuts/contact/": "contact", + // skills + "skills/lark-im/": "im", + "skills/lark-vc/": "vc", + "skills/lark-doc/": "ccm", + "skills/lark-base/": "base", + "skills/lark-mail/": "mail", + "skills/lark-calendar/": "calendar", + "skills/lark-task/": "task", + "skills/lark-contact/": "contact", +}; + +const SENSITIVE_PATTERN = /(^|\/)(auth|permission|permissions|security)(\/|_|\.|$)/; + +const CLASS_STANDARDS = { + "size/S": { + channel: "Fast track (S)", + gates: [ + "Code quality: AI code review passed", + "Dependency and configuration security checks passed", + ], + }, + "size/M": { + channel: "Fast track (M)", + gates: [ + "Code quality: AI code review passed", + "Dependency and configuration security checks passed", + "Skill format validation: added or modified Skills load successfully", + "CLI automation tests: all required business-line tests passed", + ], + }, + "size/L": { + channel: "Standard track (L)", + gates: [ + "Code quality: AI code review passed", + "Dependency and configuration security checks passed", + "Skill format validation: added or modified Skills load successfully", + "CLI automation tests: all required business-line tests passed", + "Domain evaluation passed: reported success rate is greater than 95%", + ], + }, + "size/XL": { + channel: "Strict track (XL)", + gates: [ + "Code quality: AI code review passed", + "Dependency and configuration security checks passed", + "Skill format validation: added or modified Skills load successfully", + "CLI automation tests: all required business-line tests passed", + "Domain evaluation passed: reported success rate is greater than 95%", + "Cross-domain release gate: all domains and full integration 
evaluations passed", + ], + }, +}; + +// ============================================================================ +// Utilities +// ============================================================================ + +function log(message) { + console.error(`sync-pr-labels: ${message}`); +} + +function normalizePath(input) { + return String(input || "").trim().toLowerCase(); +} + +function envValue(name) { + return (process.env[name] || "").trim(); +} + +function envOrFail(name) { + const value = envValue(name); + if (!value) { + throw new Error(`missing required environment variable: ${name}`); + } + return value; +} + +// ============================================================================ +// GitHub API Client +// ============================================================================ + +class GitHubClient { + constructor(token, repo, prNumber) { + this.token = token; + this.repo = repo; + this.prNumber = prNumber; + } + + buildHeaders(hasBody = false) { + const headers = { + Accept: "application/vnd.github+json", + "X-GitHub-Api-Version": "2022-11-28", + }; + if (this.token) { + headers.Authorization = `Bearer ${this.token}`; + } + if (hasBody) { + headers["Content-Type"] = "application/json"; + } + return headers; + } + + async request(endpoint, options = {}) { + const { method = "GET", payload, allow404 = false } = options; + const hasBody = payload !== undefined; + const url = endpoint.startsWith("http") ? endpoint : `${API_BASE}${endpoint}`; + + const response = await fetch(url, { + method, + headers: this.buildHeaders(hasBody), + body: hasBody ? JSON.stringify(payload) : undefined, + }); + + if (allow404 && response.status === 404) { + return null; + } + + if (!response.ok) { + const detail = await response.text(); + const error = new Error(`GitHub API ${method} ${url} failed: ${response.status} ${detail}`); + error.status = response.status; + throw error; + } + + const text = await response.text(); + return text ? 
JSON.parse(text) : null; + } + + async getPullRequest() { + return this.request(`/repos/${this.repo}/pulls/${this.prNumber}`); + } + + async listPrFiles() { + const files = []; + for (let page = 1; ; page += 1) { + const params = new URLSearchParams({ per_page: "100", page: String(page) }); + const batch = await this.request(`/repos/${this.repo}/pulls/${this.prNumber}/files?${params}`); + if (!batch || batch.length === 0) { + break; + } + files.push(...batch); + if (batch.length < 100) { + break; + } + } + return files; + } + + async listIssueLabels() { + const labels = await this.request(`/repos/${this.repo}/issues/${this.prNumber}/labels`); + return new Set(labels.map((item) => item.name)); + } + + async syncLabelDefinition(name) { + const label = LABEL_DEFINITIONS[name]; + const createUrl = `/repos/${this.repo}/labels`; + const updateUrl = `/repos/${this.repo}/labels/${encodeURIComponent(name)}`; + + try { + await this.request(createUrl, { + method: "POST", + payload: { name, color: label.color, description: label.description }, + }); + log(`created label ${name}`); + } catch (error) { + if (error.status !== 422) { + throw error; + } + await this.request(updateUrl, { + method: "PATCH", + payload: { new_name: name, color: label.color, description: label.description }, + }); + log(`updated label ${name}`); + } + } + + async addLabels(labels) { + if (labels.length === 0) return; + await this.request(`/repos/${this.repo}/issues/${this.prNumber}/labels`, { + method: "POST", + payload: { labels }, + }); + log(`added labels: ${labels.join(", ")}`); + } + + async removeLabel(name) { + await this.request(`/repos/${this.repo}/issues/${this.prNumber}/labels/${encodeURIComponent(name)}`, { + method: "DELETE", + allow404: true, + }); + log(`removed label: ${name}`); + } +} + +// ============================================================================ +// Path & Domain Heuristics +// ============================================================================ + +function 
parsePrType(title) { + const match = String(title || "").trim().match(/^([a-z]+)(?:\([^)]+\))?!?:/i); + return match ? match[1].toLowerCase() : ""; +} + +function isLowRiskPath(filePath) { + const normalized = normalizePath(filePath); + const basename = path.posix.basename(normalized); + + if (normalized.startsWith("skills/lark-")) return false; + if (DOC_SUFFIXES.some((suffix) => normalized.endsWith(suffix))) return true; + if (LOW_RISK_FILENAMES.has(basename)) return true; + if (LOW_RISK_PREFIXES.some((prefix) => normalized.startsWith(prefix))) return true; + if (LOW_RISK_TEST_SUFFIXES.some((suffix) => normalized.endsWith(suffix))) return true; + return normalized.includes("/testdata/"); +} + +function isBusinessSkillPath(filePath) { + const normalized = normalizePath(filePath); + return normalized.startsWith("shortcuts/") || normalized.startsWith("skills/lark-"); +} + +function shortcutDomainForPath(filePath) { + const parts = normalizePath(filePath).split("/"); + return parts.length >= 2 && parts[0] === "shortcuts" ? parts[1] : ""; +} + +function skillDomainForPath(filePath) { + const parts = normalizePath(filePath).split("/"); + return parts.length >= 2 && parts[0] === "skills" && parts[1].startsWith("lark-") + ? 
parts[1].slice("lark-".length) + : ""; +} + +// Get business domain label based on CODEOWNERS path mapping +function getBusinessDomain(filePath) { + const normalized = normalizePath(filePath); + for (const [prefix, domain] of Object.entries(PATH_TO_DOMAIN_MAP)) { + if (normalized.startsWith(prefix)) { + return domain; + } + } + return ""; +} + +async function detectNewShortcutDomain(files) { + for (const item of files) { + if (item.status !== "added") continue; + const domain = shortcutDomainForPath(item.filename); + if (!domain) continue; + try { + await fs.access(path.join(ROOT, "shortcuts", domain)); + } catch { + return domain; + } + } + return ""; +} + +function collectCoreAreas(filenames) { + const areas = new Set(); + for (const name of filenames) { + const normalized = normalizePath(name); + for (const prefix of CORE_PREFIXES) { + if (normalized.startsWith(prefix)) { + // remove trailing slash for area name + areas.add(prefix.slice(0, -1)); + } + } + } + return areas; +} + +function collectSensitiveKeywords(filenames) { + const hits = new Set(); + for (const name of filenames) { + const match = normalizePath(name).match(SENSITIVE_PATTERN); + if (match && match[2]) { + hits.add(match[2]); + } + } + return [...hits].sort(); +} + +// ============================================================================ +// Classification Logic +// ============================================================================ + +function evaluateRules(context) { + const { + prType, effectiveChanges, lowRiskOnly, + domains, headDomains, coreAreas, coreSignals, + sensitiveKeywords, sensitive, newShortcutDomain, + singleDomain, multiDomain, filenames + } = context; + + const reasons = []; + let label; + + if (lowRiskOnly && (LOW_RISK_TYPES.has(prType) || effectiveChanges === 0)) { + reasons.push("Only low-risk docs, CI, test, or chore paths were changed, with no effective business code or Skill changes"); + label = "size/S"; + return { label, reasons }; + } + + // XL is 
reserved for architecture-level or global-impact changes. + const isXL = + effectiveChanges > THRESHOLD_XL || + (prType === "refactor" && sensitive && effectiveChanges >= THRESHOLD_L) || + (coreAreas.size >= 2 && (multiDomain || effectiveChanges >= THRESHOLD_L)) || + (headDomains.length >= 2 && sensitive); + + if (isXL) { + if (effectiveChanges > THRESHOLD_XL) reasons.push("Effective business code or Skill changes are far beyond the L threshold"); + if (prType === "refactor" && sensitive && effectiveChanges >= THRESHOLD_L) reasons.push("Refactor PR touches core or sensitive paths"); + if (coreAreas.size >= 2) reasons.push("Touches multiple core areas at the same time"); + if (headDomains.length >= 2) reasons.push("Impacts multiple major business domains"); + coreSignals.forEach((signal) => reasons.push(`Core area hit: ${signal}`)); + sensitiveKeywords.forEach((keyword) => reasons.push(`Sensitive keyword hit: ${keyword}`)); + label = "size/XL"; + } else if ( + prType === "refactor" || + effectiveChanges >= THRESHOLD_L || + Boolean(newShortcutDomain) || + multiDomain || + sensitive + ) { + if (prType === "refactor") reasons.push("PR type is refactor"); + if (effectiveChanges >= THRESHOLD_L) reasons.push(`Effective business code or Skill changes exceed ${THRESHOLD_L} lines`); + if (newShortcutDomain) reasons.push(`Introduces a new business domain directory: shortcuts/${newShortcutDomain}/`); + if (multiDomain) reasons.push("Touches multiple business domains"); + coreSignals.forEach((signal) => reasons.push(`Core area hit: ${signal}`)); + sensitiveKeywords.forEach((keyword) => reasons.push(`Sensitive keyword hit: ${keyword}`)); + label = "size/L"; + } else { + if (filenames.some(isBusinessSkillPath) || effectiveChanges > 0) { + reasons.push("Regular feat, fix, or Skill change within a single business domain"); + } + if (singleDomain && domains.size > 0) { + reasons.push(`Impact is limited to a single business domain: ${[...domains].sort().join(", ")}`); + } + if 
(effectiveChanges < THRESHOLD_L) { + reasons.push(`Effective business code or Skill changes are below ${THRESHOLD_L} lines`); + } + label = "size/M"; + } + + return { label, reasons }; +} + +async function classifyPr(payload, files) { + const pr = payload.pull_request; + const title = pr.title || ""; + const prType = parsePrType(title); + const filenames = files.map((item) => item.filename || ""); + const impactedPaths = files.flatMap((item) => { + const paths = [item.filename || ""]; + if (item.status === "renamed" && item.previous_filename) { + paths.push(item.previous_filename); + } + return paths.filter(Boolean); + }); + + // Filter out docs, tests, and other low-risk paths so the size label tracks business impact. + const effectiveChanges = files.reduce( + (sum, item) => sum + (isLowRiskPath(item.filename) ? 0 : (item.changes || 0)), + 0, + ); + const totalChanges = files.reduce((sum, item) => sum + (item.changes || 0), 0); + + const domains = new Set(); + const businessDomains = new Set(); + + for (const name of impactedPaths) { + const businessDomain = getBusinessDomain(name); + if (businessDomain) { + businessDomains.add(businessDomain); + domains.add(businessDomain); + continue; + } + + const shortcutDomain = shortcutDomainForPath(name); + if (shortcutDomain) domains.add(shortcutDomain); + + const skillDomain = skillDomainForPath(name); + if (skillDomain) domains.add(skillDomain); + } + + const coreAreas = collectCoreAreas(impactedPaths); + const newShortcutDomain = await detectNewShortcutDomain(files); + + const lowRiskOnly = impactedPaths.length > 0 && impactedPaths.every(isLowRiskPath); + const singleDomain = domains.size <= 1; + const multiDomain = domains.size >= 2; + const headDomains = [...domains].filter((domain) => HEAD_BUSINESS_DOMAINS.has(domain)); + const coreSignals = [...coreAreas].sort(); + const sensitiveKeywords = collectSensitiveKeywords(impactedPaths); + const sensitive = coreSignals.length > 0 || sensitiveKeywords.length > 0; + + const 
context = { + prType, effectiveChanges, lowRiskOnly, + domains, headDomains, coreAreas, coreSignals, + sensitiveKeywords, sensitive, newShortcutDomain, + singleDomain, multiDomain, filenames: impactedPaths + }; + + const { label, reasons } = evaluateRules(context); + + return { + label, + title, + prType: prType || "unknown", + totalChanges, + effectiveChanges, + domains: [...domains].sort(), + businessDomains: [...businessDomains].sort(), + coreAreas: [...coreAreas].sort(), + coreSignals, + sensitiveKeywords, + newShortcutDomain, + reasons, + lowRiskOnly, + filenames, + }; +} + +// ============================================================================ +// Output & Formatting +// ============================================================================ + +async function writeStepSummary(prNumber, classification) { + const summaryPath = (process.env.GITHUB_STEP_SUMMARY || "").trim(); + if (!summaryPath) return; + + const standard = CLASS_STANDARDS[classification.label]; + const domains = classification.domains.join(", ") || "-"; + const bDomains = classification.businessDomains.join(", ") || "-"; + const coreAreas = classification.coreAreas.join(", ") || "-"; + const reasons = classification.reasons.length > 0 + ? 
classification.reasons + : ["No higher-severity rule matched, so the PR defaults to medium classification"]; + + const lines = [ + "## PR Size Classification", + "", + `- PR: #${prNumber}`, + `- Label: \`${classification.label}\``, + `- PR Type: \`${classification.prType}\``, + `- Total Changes: \`${classification.totalChanges}\``, + `- Effective Business/SKILL Changes: \`${classification.effectiveChanges}\``, + `- Business Domains: \`${domains}\``, + `- Impacted Domains: \`${bDomains}\``, + `- Core Areas: \`${coreAreas}\``, + `- CI/CD Channel: \`${standard.channel}\``, + `- Low Risk Only: \`${classification.lowRiskOnly}\``, + "", + "### Reasons", + "", + ...reasons.map((reason) => `- ${reason}`), + "", + "### Pipeline Gates", + "", + ...standard.gates.map((gate) => `- ${gate}`), + "", + ]; + + await fs.appendFile(summaryPath, `${lines.join("\n")}\n`, "utf8"); +} + +function formatDryRunResult(repo, prNumber, classification) { + const standard = CLASS_STANDARDS[classification.label]; + return { + repo, + prNumber, + label: classification.label, + prType: classification.prType, + totalChanges: classification.totalChanges, + effectiveChanges: classification.effectiveChanges, + lowRiskOnly: classification.lowRiskOnly, + domains: classification.domains, + businessDomains: classification.businessDomains, + coreAreas: classification.coreAreas, + coreSignals: classification.coreSignals, + sensitiveKeywords: classification.sensitiveKeywords, + reasons: classification.reasons, + channel: standard.channel, + gates: standard.gates, + }; +} + +function printDryRunResult(result, options) { + if (options.json) { + console.log(JSON.stringify(result, null, 2)); + return; + } + + const signalParts = [ + ...result.coreSignals.map((signal) => `core:${signal}`), + ...result.sensitiveKeywords.map((keyword) => `keyword:${keyword}`), + ...(result.domains.length > 0 ? [`domains:${result.domains.join(",")}`] : []), + ]; + const reasonParts = result.reasons.length > 0 + ? 
result.reasons + : ["No higher-severity rule matched, so the PR defaults to medium classification"]; + + console.log( + `${result.label} | #${result.prNumber} | type:${result.prType} | eff:${result.effectiveChanges} | ` + + `sig:${signalParts.join(";") || "-"} | reason:${reasonParts.join("; ")}`, + ); +} + +function printHelp() { + const lines = [ + "Usage:", + " node scripts/pr-labels/index.js", + " node scripts/pr-labels/index.js --dry-run --pr-url [--token ] [--json]", + " node scripts/pr-labels/index.js --dry-run --repo --pr-number [--token ] [--json]", + "", + "Modes:", + " default Read the GitHub Actions event payload and apply labels", + " --dry-run Fetch the PR, compute the managed label, and print the result without writing labels", + "", + "Options:", + " --pr-url GitHub pull request URL, for example https://github.com/larksuite/cli/pull/123", + " --repo Repository name, used with --pr-number", + " --pr-number Pull request number, used with --repo", + " --token GitHub token override; falls back to GITHUB_TOKEN", + " --json Print dry-run output as JSON instead of the default one-line summary", + " --help Show this message", + ]; + console.log(lines.join("\n")); +} + +function parseArgs(argv) { + const options = { + dryRun: false, + json: false, + help: false, + prUrl: "", + repo: "", + prNumber: "", + token: "", + }; + + for (let i = 0; i < argv.length; i += 1) { + const arg = argv[i]; + if (arg === "--dry-run") options.dryRun = true; + else if (arg === "--json") options.json = true; + else if (arg === "--help" || arg === "-h") options.help = true; + else if (arg === "--pr-url") options.prUrl = argv[++i] || ""; + else if (arg === "--repo") options.repo = argv[++i] || ""; + else if (arg === "--pr-number") options.prNumber = argv[++i] || ""; + else if (arg === "--token") options.token = argv[++i] || ""; + else throw new Error(`unknown argument: ${arg}`); + } + + return options; +} + +function parsePrUrl(prUrl) { + let parsed; + try { + parsed = new 
URL(prUrl); + } catch { + throw new Error(`invalid PR URL: ${prUrl}`); + } + + const match = parsed.pathname.match(/^\/([^/]+)\/([^/]+)\/pull\/(\d+)\/?$/); + if (!match) throw new Error(`unsupported PR URL format: ${prUrl}`); + + return { repo: `${match[1]}/${match[2]}`, prNumber: Number(match[3]) }; +} + +async function loadEventPayload(filePath) { + return JSON.parse(await fs.readFile(filePath, "utf8")); +} + +async function resolveContext(options) { + const token = options.token; + + if (options.prUrl) { + const { repo, prNumber } = parsePrUrl(options.prUrl); + const client = new GitHubClient(token, repo, prNumber); + const payload = { + repository: { full_name: repo }, + pull_request: await client.getPullRequest(), + }; + return { repo, prNumber, payload, client }; + } + + if (options.repo || options.prNumber) { + if (!options.repo || !options.prNumber) throw new Error("--repo and --pr-number must be provided together"); + const prNumber = Number(options.prNumber); + if (!Number.isInteger(prNumber) || prNumber <= 0) throw new Error(`invalid PR number: ${options.prNumber}`); + + const client = new GitHubClient(token, options.repo, prNumber); + const payload = { + repository: { full_name: options.repo }, + pull_request: await client.getPullRequest(), + }; + return { repo: options.repo, prNumber, payload, client }; + } + + const eventPath = envOrFail("GITHUB_EVENT_PATH"); + const payload = await loadEventPayload(eventPath); + const repo = payload.repository.full_name; + const prNumber = payload.pull_request.number; + const client = new GitHubClient(token, repo, prNumber); + + return { repo, prNumber, payload, client }; +} + +// ============================================================================ +// Main Execution +// ============================================================================ + +async function main() { + const options = parseArgs(process.argv.slice(2)); + if (options.help) { + printHelp(); + return; + } + + options.token = options.token 
|| envValue("GITHUB_TOKEN"); + + if (!options.dryRun && !options.token) { + throw new Error("missing required GitHub token; set GITHUB_TOKEN or pass --token"); + } + + const { repo, prNumber, payload, client } = await resolveContext(options); + + const files = await client.listPrFiles(); + const classification = await classifyPr(payload, files); + + if (options.dryRun) { + printDryRunResult(formatDryRunResult(repo, prNumber, classification), options); + return; + } + + const desired = new Set([classification.label]); + for (const domain of classification.businessDomains) { + desired.add(`domain/${domain}`); + } + + const current = await client.listIssueLabels(); + const managedCurrent = [...current].filter((label) => MANAGED_LABELS.has(label) || label.startsWith("domain/")); + const toAdd = [...desired].filter((label) => !current.has(label)).sort(); + const toRemove = managedCurrent.filter((label) => !desired.has(label)).sort(); + + for (const domain of classification.businessDomains) { + const labelName = `domain/${domain}`; + if (!LABEL_DEFINITIONS[labelName]) { + LABEL_DEFINITIONS[labelName] = { color: "1d76db", description: `PR touches the ${domain} domain` }; + } + } + + // Ensure labels to be added actually exist in the repository first + // If the label doesn't exist, GitHub API will return 422 Unprocessable Entity when trying to add it to a PR. + for (const label of toAdd) { + if (LABEL_DEFINITIONS[label]) { + try { + await client.syncLabelDefinition(label); + } catch (e) { + log(`Warning: Failed to bootstrap new label ${label}: ${e.message}`); + } + } + } + + await client.addLabels(toAdd); + + for (const label of toRemove) { + await client.removeLabel(label); + } + + // Keep other label metadata consistent. This is best-effort trailing work. 
+ for (const label of Object.keys(LABEL_DEFINITIONS)) { + if (toAdd.includes(label)) continue; // Already synced above + try { + await client.syncLabelDefinition(label); + } catch (e) { + log(`Warning: Failed to sync label definition for ${label}: ${e.message}`); + } + } + + await writeStepSummary(prNumber, classification); + + log( + `pr #${prNumber} type=${classification.prType} total_changes=${classification.totalChanges} ` + + `effective_changes=${classification.effectiveChanges} files=${files.length} ` + + `desired=${[...desired].sort().join(",") || "-"} current_managed=${managedCurrent.sort().join(",") || "-"} ` + + `reasons=${classification.reasons.join(" | ") || "-"}`, + ); +} + +main().catch((error) => { + log(error.message || String(error)); + process.exit(1); +}); diff --git a/scripts/pr-labels/samples.json b/scripts/pr-labels/samples.json new file mode 100644 index 00000000..76dd7291 --- /dev/null +++ b/scripts/pr-labels/samples.json @@ -0,0 +1,145 @@ +[ + { + "name": "size-s-docs-badge", + "number": 103, + "title": "docs: add official badge to distinguish from third-party Lark CLI tools", + "pr_url": "https://github.com/larksuite/cli/pull/103", + "status": "merged", + "merged_at": "2026-03-30T12:15:45Z", + "expected_label": "size/S", + "expected_domains": [], + "review_note": "Pure docs sample. Useful to confirm low-risk paths stay in S even when total changed lines are not tiny." + }, + { + "name": "size-s-docs-simplify", + "number": 26, + "title": "docs: simplify installation steps by merging CLI and Skills into one …", + "pr_url": "https://github.com/larksuite/cli/pull/26", + "status": "merged", + "merged_at": "2026-03-28T09:33:24Z", + "expected_label": "size/S", + "expected_domains": [], + "review_note": "Docs sample, verifying docs changes remain in S." 
+ }, + { + "name": "size-s-docs-star-history", + "number": 12, + "title": "docs: add Star History chart to readmes", + "pr_url": "https://github.com/larksuite/cli/pull/12", + "status": "merged", + "merged_at": "2026-03-28T16:00:15Z", + "expected_label": "size/S", + "expected_domains": [], + "review_note": "Docs sample, no effective business code changes." + }, + { + "name": "size-s-docs-clarify-install", + "number": 3, + "title": "docs: clarify install methods and add source build steps", + "pr_url": "https://github.com/larksuite/cli/pull/3", + "status": "merged", + "merged_at": "2026-03-28T03:43:44Z", + "expected_label": "size/S", + "expected_domains": [], + "review_note": "Docs sample, pure documentation clarification." + }, + { + "name": "size-m-fix-base-scope", + "number": 96, + "title": "fix(base): correct scope for record history list shortcut", + "pr_url": "https://github.com/larksuite/cli/pull/96", + "status": "merged", + "merged_at": "2026-03-30T11:40:18Z", + "expected_label": "size/M", + "expected_domains": ["domain/base"], + "review_note": "Small fix sample. Verify the lower edge of the M bucket within a single domain." + }, + { + "name": "size-m-fix-mail-sensitive", + "number": 92, + "title": "fix: remove sensitive send scope from reply and forward shortcuts", + "pr_url": "https://github.com/larksuite/cli/pull/92", + "status": "merged", + "merged_at": "2026-03-30T10:19:11Z", + "expected_label": "size/M", + "expected_domains": ["domain/mail"], + "review_note": "Security-like wording in the title but stays in one business domain (mail)." + }, + { + "name": "size-m-ci-improve", + "number": 71, + "title": "ci: improve CI workflows and add golangci-lint config", + "pr_url": "https://github.com/larksuite/cli/pull/71", + "status": "merged", + "merged_at": "2026-03-30T03:09:31Z", + "expected_label": "size/M", + "expected_domains": [], + "review_note": "CI workflow change that goes beyond S threshold." 
+ }, + { + "name": "size-m-feat-im-pagination", + "number": 30, + "title": "feat: add auto-pagination to messages search and update lark-im docs", + "pr_url": "https://github.com/larksuite/cli/pull/30", + "status": "merged", + "merged_at": "2026-03-30T15:00:41Z", + "expected_label": "size/M", + "expected_domains": ["domain/im"], + "review_note": "Single-domain feature with larger diff but effective changes stay in M." + }, + { + "name": "size-l-fix-api-silent", + "number": 85, + "title": "fix: resolve silent failure in `lark-cli api` error output (#39)", + "pr_url": "https://github.com/larksuite/cli/pull/85", + "status": "merged", + "merged_at": "2026-03-30T09:19:24Z", + "expected_label": "size/L", + "expected_domains": [], + "review_note": "Touches core area (cmd), bumping the size to L." + }, + { + "name": "size-l-fix-cli", + "number": 91, + "title": "fix: correct CLI examples in root help and READMEs (closes #48)", + "pr_url": "https://github.com/larksuite/cli/pull/91", + "status": "closed", + "merged_at": null, + "expected_label": "size/L", + "expected_domains": [], + "review_note": "Closed PR touching core area (cmd)." + }, + { + "name": "size-m-skill-format-check", + "number": 134, + "title": "feat(ci): add skill format check workflow to ensure SKILL.md compliance", + "pr_url": "https://github.com/larksuite/cli/pull/134", + "status": "closed", + "merged_at": null, + "expected_label": "size/M", + "expected_domains": [], + "review_note": "Includes updates to tests/bad-skill/SKILL.md inside skills-like paths, testing how skill mock files and test scripts are handled." 
+ }, + { + "name": "size-l-ccm-multi-path", + "number": 57, + "title": "feat(docs): support local image upload in docs +create", + "pr_url": "https://github.com/larksuite/cli/pull/57", + "status": "closed", + "merged_at": null, + "expected_label": "size/L", + "expected_domains": ["domain/ccm"], + "review_note": "Touches docs_create_images.go and table_auto_width.go, representing multiple CCM sub-paths but resolving to a single ccm domain." + }, + { + "name": "size-l-domain-rename", + "number": 11, + "title": "docs: rename user-facing Bitable references to Base", + "pr_url": "https://github.com/larksuite/cli/pull/11", + "status": "merged", + "merged_at": "2026-03-28T16:00:52Z", + "expected_label": "size/L", + "expected_domains": ["domain/base", "domain/ccm"], + "review_note": "A rename across paths. Since we track previous_filename to evaluate domains, this should properly capture the base domain." + } +] \ No newline at end of file diff --git a/scripts/pr-labels/test.js b/scripts/pr-labels/test.js new file mode 100644 index 00000000..db08ddc1 --- /dev/null +++ b/scripts/pr-labels/test.js @@ -0,0 +1,52 @@ +const fs = require('fs'); +const { execFileSync } = require('child_process'); +const path = require('path'); + +const samplesPath = path.join(__dirname, 'samples.json'); +const indexPath = path.join(__dirname, 'index.js'); +const samples = JSON.parse(fs.readFileSync(samplesPath, 'utf8')); + +if (!process.env.GITHUB_TOKEN) { + console.error("❌ Error: GITHUB_TOKEN environment variable is required to run tests without hitting API rate limits."); + console.error("Please run: GITHUB_TOKEN=$(gh auth token) node scripts/pr-labels/test.js"); + process.exit(1); +} + +let passed = 0; +let failed = 0; + +for (const sample of samples) { + try { + const output = execFileSync( + process.execPath, + [indexPath, '--dry-run', '--json', '--pr-url', sample.pr_url], + { encoding: 'utf8', env: process.env } + ); + const result = JSON.parse(output); + + const matchLabel = result.label 
=== sample.expected_label; + + // Sort before comparing to ignore order + const actualDomains = (result.businessDomains || []).sort(); + const expectedDomains = (sample.expected_domains || []).map(d => d.replace('domain/', '')).sort(); + + const matchDomains = JSON.stringify(actualDomains) === JSON.stringify(expectedDomains); + + if (matchLabel && matchDomains) { + console.log(`✅ Passed: ${sample.name}`); + passed++; + } else { + console.log(`❌ Failed: ${sample.name}`); + console.log(` Label expected: ${sample.expected_label}, got: ${result.label}`); + console.log(` Domains expected: ${expectedDomains}, got: ${actualDomains}`); + failed++; + } + } catch (e) { + console.log(`❌ Failed: ${sample.name} (Execution error)`); + console.error(e.message); + failed++; + } +} + +console.log(`\nTest Summary: ${passed} passed, ${failed} failed`); +if (failed > 0) process.exit(1); diff --git a/scripts/skill-format-check/README.md b/scripts/skill-format-check/README.md new file mode 100644 index 00000000..04a00b4e --- /dev/null +++ b/scripts/skill-format-check/README.md @@ -0,0 +1,36 @@ +# Skill Format Check + +This directory contains a script to validate the format of `SKILL.md` files located in the `../../skills` directory. + +## Purpose + +The `index.js` script ensures that all `SKILL.md` files conform to the standard template defined in `skill-template/skill-template.md`. Specifically, it checks that the YAML frontmatter includes the following fields: +- `name` (required) +- `description` (required) +- `metadata` (outputs a warning if missing, does not fail the build) + +> **Note:** The `lark-shared` skill is explicitly excluded from these format checks. + +## Usage + +This script is executed automatically via GitHub Actions (`.github/workflows/skill-format-check.yml`) on pull requests and pushes that modify the `skills/` directory. 
+ +To run the check manually from the root of the repository, execute: + +```bash +node scripts/skill-format-check/index.js +``` + +You can also specify a custom target directory as the first argument: + +```bash +node scripts/skill-format-check/index.js ./path/to/my/skills +``` + +## Testing + +This tool comes with a quick validation script to ensure it correctly identifies good and bad skill formats. To run the tests, execute: + +```bash +./scripts/skill-format-check/test.sh +``` diff --git a/scripts/skill-format-check/index.js b/scripts/skill-format-check/index.js new file mode 100644 index 00000000..71b14f00 --- /dev/null +++ b/scripts/skill-format-check/index.js @@ -0,0 +1,96 @@ +const fs = require('fs'); +const path = require('path'); + +// Allow passing a target directory as the first argument. +// If provided, resolve against process.cwd() so it behaves as the user expects. +// If not provided, default to '../../skills' relative to this script's directory. +const targetDirArg = process.argv[2]; +const SKILLS_DIR = targetDirArg + ? 
path.resolve(process.cwd(), targetDirArg) + : path.resolve(__dirname, '../../skills'); + +function checkSkillFormat() { + console.log(`Checking skill format in ${SKILLS_DIR}...`); + + if (!fs.existsSync(SKILLS_DIR)) { + console.error('Skills directory not found:', SKILLS_DIR); + process.exit(1); + } + + let skills; + try { + skills = fs + .readdirSync(SKILLS_DIR, { withFileTypes: true }) + .filter(entry => entry.isDirectory()) + .map(entry => entry.name); + } catch (err) { + console.error(`Failed to enumerate skills directory: ${err.message}`); + process.exit(1); + } + + let hasErrors = false; + + skills.forEach(skill => { + // Skip lark-shared skill completely + if (skill === 'lark-shared') { + console.log(`⏭️ Skipping check for ${skill}`); + return; + } + + const skillPath = path.join(SKILLS_DIR, skill); + const skillFile = path.join(skillPath, 'SKILL.md'); + + if (!fs.existsSync(skillFile)) { + console.error(`❌ [${skill}] Missing SKILL.md`); + hasErrors = true; + return; + } + + let content; + try { + content = fs.readFileSync(skillFile, 'utf-8'); + } catch (err) { + console.error(`❌ [${skill}] Failed to read SKILL.md: ${err.message}`); + hasErrors = true; + return; + } + + // Normalize line endings to simplify parsing + const normalizedContent = content.replace(/\r\n/g, '\n'); + + // Check YAML Frontmatter + if (!normalizedContent.startsWith('---\n')) { + console.error(`❌ [${skill}] SKILL.md must start with YAML frontmatter (---)`); + hasErrors = true; + } else { + const frontmatterMatch = normalizedContent.match(/^---\n([\s\S]*?)\n---(?:\n|$)/); + if (!frontmatterMatch) { + console.error(`❌ [${skill}] SKILL.md has unclosed or invalid YAML frontmatter`); + hasErrors = true; + } else { + const frontmatter = frontmatterMatch[1]; + if (!/^name:/m.test(frontmatter)) { + console.error(`❌ [${skill}] YAML frontmatter missing 'name'`); + hasErrors = true; + } + if (!/^description:/m.test(frontmatter)) { + console.error(`❌ [${skill}] YAML frontmatter missing 
'description'`); + hasErrors = true; + } + if (!/^metadata:/m.test(frontmatter)) { + console.warn(`⚠️ [${skill}] YAML frontmatter missing 'metadata' (Warning only)`); + // hasErrors = true; // Downgrade to warning to not fail on existing skills + } + } + } + }); + + if (hasErrors) { + console.error('\n❌ Skill format check failed. Please fix the errors above.'); + process.exit(1); + } else { + console.log('\n✅ Skill format check passed!'); + } +} + +checkSkillFormat(); diff --git a/scripts/skill-format-check/test.sh b/scripts/skill-format-check/test.sh new file mode 100755 index 00000000..1a5faf82 --- /dev/null +++ b/scripts/skill-format-check/test.sh @@ -0,0 +1,82 @@ +#!/bin/bash + +# Get the directory of this script +DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" +INDEX_JS="$DIR/index.js" +TEMP_DIR="$DIR/tests/temp_test_dir" + +echo "=== Running tests for skill-format-check ===" +echo "Index script: $INDEX_JS" + +prepare_fixture() { + local test_name=$1 + rm -rf "$TEMP_DIR" + mkdir -p "$TEMP_DIR" + if [ ! -d "$DIR/tests/$test_name" ]; then + echo "❌ Missing fixture directory: $DIR/tests/$test_name" + exit 1 + fi + cp -r "$DIR/tests/$test_name" "$TEMP_DIR/" || { + echo "❌ Failed to copy fixture: $test_name" + exit 1 + } +} + +# Function to run a positive test +run_positive_test() { + local test_name=$1 + echo -e "\n--- [Positive] $test_name ---" + + prepare_fixture "$test_name" + + node "$INDEX_JS" "$TEMP_DIR" + + if [ $? -eq 0 ]; then + echo "✅ Passed! (Correctly validated $test_name)" + rm -rf "$TEMP_DIR" + return 0 + else + echo "❌ Failed! Expected $test_name to pass but it failed." 
+ rm -rf "$TEMP_DIR" + exit 1 + fi +} + +# Function to run a negative test +run_negative_test() { + local test_name=$1 + echo -e "\n--- [Negative] $test_name ---" + + prepare_fixture "$test_name" + + # Capture output for diagnostics while still treating non-zero as expected + local log_file="$TEMP_DIR/.validator.log" + node "$INDEX_JS" "$TEMP_DIR" > "$log_file" 2>&1 + local exit_code=$? + + if [ $exit_code -ne 0 ]; then + echo "✅ Passed! (Correctly rejected $test_name)" + rm -rf "$TEMP_DIR" + return 0 + else + echo "❌ Failed! Expected $test_name to fail but it passed." + if [ -s "$log_file" ]; then + echo "--- Validator output ---" + cat "$log_file" + fi + rm -rf "$TEMP_DIR" + exit 1 + fi +} + +# Run positive tests +run_positive_test "good-skill" +run_positive_test "good-skill-minimal" +run_positive_test "good-skill-complex" + +# Run negative tests +run_negative_test "bad-skill" +run_negative_test "bad-skill-no-frontmatter" +run_negative_test "bad-skill-unclosed-frontmatter" + +echo -e "\n🎉 All tests passed successfully!" diff --git a/scripts/skill-format-check/tests/bad-skill-no-frontmatter/SKILL.md b/scripts/skill-format-check/tests/bad-skill-no-frontmatter/SKILL.md new file mode 100644 index 00000000..5a7afa3e --- /dev/null +++ b/scripts/skill-format-check/tests/bad-skill-no-frontmatter/SKILL.md @@ -0,0 +1,3 @@ +# No Frontmatter Skill + +This skill completely lacks a YAML frontmatter. diff --git a/scripts/skill-format-check/tests/bad-skill-unclosed-frontmatter/SKILL.md b/scripts/skill-format-check/tests/bad-skill-unclosed-frontmatter/SKILL.md new file mode 100644 index 00000000..189d6253 --- /dev/null +++ b/scripts/skill-format-check/tests/bad-skill-unclosed-frontmatter/SKILL.md @@ -0,0 +1,9 @@ +--- +name: bad-skill-unclosed +version: 1.0.0 +description: "This skill has an unclosed frontmatter block." +metadata: {} + +# Unclosed Frontmatter Skill + +This frontmatter does not have a closing `---` block. 
\ No newline at end of file diff --git a/scripts/skill-format-check/tests/bad-skill/SKILL.md b/scripts/skill-format-check/tests/bad-skill/SKILL.md new file mode 100644 index 00000000..465a05da --- /dev/null +++ b/scripts/skill-format-check/tests/bad-skill/SKILL.md @@ -0,0 +1,8 @@ +--- +version: 1.0.0 +metadata: {} +--- + +# Bad Skill + +This skill is missing required fields like name and description. diff --git a/scripts/skill-format-check/tests/good-skill-complex/SKILL.md b/scripts/skill-format-check/tests/good-skill-complex/SKILL.md new file mode 100644 index 00000000..0f7b5183 --- /dev/null +++ b/scripts/skill-format-check/tests/good-skill-complex/SKILL.md @@ -0,0 +1,17 @@ +--- +name: good-skill-complex +version: 2.5.1-beta +description: > + A very complex description + that spans multiple lines + and contains weird chars: !@#$%^&*() +metadata: + requires: + bins: ["lark-cli", "node"] + cliHelp: "lark-cli something --help" + customField: "customValue" +--- + +# Complex Skill + +This skill has a complex frontmatter block. diff --git a/scripts/skill-format-check/tests/good-skill-minimal/SKILL.md b/scripts/skill-format-check/tests/good-skill-minimal/SKILL.md new file mode 100644 index 00000000..ca3f481c --- /dev/null +++ b/scripts/skill-format-check/tests/good-skill-minimal/SKILL.md @@ -0,0 +1,10 @@ +--- +name: good-skill-minimal +version: 0.1.0 +description: Minimal valid description +metadata: {} +--- + +# Minimal Skill + +This has the bare minimum required fields. diff --git a/scripts/skill-format-check/tests/good-skill/SKILL.md b/scripts/skill-format-check/tests/good-skill/SKILL.md new file mode 100644 index 00000000..8c2e7b40 --- /dev/null +++ b/scripts/skill-format-check/tests/good-skill/SKILL.md @@ -0,0 +1,12 @@ +--- +name: good-skill +version: 1.0.0 +description: "This is a properly formatted skill." +metadata: + requires: + bins: ["lark-cli"] +--- + +# Good Skill + +This skill follows all the formatting rules. 
diff --git a/shortcuts/base/base_data_query.go b/shortcuts/base/base_data_query.go index d316e4f2..f3724c1b 100644 --- a/shortcuts/base/base_data_query.go +++ b/shortcuts/base/base_data_query.go @@ -14,7 +14,7 @@ import ( var BaseDataQuery = common.Shortcut{ Service: "base", Command: "+data-query", - Description: "Query and analyze Bitable data with JSON DSL (aggregation, filter, sort)", + Description: "Query and analyze Base data with JSON DSL (aggregation, filter, sort)", Risk: "read", Scopes: []string{"base:table:read"}, AuthTypes: authTypes(), diff --git a/shortcuts/base/record_history_list.go b/shortcuts/base/record_history_list.go index d9ce8f4e..3a2f08e6 100644 --- a/shortcuts/base/record_history_list.go +++ b/shortcuts/base/record_history_list.go @@ -14,7 +14,7 @@ var BaseRecordHistoryList = common.Shortcut{ Command: "+record-history-list", Description: "List record change history", Risk: "read", - Scopes: []string{"base:record:read"}, + Scopes: []string{"base:history:read"}, AuthTypes: authTypes(), Flags: []common.Flag{ baseTokenFlag(true), diff --git a/shortcuts/calendar/calendar_rsvp.go b/shortcuts/calendar/calendar_rsvp.go new file mode 100644 index 00000000..2f126087 --- /dev/null +++ b/shortcuts/calendar/calendar_rsvp.go @@ -0,0 +1,90 @@ +// Copyright (c) 2026 Lark Technologies Pte. Ltd. 
+// SPDX-License-Identifier: MIT + +package calendar + +import ( + "context" + "fmt" + "strings" + + "github.com/larksuite/cli/internal/output" + "github.com/larksuite/cli/internal/validate" + "github.com/larksuite/cli/shortcuts/common" +) + +var CalendarRsvp = common.Shortcut{ + Service: "calendar", + Command: "+rsvp", + Description: "Reply to a calendar event (accept/decline/tentative)", + Risk: "write", + Scopes: []string{"calendar:calendar.event:reply"}, + AuthTypes: []string{"user", "bot"}, + HasFormat: false, + Flags: []common.Flag{ + {Name: "calendar-id", Desc: "calendar ID (default: primary)"}, + {Name: "event-id", Desc: "event ID", Required: true}, + {Name: "rsvp-status", Desc: "reply status", Required: true, Enum: []string{"accept", "decline", "tentative"}}, + }, + DryRun: func(ctx context.Context, runtime *common.RuntimeContext) *common.DryRunAPI { + calendarId := strings.TrimSpace(runtime.Str("calendar-id")) + d := common.NewDryRunAPI() + switch calendarId { + case "": + d.Desc("(calendar-id omitted) Will use primary calendar") + calendarId = "" + case "primary": + calendarId = "" + } + eventId := strings.TrimSpace(runtime.Str("event-id")) + status := strings.TrimSpace(runtime.Str("rsvp-status")) + + return d. + POST("/open-apis/calendar/v4/calendars/:calendar_id/events/:event_id/reply"). + Body(map[string]interface{}{"rsvp_status": status}). + Set("calendar_id", calendarId). 
+ Set("event_id", eventId) + }, + Validate: func(ctx context.Context, runtime *common.RuntimeContext) error { + for _, flag := range []string{"calendar-id", "event-id", "rsvp-status"} { + if val := strings.TrimSpace(runtime.Str(flag)); val != "" { + if err := common.RejectDangerousChars("--"+flag, val); err != nil { + return output.ErrValidation(err.Error()) + } + } + } + + eventId := strings.TrimSpace(runtime.Str("event-id")) + if eventId == "" { + return output.ErrValidation("event-id cannot be empty") + } + return nil + }, + Execute: func(ctx context.Context, runtime *common.RuntimeContext) error { + calendarId := strings.TrimSpace(runtime.Str("calendar-id")) + if calendarId == "" { + calendarId = PrimaryCalendarIDStr + } + eventId := strings.TrimSpace(runtime.Str("event-id")) + status := strings.TrimSpace(runtime.Str("rsvp-status")) + + _, err := runtime.DoAPIJSON("POST", + fmt.Sprintf("/open-apis/calendar/v4/calendars/%s/events/%s/reply", + validate.EncodePathSegment(calendarId), + validate.EncodePathSegment(eventId)), + nil, + map[string]interface{}{ + "rsvp_status": status, + }) + if err != nil { + return err + } + + runtime.Out(map[string]interface{}{ + "calendar_id": calendarId, + "event_id": eventId, + "rsvp_status": status, + }, nil) + return nil + }, +} diff --git a/shortcuts/calendar/calendar_test.go b/shortcuts/calendar/calendar_test.go index 00823f56..6b249324 100644 --- a/shortcuts/calendar/calendar_test.go +++ b/shortcuts/calendar/calendar_test.go @@ -580,6 +580,118 @@ func TestFreebusy_APIError(t *testing.T) { // CalendarSuggestion tests // --------------------------------------------------------------------------- +// --------------------------------------------------------------------------- +// CalendarRsvp tests +// --------------------------------------------------------------------------- + +func TestRsvp_Success(t *testing.T) { + f, stdout, _, reg := cmdutil.TestFactory(t, defaultConfig()) + + reg.Register(&httpmock.Stub{ + Method: "POST", 
+ URL: "/open-apis/calendar/v4/calendars/primary/events/evt_rsvp1/reply", + Body: map[string]interface{}{ + "code": 0, "msg": "ok", + }, + }) + + err := mountAndRun(t, CalendarRsvp, []string{ + "+rsvp", + "--event-id", "evt_rsvp1", + "--rsvp-status", "accept", + "--as", "bot", + }, f, stdout) + + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + for _, want := range []string{`"event_id": "evt_rsvp1"`, `"rsvp_status": "accept"`} { + if !strings.Contains(stdout.String(), want) { + t.Errorf("stdout should contain %s, got: %s", want, stdout.String()) + } + } +} + +func TestRsvp_InvalidStatus(t *testing.T) { + f, _, _, _ := cmdutil.TestFactory(t, defaultConfig()) + + err := mountAndRun(t, CalendarRsvp, []string{ + "+rsvp", + "--event-id", "evt_rsvp1", + "--rsvp-status", "invalid_status", + "--as", "bot", + }, f, nil) + + if err == nil { + t.Fatal("expected validation error for invalid status, got nil") + } + if !strings.Contains(err.Error(), "invalid value") { + t.Errorf("error should mention invalid value, got: %v", err) + } +} + +func TestRsvp_APIError(t *testing.T) { + f, _, _, reg := cmdutil.TestFactory(t, defaultConfig()) + + reg.Register(&httpmock.Stub{ + Method: "POST", + URL: "/open-apis/calendar/v4/calendars/primary/events/evt_rsvp1/reply", + Body: map[string]interface{}{ + "code": 190001, + "msg": "permission denied", + }, + }) + + err := mountAndRun(t, CalendarRsvp, []string{ + "+rsvp", + "--event-id", "evt_rsvp1", + "--rsvp-status", "decline", + "--as", "bot", + }, f, nil) + + if err == nil { + t.Fatal("expected error for API failure, got nil") + } +} + +func TestRsvp_RejectsDangerousChars(t *testing.T) { + f, _, _, _ := cmdutil.TestFactory(t, defaultConfig()) + + err := mountAndRun(t, CalendarRsvp, []string{ + "+rsvp", + "--event-id", "evt_rsvp1\u202e", + "--rsvp-status", "accept", + "--as", "bot", + }, f, nil) + + if err == nil { + t.Fatal("expected validation error for dangerous characters, got nil") + } + if !strings.Contains(err.Error(), 
"dangerous Unicode") && !strings.Contains(err.Error(), "control character") { + t.Errorf("error should mention dangerous input, got: %v", err) + } +} + +func TestRsvp_DryRun_TrimmedPrimaryCalendar(t *testing.T) { + f, stdout, _, _ := cmdutil.TestFactory(t, defaultConfig()) + + err := mountAndRun(t, CalendarRsvp, []string{ + "+rsvp", + "--calendar-id", " primary ", + "--event-id", "evt_rsvp1", + "--rsvp-status", "accept", + "--dry-run", + "--as", "bot", + }, f, stdout) + + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if !strings.Contains(stdout.String(), `"calendar_id": "\u003cprimary\u003e"`) { + t.Errorf("dry-run should normalize primary calendar, got: %s", stdout.String()) + } +} + func TestSuggestion_Success(t *testing.T) { f, stdout, _, reg := cmdutil.TestFactory(t, defaultConfig()) reg.Register(&httpmock.Stub{ @@ -867,17 +979,17 @@ func TestResolveStartEnd_ExplicitValues(t *testing.T) { // Shortcuts() registration test // --------------------------------------------------------------------------- -func TestShortcuts_Returns4(t *testing.T) { +func TestShortcuts_Returns5(t *testing.T) { shortcuts := Shortcuts() - if len(shortcuts) != 4 { - t.Fatalf("expected 4 shortcuts, got %d", len(shortcuts)) + if len(shortcuts) != 5 { + t.Fatalf("expected 5 shortcuts, got %d", len(shortcuts)) } names := map[string]bool{} for _, s := range shortcuts { names[s.Command] = true } - for _, want := range []string{"+agenda", "+create", "+freebusy", "+suggestion"} { + for _, want := range []string{"+agenda", "+create", "+freebusy", "+rsvp", "+suggestion"} { if !names[want] { t.Errorf("missing shortcut %s", want) } diff --git a/shortcuts/calendar/shortcuts.go b/shortcuts/calendar/shortcuts.go index 5f2ca92b..aed4fe1c 100644 --- a/shortcuts/calendar/shortcuts.go +++ b/shortcuts/calendar/shortcuts.go @@ -11,6 +11,7 @@ func Shortcuts() []common.Shortcut { CalendarAgenda, CalendarCreate, CalendarFreebusy, + CalendarRsvp, CalendarSuggestion, } } diff --git 
a/shortcuts/common/runner.go b/shortcuts/common/runner.go index 1f1df8bc..51ec9baa 100644 --- a/shortcuts/common/runner.go +++ b/shortcuts/common/runner.go @@ -225,6 +225,20 @@ func (ctx *RuntimeContext) DoAPI(req *larkcore.ApiReq, opts ...larkcore.RequestO return ac.DoSDKRequest(ctx.ctx, req, ctx.As(), opts...) } +// DoAPIAsBot executes a raw Lark SDK request using bot identity (tenant access token), +// regardless of the current --as flag. Use this for bot-only APIs (e.g. image/file upload) +// that must be called with TAT even when the surrounding shortcut runs as user. +func (ctx *RuntimeContext) DoAPIAsBot(req *larkcore.ApiReq, opts ...larkcore.RequestOptionFunc) (*larkcore.ApiResp, error) { + ac, err := ctx.getAPIClient() + if err != nil { + return nil, err + } + if optFn := cmdutil.ShortcutHeaderOpts(ctx.ctx); optFn != nil { + opts = append(opts, optFn) + } + return ac.DoSDKRequest(ctx.ctx, req, core.AsBot, opts...) +} + type cancelOnCloseReadCloser struct { io.ReadCloser cancel context.CancelFunc @@ -418,7 +432,7 @@ func (ctx *RuntimeContext) IO() *cmdutil.IOStreams { // Out prints a success JSON envelope to stdout. func (ctx *RuntimeContext) Out(data interface{}, meta *output.Meta) { - env := output.Envelope{OK: true, Identity: string(ctx.As()), Data: data, Meta: meta} + env := output.Envelope{OK: true, Identity: string(ctx.As()), Data: data, Meta: meta, Notice: output.GetNotice()} b, _ := json.MarshalIndent(env, "", " ") fmt.Fprintln(ctx.IO().Out, string(b)) } diff --git a/shortcuts/drive/drive_export.go b/shortcuts/drive/drive_export.go new file mode 100644 index 00000000..edffcb04 --- /dev/null +++ b/shortcuts/drive/drive_export.go @@ -0,0 +1,245 @@ +// Copyright (c) 2026 Lark Technologies Pte. Ltd. 
+// SPDX-License-Identifier: MIT + +package drive + +import ( + "context" + "errors" + "fmt" + "path/filepath" + "strings" + "time" + + "github.com/larksuite/cli/internal/output" + "github.com/larksuite/cli/shortcuts/common" +) + +// DriveExport exports Drive-native documents to local files and falls back to +// a follow-up command when the async export task does not finish in time. +var DriveExport = common.Shortcut{ + Service: "drive", + Command: "+export", + Description: "Export a doc/docx/sheet/bitable to a local file with limited polling", + Risk: "read", + Scopes: []string{ + "docs:document.content:read", + "docs:document:export", + "drive:drive.metadata:readonly", + }, + AuthTypes: []string{"user", "bot"}, + Flags: []common.Flag{ + {Name: "token", Desc: "source document token", Required: true}, + {Name: "doc-type", Desc: "source document type: doc | docx | sheet | bitable", Required: true, Enum: []string{"doc", "docx", "sheet", "bitable"}}, + {Name: "file-extension", Desc: "export format: docx | pdf | xlsx | csv | markdown", Required: true, Enum: []string{"docx", "pdf", "xlsx", "csv", "markdown"}}, + {Name: "sub-id", Desc: "sub-table/sheet ID, required when exporting sheet/bitable as csv"}, + {Name: "output-dir", Default: ".", Desc: "local output directory (default: current directory)"}, + {Name: "overwrite", Type: "bool", Desc: "overwrite existing output file"}, + }, + Validate: func(ctx context.Context, runtime *common.RuntimeContext) error { + return validateDriveExportSpec(driveExportSpec{ + Token: runtime.Str("token"), + DocType: runtime.Str("doc-type"), + FileExtension: runtime.Str("file-extension"), + SubID: runtime.Str("sub-id"), + }) + }, + DryRun: func(ctx context.Context, runtime *common.RuntimeContext) *common.DryRunAPI { + spec := driveExportSpec{ + Token: runtime.Str("token"), + DocType: runtime.Str("doc-type"), + FileExtension: runtime.Str("file-extension"), + SubID: runtime.Str("sub-id"), + } + // Markdown export is a special case: docx 
markdown comes from docs content + // directly instead of the Drive export task API. + if spec.FileExtension == "markdown" { + return common.NewDryRunAPI(). + Desc("2-step orchestration: fetch docx markdown -> write local file"). + GET("/open-apis/docs/v1/content"). + Params(map[string]interface{}{ + "doc_token": spec.Token, + "doc_type": "docx", + "content_type": "markdown", + }) + } + + body := map[string]interface{}{ + "token": spec.Token, + "type": spec.DocType, + "file_extension": spec.FileExtension, + } + if strings.TrimSpace(spec.SubID) != "" { + body["sub_id"] = spec.SubID + } + + return common.NewDryRunAPI(). + Desc("3-step orchestration: create export task -> limited polling -> download file"). + POST("/open-apis/drive/v1/export_tasks"). + Body(body) + }, + Execute: func(ctx context.Context, runtime *common.RuntimeContext) error { + spec := driveExportSpec{ + Token: runtime.Str("token"), + DocType: runtime.Str("doc-type"), + FileExtension: runtime.Str("file-extension"), + SubID: runtime.Str("sub-id"), + } + outputDir := runtime.Str("output-dir") + overwrite := runtime.Bool("overwrite") + + // Markdown export bypasses the async export task and writes the fetched + // markdown content directly to disk. + if spec.FileExtension == "markdown" { + fmt.Fprintf(runtime.IO().ErrOut, "Exporting docx as markdown: %s\n", common.MaskToken(spec.Token)) + data, err := runtime.CallAPI( + "GET", + "/open-apis/docs/v1/content", + map[string]interface{}{ + "doc_token": spec.Token, + "doc_type": "docx", + "content_type": "markdown", + }, + nil, + ) + if err != nil { + return err + } + + // Prefer the remote title for the exported file name, but still fall + // back to the token if metadata is empty. 
+ title, err := fetchDriveMetaTitle(runtime, spec.Token, spec.DocType) + if err != nil { + fmt.Fprintf(runtime.IO().ErrOut, "Title lookup failed, using token as filename: %v\n", err) + title = spec.Token + } + fileName := ensureExportFileExtension(sanitizeExportFileName(title, spec.Token), spec.FileExtension) + savedPath, err := saveContentToOutputDir(outputDir, fileName, []byte(common.GetString(data, "content")), overwrite) + if err != nil { + return err + } + + runtime.Out(map[string]interface{}{ + "token": spec.Token, + "doc_type": spec.DocType, + "file_extension": spec.FileExtension, + "file_name": filepath.Base(savedPath), + "saved_path": savedPath, + "size_bytes": len([]byte(common.GetString(data, "content"))), + }, nil) + return nil + } + + ticket, err := createDriveExportTask(runtime, spec) + if err != nil { + return err + } + fmt.Fprintf(runtime.IO().ErrOut, "Created export task: %s\n", ticket) + + var lastStatus driveExportStatus + var lastPollErr error + hasObservedStatus := false + // Keep the command responsive by polling for a bounded window. If the task + // is still running after that, return a resume command instead of blocking. + for attempt := 1; attempt <= driveExportPollAttempts; attempt++ { + if attempt > 1 { + select { + case <-ctx.Done(): + return ctx.Err() + case <-time.After(driveExportPollInterval): + } + } + if err := ctx.Err(); err != nil { + return err + } + + status, err := getDriveExportStatus(runtime, spec.Token, ticket) + if err != nil { + // Treat polling failures as transient so short-lived backend hiccups + // do not immediately fail an otherwise healthy export task. 
+ lastPollErr = err + fmt.Fprintf(runtime.IO().ErrOut, "Export status attempt %d/%d failed: %v\n", attempt, driveExportPollAttempts, err) + continue + } + lastStatus = status + hasObservedStatus = true + + if status.Ready() { + fmt.Fprintf(runtime.IO().ErrOut, "Export task completed: %s\n", common.MaskToken(status.FileToken)) + fileName := ensureExportFileExtension(sanitizeExportFileName(status.FileName, spec.Token), spec.FileExtension) + out, err := downloadDriveExportFile(ctx, runtime, status.FileToken, outputDir, fileName, overwrite) + if err != nil { + recoveryCommand := driveExportDownloadCommand(status.FileToken, fileName, outputDir, overwrite) + hint := fmt.Sprintf( + "the export artifact is already ready (ticket=%s, file_token=%s)\nretry download with: %s", + ticket, + status.FileToken, + recoveryCommand, + ) + var exitErr *output.ExitError + if errors.As(err, &exitErr) && exitErr.Detail != nil { + return output.ErrWithHint(exitErr.Code, exitErr.Detail.Type, exitErr.Detail.Message, hint) + } + return output.ErrWithHint(output.ExitAPI, "api_error", err.Error(), hint) + } + out["ticket"] = ticket + out["doc_type"] = spec.DocType + out["file_extension"] = spec.FileExtension + runtime.Out(out, nil) + return nil + } + + if status.Failed() { + msg := strings.TrimSpace(status.JobErrorMsg) + if msg == "" { + msg = status.StatusLabel() + } + return output.Errorf(output.ExitAPI, "api_error", "export task failed: %s (ticket=%s)", msg, ticket) + } + + fmt.Fprintf(runtime.IO().ErrOut, "Export status %d/%d: %s\n", attempt, driveExportPollAttempts, status.StatusLabel()) + } + + nextCommand := driveExportTaskResultCommand(ticket, spec.Token) + if !hasObservedStatus && lastPollErr != nil { + hint := fmt.Sprintf( + "the export task was created but every status poll failed (ticket=%s)\nretry status lookup with: %s", + ticket, + nextCommand, + ) + var exitErr *output.ExitError + if errors.As(lastPollErr, &exitErr) && exitErr.Detail != nil { + if 
strings.TrimSpace(exitErr.Detail.Hint) != "" { + hint = exitErr.Detail.Hint + "\n" + hint + } + return output.ErrWithHint(exitErr.Code, exitErr.Detail.Type, exitErr.Detail.Message, hint) + } + return output.ErrWithHint(output.ExitAPI, "api_error", lastPollErr.Error(), hint) + } + + failed := false + var jobStatus interface{} + jobStatusLabel := "unknown" + if hasObservedStatus { + failed = lastStatus.Failed() + jobStatus = lastStatus.JobStatus + jobStatusLabel = lastStatus.StatusLabel() + } + // Return the last observed status so callers can resume from a known task + // state instead of losing all progress information on timeout. + runtime.Out(map[string]interface{}{ + "ticket": ticket, + "token": spec.Token, + "doc_type": spec.DocType, + "file_extension": spec.FileExtension, + "ready": false, + "failed": failed, + "job_status": jobStatus, + "job_status_label": jobStatusLabel, + "timed_out": true, + "next_command": nextCommand, + }, nil) + fmt.Fprintf(runtime.IO().ErrOut, "Export task is still in progress. Continue with: %s\n", nextCommand) + return nil + }, +} diff --git a/shortcuts/drive/drive_export_common.go b/shortcuts/drive/drive_export_common.go new file mode 100644 index 00000000..02707a6c --- /dev/null +++ b/shortcuts/drive/drive_export_common.go @@ -0,0 +1,371 @@ +// Copyright (c) 2026 Lark Technologies Pte. Ltd. +// SPDX-License-Identifier: MIT + +package drive + +import ( + "context" + "fmt" + "net/http" + "os" + "path/filepath" + "strconv" + "strings" + "time" + + larkcore "github.com/larksuite/oapi-sdk-go/v3/core" + + "github.com/larksuite/cli/internal/client" + "github.com/larksuite/cli/internal/output" + "github.com/larksuite/cli/internal/validate" + "github.com/larksuite/cli/shortcuts/common" +) + +var ( + driveExportPollAttempts = 10 + driveExportPollInterval = 5 * time.Second +) + +// driveExportSpec contains the normalized export request understood by the +// shortcut and the underlying export task APIs. 
+type driveExportSpec struct { + Token string + DocType string + FileExtension string + SubID string +} + +// driveExportTaskResultCommand prints the resume command shown when bounded +// export polling times out locally. +func driveExportTaskResultCommand(ticket, docToken string) string { + return fmt.Sprintf("lark-cli drive +task_result --scenario export --ticket %s --file-token %s", ticket, docToken) +} + +// driveExportDownloadCommand prints a copy-pasteable follow-up command for +// downloading an already-generated export artifact by file token. +func driveExportDownloadCommand(fileToken, fileName, outputDir string, overwrite bool) string { + parts := []string{ + "lark-cli", "drive", "+export-download", + "--file-token", strconv.Quote(fileToken), + } + if strings.TrimSpace(fileName) != "" { + parts = append(parts, "--file-name", strconv.Quote(fileName)) + } + if strings.TrimSpace(outputDir) != "" && outputDir != "." { + parts = append(parts, "--output-dir", strconv.Quote(outputDir)) + } + if overwrite { + parts = append(parts, "--overwrite") + } + return strings.Join(parts, " ") +} + +// driveExportStatus captures the fields needed to decide whether the export is +// ready for download, still pending, or terminally failed. +type driveExportStatus struct { + Ticket string + FileExtension string + DocType string + FileName string + FileToken string + JobErrorMsg string + FileSize int64 + JobStatus int +} + +func (s driveExportStatus) Ready() bool { + return s.FileToken != "" && s.JobStatus == 0 +} + +func (s driveExportStatus) Pending() bool { + // A zero status without a file token is still in progress because there is + // nothing downloadable yet. 
+ return s.JobStatus == 1 || s.JobStatus == 2 || s.JobStatus == 0 && s.FileToken == "" +} + +func (s driveExportStatus) Failed() bool { + return !s.Ready() && !s.Pending() && s.JobStatus != 0 +} + +func (s driveExportStatus) StatusLabel() string { + switch s.JobStatus { + case 0: + // Success is a special case where the file token is set. + if s.FileToken != "" { + return "success" + } + return "pending" + case 1: + return "new" + case 2: + return "processing" + case 3: + return "internal_error" + case 107: + return "export_size_limit" + case 108: + return "timeout" + case 109: + return "export_block_not_permitted" + case 110: + return "no_permission" + case 111: + return "docs_deleted" + case 122: + return "export_denied_on_copying" + case 123: + return "docs_not_exist" + case 6000: + return "export_images_exceed_limit" + default: + return fmt.Sprintf("status_%d", s.JobStatus) + } +} + +// validateDriveExportSpec enforces shortcut-level export constraints before any +// backend request is sent. 
+func validateDriveExportSpec(spec driveExportSpec) error { + if err := validate.ResourceName(spec.Token, "--token"); err != nil { + return output.ErrValidation("%s", err) + } + + switch spec.DocType { + case "doc", "docx", "sheet", "bitable": + default: + return output.ErrValidation("invalid --doc-type %q: allowed values are doc, docx, sheet, bitable", spec.DocType) + } + + switch spec.FileExtension { + case "docx", "pdf", "xlsx", "csv", "markdown": + default: + return output.ErrValidation("invalid --file-extension %q: allowed values are docx, pdf, xlsx, csv, markdown", spec.FileExtension) + } + + if spec.FileExtension == "markdown" && spec.DocType != "docx" { + return output.ErrValidation("--file-extension markdown only supports --doc-type docx") + } + + if strings.TrimSpace(spec.SubID) != "" { + if spec.FileExtension != "csv" || (spec.DocType != "sheet" && spec.DocType != "bitable") { + return output.ErrValidation("--sub-id is only used when exporting sheet/bitable as csv") + } + if err := validate.ResourceName(spec.SubID, "--sub-id"); err != nil { + return output.ErrValidation("%s", err) + } + } + + if spec.FileExtension == "csv" && (spec.DocType == "sheet" || spec.DocType == "bitable") && strings.TrimSpace(spec.SubID) == "" { + return output.ErrValidation("--sub-id is required when exporting sheet/bitable as csv") + } + + return nil +} + +// createDriveExportTask starts the asynchronous export job and returns its +// ticket for subsequent polling. 
+func createDriveExportTask(runtime *common.RuntimeContext, spec driveExportSpec) (string, error) { + body := map[string]interface{}{ + "token": spec.Token, + "type": spec.DocType, + "file_extension": spec.FileExtension, + } + if strings.TrimSpace(spec.SubID) != "" { + body["sub_id"] = spec.SubID + } + + data, err := runtime.CallAPI("POST", "/open-apis/drive/v1/export_tasks", nil, body) + if err != nil { + return "", err + } + + ticket := common.GetString(data, "ticket") + if ticket == "" { + return "", output.Errorf(output.ExitAPI, "api_error", "export task created but ticket is missing") + } + return ticket, nil +} + +// getDriveExportStatus fetches the current backend state for a previously +// created export task. +func getDriveExportStatus(runtime *common.RuntimeContext, token, ticket string) (driveExportStatus, error) { + data, err := runtime.CallAPI( + "GET", + fmt.Sprintf("/open-apis/drive/v1/export_tasks/%s", validate.EncodePathSegment(ticket)), + map[string]interface{}{"token": token}, + nil, + ) + if err != nil { + return driveExportStatus{}, err + } + return parseDriveExportStatus(ticket, data), nil +} + +// parseDriveExportStatus accepts the wrapped export result and normalizes the +// subset of fields used by the shortcut. +func parseDriveExportStatus(ticket string, data map[string]interface{}) driveExportStatus { + result := common.GetMap(data, "result") + status := driveExportStatus{ + Ticket: ticket, + } + if result == nil { + // Keep the ticket even when the result body is missing so callers can + // still show a resumable task reference. 
+ return status + } + + status.FileExtension = common.GetString(result, "file_extension") + status.DocType = common.GetString(result, "type") + status.FileName = common.GetString(result, "file_name") + status.FileToken = common.GetString(result, "file_token") + status.JobErrorMsg = common.GetString(result, "job_error_msg") + status.FileSize = int64(common.GetFloat(result, "file_size")) + status.JobStatus = int(common.GetFloat(result, "job_status")) + return status +} + +// fetchDriveMetaTitle looks up the document title so exported files can use a +// human-readable default name when possible. +func fetchDriveMetaTitle(runtime *common.RuntimeContext, token, docType string) (string, error) { + data, err := runtime.CallAPI( + "POST", + "/open-apis/drive/v1/metas/batch_query", + nil, + map[string]interface{}{ + "request_docs": []map[string]interface{}{ + { + "doc_token": token, + "doc_type": docType, + }, + }, + }, + ) + if err != nil { + return "", err + } + + metas := common.GetSlice(data, "metas") + if len(metas) == 0 { + return "", nil + } + meta, _ := metas[0].(map[string]interface{}) + return common.GetString(meta, "title"), nil +} + +// saveContentToOutputDir validates the target path, enforces overwrite policy, +// and writes the payload atomically to disk. +func saveContentToOutputDir(outputDir, fileName string, payload []byte, overwrite bool) (string, error) { + if outputDir == "" { + outputDir = "." + } + + // Sanitize both the filename and the combined output path so caller-provided + // names cannot escape the requested output directory. 
+ safeName := sanitizeExportFileName(fileName, "export.bin") + target := filepath.Join(outputDir, safeName) + safePath, err := validate.SafeOutputPath(target) + if err != nil { + return "", output.ErrValidation("unsafe output path: %s", err) + } + if err := common.EnsureWritableFile(safePath, overwrite); err != nil { + return "", err + } + + if err := os.MkdirAll(filepath.Dir(safePath), 0755); err != nil { + return "", output.Errorf(output.ExitInternal, "io", "cannot create output directory: %s", err) + } + if err := validate.AtomicWrite(safePath, payload, 0644); err != nil { + return "", output.Errorf(output.ExitInternal, "io", "cannot write file: %s", err) + } + return safePath, nil +} + +// downloadDriveExportFile downloads the exported artifact, derives a safe local +// file name, and returns metadata about the saved file. +func downloadDriveExportFile(ctx context.Context, runtime *common.RuntimeContext, fileToken, outputDir, preferredName string, overwrite bool) (map[string]interface{}, error) { + if err := validate.ResourceName(fileToken, "--file-token"); err != nil { + return nil, output.ErrValidation("%s", err) + } + + apiResp, err := runtime.DoAPI(&larkcore.ApiReq{ + HttpMethod: http.MethodGet, + ApiPath: fmt.Sprintf("/open-apis/drive/v1/export_tasks/file/%s/download", validate.EncodePathSegment(fileToken)), + }, larkcore.WithFileDownload()) + if err != nil { + return nil, output.ErrNetwork("download failed: %s", err) + } + if apiResp.StatusCode >= 400 { + return nil, output.ErrNetwork("download failed: HTTP %d: %s", apiResp.StatusCode, string(apiResp.RawBody)) + } + + fileName := strings.TrimSpace(preferredName) + if fileName == "" { + // Fall back to the server-provided download name when the caller did not + // request an explicit local file name. 
+ fileName = client.ResolveFilename(apiResp) + } + savedPath, err := saveContentToOutputDir(outputDir, fileName, apiResp.RawBody, overwrite) + if err != nil { + return nil, err + } + + return map[string]interface{}{ + "file_token": fileToken, + "file_name": filepath.Base(savedPath), + "saved_path": savedPath, + "size_bytes": len(apiResp.RawBody), + "content_type": apiResp.Header.Get("Content-Type"), + }, nil +} + +// sanitizeExportFileName strips path traversal and unsupported characters while +// preserving a readable file name when possible. +func sanitizeExportFileName(name, fallback string) string { + name = strings.TrimSpace(filepath.Base(name)) + if name == "" || name == "." || name == string(filepath.Separator) { + name = fallback + } + + replacer := strings.NewReplacer( + "/", "_", "\\", "_", ":", "_", "*", "_", "?", "_", + "\"", "_", "<", "_", ">", "_", "|", "_", + "\n", "_", "\r", "_", "\t", "_", "\x00", "_", + ) + name = replacer.Replace(name) + name = strings.Trim(name, ". ") + if name == "" { + return fallback + } + return name +} + +// ensureExportFileExtension appends the expected local suffix when the chosen +// file name does not already end with the export format's extension. +func ensureExportFileExtension(name, fileExtension string) string { + expected := exportFileSuffix(fileExtension) + if expected == "" { + return name + } + if strings.EqualFold(filepath.Ext(name), expected) { + return name + } + return name + expected +} + +// exportFileSuffix maps shortcut-level export formats to the local filename +// suffix written to disk. 
+func exportFileSuffix(fileExtension string) string { + switch fileExtension { + case "markdown": + return ".md" + case "docx": + return ".docx" + case "pdf": + return ".pdf" + case "xlsx": + return ".xlsx" + case "csv": + return ".csv" + default: + return "" + } +} diff --git a/shortcuts/drive/drive_export_common_test.go b/shortcuts/drive/drive_export_common_test.go new file mode 100644 index 00000000..39258cf6 --- /dev/null +++ b/shortcuts/drive/drive_export_common_test.go @@ -0,0 +1,67 @@ +// Copyright (c) 2026 Lark Technologies Pte. Ltd. +// SPDX-License-Identifier: MIT + +package drive + +import "testing" + +func TestDriveExportStatusLabelCoversKnownAndUnknownCodes(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + status driveExportStatus + want string + }{ + { + name: "size limit", + status: driveExportStatus{JobStatus: 107}, + want: "export_size_limit", + }, + { + name: "not exist", + status: driveExportStatus{JobStatus: 123}, + want: "docs_not_exist", + }, + { + name: "unknown status", + status: driveExportStatus{JobStatus: 999}, + want: "status_999", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + if got := tt.status.StatusLabel(); got != tt.want { + t.Fatalf("StatusLabel() = %q, want %q", got, tt.want) + } + }) + } +} + +func TestParseDriveExportStatusWithoutResultKeepsTicket(t *testing.T) { + t.Parallel() + + status := parseDriveExportStatus("ticket_export_test", map[string]interface{}{}) + if status.Ticket != "ticket_export_test" { + t.Fatalf("ticket = %q, want %q", status.Ticket, "ticket_export_test") + } + if status.FileToken != "" { + t.Fatalf("file token = %q, want empty", status.FileToken) + } +} + +func TestSanitizeExportFileNameAndEnsureExtension(t *testing.T) { + t.Parallel() + + if got := sanitizeExportFileName("../quarterly:report?.pdf", "fallback.bin"); got != "quarterly_report_.pdf" { + t.Fatalf("sanitizeExportFileName() = %q, want %q", got, "quarterly_report_.pdf") + } + if 
got := ensureExportFileExtension("meeting-notes", "markdown"); got != "meeting-notes.md" { + t.Fatalf("ensureExportFileExtension() = %q, want %q", got, "meeting-notes.md") + } + if got := ensureExportFileExtension("report.pdf", "pdf"); got != "report.pdf" { + t.Fatalf("ensureExportFileExtension() should preserve suffix, got %q", got) + } +} diff --git a/shortcuts/drive/drive_export_download.go b/shortcuts/drive/drive_export_download.go new file mode 100644 index 00000000..62ddd922 --- /dev/null +++ b/shortcuts/drive/drive_export_download.go @@ -0,0 +1,60 @@ +// Copyright (c) 2026 Lark Technologies Pte. Ltd. +// SPDX-License-Identifier: MIT + +package drive + +import ( + "context" + + "github.com/larksuite/cli/internal/output" + "github.com/larksuite/cli/internal/validate" + "github.com/larksuite/cli/shortcuts/common" +) + +// DriveExportDownload downloads an already-generated export artifact when the +// caller has a file token from a previous export task. +var DriveExportDownload = common.Shortcut{ + Service: "drive", + Command: "+export-download", + Description: "Download an exported file by file_token", + Risk: "read", + Scopes: []string{ + "docs:document:export", + }, + AuthTypes: []string{"user", "bot"}, + Flags: []common.Flag{ + {Name: "file-token", Desc: "exported file token", Required: true}, + {Name: "file-name", Desc: "preferred output filename (optional)"}, + {Name: "output-dir", Default: ".", Desc: "local output directory (default: current directory)"}, + {Name: "overwrite", Type: "bool", Desc: "overwrite existing output file"}, + }, + Validate: func(ctx context.Context, runtime *common.RuntimeContext) error { + if err := validate.ResourceName(runtime.Str("file-token"), "--file-token"); err != nil { + return output.ErrValidation("%s", err) + } + return nil + }, + DryRun: func(ctx context.Context, runtime *common.RuntimeContext) *common.DryRunAPI { + return common.NewDryRunAPI(). + GET("/open-apis/drive/v1/export_tasks/file/:file_token/download"). 
+ Set("file_token", runtime.Str("file-token")). + Set("output_dir", runtime.Str("output-dir")) + }, + Execute: func(ctx context.Context, runtime *common.RuntimeContext) error { + // Reuse the shared export download helper so overwrite checks, filename + // resolution, and output metadata stay consistent with drive +export. + out, err := downloadDriveExportFile( + ctx, + runtime, + runtime.Str("file-token"), + runtime.Str("output-dir"), + runtime.Str("file-name"), + runtime.Bool("overwrite"), + ) + if err != nil { + return err + } + runtime.Out(out, nil) + return nil + }, +} diff --git a/shortcuts/drive/drive_export_test.go b/shortcuts/drive/drive_export_test.go new file mode 100644 index 00000000..46ba0bfa --- /dev/null +++ b/shortcuts/drive/drive_export_test.go @@ -0,0 +1,516 @@ +// Copyright (c) 2026 Lark Technologies Pte. Ltd. +// SPDX-License-Identifier: MIT + +package drive + +import ( + "bytes" + "errors" + "net/http" + "os" + "path/filepath" + "strings" + "testing" + + "github.com/larksuite/cli/internal/cmdutil" + "github.com/larksuite/cli/internal/httpmock" + "github.com/larksuite/cli/internal/output" +) + +func TestValidateDriveExportSpec(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + spec driveExportSpec + wantErr string + }{ + { + name: "markdown docx ok", + spec: driveExportSpec{Token: "docx123", DocType: "docx", FileExtension: "markdown"}, + }, + { + name: "markdown non docx rejected", + spec: driveExportSpec{Token: "doc123", DocType: "doc", FileExtension: "markdown"}, + wantErr: "only supports --doc-type docx", + }, + { + name: "csv without sub id rejected", + spec: driveExportSpec{Token: "sheet123", DocType: "sheet", FileExtension: "csv"}, + wantErr: "--sub-id is required", + }, + { + name: "sub id on non csv rejected", + spec: driveExportSpec{Token: "docx123", DocType: "docx", FileExtension: "pdf", SubID: "tbl_1"}, + wantErr: "--sub-id is only used", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) 
{ + t.Parallel() + err := validateDriveExportSpec(tt.spec) + if tt.wantErr == "" { + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + return + } + if err == nil || !strings.Contains(err.Error(), tt.wantErr) { + t.Fatalf("expected error containing %q, got %v", tt.wantErr, err) + } + }) + } +} + +func TestDriveExportMarkdownWritesFile(t *testing.T) { + f, stdout, _, reg := cmdutil.TestFactory(t, driveTestConfig()) + registerDriveBotTokenStub(reg) + reg.Register(&httpmock.Stub{ + Method: "GET", + URL: "/open-apis/docs/v1/content", + Body: map[string]interface{}{ + "code": 0, + "data": map[string]interface{}{ + "content": "# hello\n", + }, + }, + }) + reg.Register(&httpmock.Stub{ + Method: "POST", + URL: "/open-apis/drive/v1/metas/batch_query", + Body: map[string]interface{}{ + "code": 0, + "data": map[string]interface{}{ + "metas": []map[string]interface{}{ + {"title": "Weekly Notes"}, + }, + }, + }, + }) + + tmpDir := t.TempDir() + withDriveWorkingDir(t, tmpDir) + + err := mountAndRunDrive(t, DriveExport, []string{ + "+export", + "--token", "docx123", + "--doc-type", "docx", + "--file-extension", "markdown", + "--as", "bot", + }, f, stdout) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + + data, err := os.ReadFile(filepath.Join(tmpDir, "Weekly Notes.md")) + if err != nil { + t.Fatalf("ReadFile() error: %v", err) + } + if string(data) != "# hello\n" { + t.Fatalf("markdown content = %q", string(data)) + } + if !strings.Contains(stdout.String(), "Weekly Notes.md") { + t.Fatalf("stdout missing file name: %s", stdout.String()) + } +} + +func TestDriveExportAsyncSuccess(t *testing.T) { + f, stdout, _, reg := cmdutil.TestFactory(t, driveTestConfig()) + registerDriveBotTokenStub(reg) + reg.Register(&httpmock.Stub{ + Method: "POST", + URL: "/open-apis/drive/v1/export_tasks", + Body: map[string]interface{}{ + "code": 0, + "data": map[string]interface{}{"ticket": "tk_123"}, + }, + }) + reg.Register(&httpmock.Stub{ + Method: "GET", + URL: 
"/open-apis/drive/v1/export_tasks/tk_123", + Body: map[string]interface{}{ + "code": 0, + "data": map[string]interface{}{ + "result": map[string]interface{}{ + "job_status": 0, + "file_token": "box_123", + "file_name": "report", + "file_extension": "pdf", + "type": "docx", + "file_size": 3, + }, + }, + }, + }) + reg.Register(&httpmock.Stub{ + Method: "GET", + URL: "/open-apis/drive/v1/export_tasks/file/box_123/download", + Status: 200, + RawBody: []byte("pdf"), + Headers: http.Header{ + "Content-Type": []string{"application/pdf"}, + "Content-Disposition": []string{`attachment; filename="report.pdf"`}, + }, + }) + + tmpDir := t.TempDir() + withDriveWorkingDir(t, tmpDir) + + prevAttempts, prevInterval := driveExportPollAttempts, driveExportPollInterval + driveExportPollAttempts, driveExportPollInterval = 1, 0 + t.Cleanup(func() { + driveExportPollAttempts, driveExportPollInterval = prevAttempts, prevInterval + }) + + err := mountAndRunDrive(t, DriveExport, []string{ + "+export", + "--token", "docx123", + "--doc-type", "docx", + "--file-extension", "pdf", + "--as", "bot", + }, f, stdout) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + + data, err := os.ReadFile(filepath.Join(tmpDir, "report.pdf")) + if err != nil { + t.Fatalf("ReadFile() error: %v", err) + } + if string(data) != "pdf" { + t.Fatalf("downloaded content = %q", string(data)) + } + if !strings.Contains(stdout.String(), `"ticket": "tk_123"`) { + t.Fatalf("stdout missing ticket: %s", stdout.String()) + } +} + +func TestDriveExportReadyDownloadFailureIncludesRecoveryHint(t *testing.T) { + f, _, _, reg := cmdutil.TestFactory(t, driveTestConfig()) + registerDriveBotTokenStub(reg) + reg.Register(&httpmock.Stub{ + Method: "POST", + URL: "/open-apis/drive/v1/export_tasks", + Body: map[string]interface{}{ + "code": 0, + "data": map[string]interface{}{"ticket": "tk_ready"}, + }, + }) + reg.Register(&httpmock.Stub{ + Method: "GET", + URL: "/open-apis/drive/v1/export_tasks/tk_ready", + Body: 
map[string]interface{}{ + "code": 0, + "data": map[string]interface{}{ + "result": map[string]interface{}{ + "job_status": 0, + "file_token": "box_ready", + "file_name": "report", + "file_extension": "pdf", + "type": "docx", + }, + }, + }, + }) + reg.Register(&httpmock.Stub{ + Method: "GET", + URL: "/open-apis/drive/v1/export_tasks/file/box_ready/download", + Status: 200, + RawBody: []byte("pdf"), + Headers: http.Header{ + "Content-Type": []string{"application/pdf"}, + "Content-Disposition": []string{`attachment; filename="report.pdf"`}, + }, + }) + + tmpDir := t.TempDir() + withDriveWorkingDir(t, tmpDir) + if err := os.WriteFile(filepath.Join(tmpDir, "report.pdf"), []byte("old"), 0644); err != nil { + t.Fatalf("WriteFile() error: %v", err) + } + + prevAttempts, prevInterval := driveExportPollAttempts, driveExportPollInterval + driveExportPollAttempts, driveExportPollInterval = 1, 0 + t.Cleanup(func() { + driveExportPollAttempts, driveExportPollInterval = prevAttempts, prevInterval + }) + + err := mountAndRunDrive(t, DriveExport, []string{ + "+export", + "--token", "docx123", + "--doc-type", "docx", + "--file-extension", "pdf", + "--as", "bot", + }, f, nil) + if err == nil { + t.Fatal("expected download recovery error, got nil") + } + + var exitErr *output.ExitError + if !errors.As(err, &exitErr) || exitErr.Detail == nil { + t.Fatalf("expected structured exit error, got %v", err) + } + if !strings.Contains(exitErr.Detail.Message, "already exists") { + t.Fatalf("message missing overwrite guidance: %q", exitErr.Detail.Message) + } + if !strings.Contains(exitErr.Detail.Hint, "ticket=tk_ready") { + t.Fatalf("hint missing ticket: %q", exitErr.Detail.Hint) + } + if !strings.Contains(exitErr.Detail.Hint, "file_token=box_ready") { + t.Fatalf("hint missing file token: %q", exitErr.Detail.Hint) + } + if !strings.Contains(exitErr.Detail.Hint, `lark-cli drive +export-download --file-token "box_ready" --file-name "report.pdf"`) { + t.Fatalf("hint missing recovery command: %q", 
exitErr.Detail.Hint) + } +} + +func TestDriveExportTimeoutReturnsFollowUpCommand(t *testing.T) { + f, stdout, _, reg := cmdutil.TestFactory(t, driveTestConfig()) + registerDriveBotTokenStub(reg) + reg.Register(&httpmock.Stub{ + Method: "POST", + URL: "/open-apis/drive/v1/export_tasks", + Body: map[string]interface{}{ + "code": 0, + "data": map[string]interface{}{"ticket": "tk_456"}, + }, + }) + reg.Register(&httpmock.Stub{ + Method: "GET", + URL: "/open-apis/drive/v1/export_tasks/tk_456", + Body: map[string]interface{}{ + "code": 0, + "data": map[string]interface{}{ + "result": map[string]interface{}{ + "job_status": 2, + }, + }, + }, + }) + + tmpDir := t.TempDir() + withDriveWorkingDir(t, tmpDir) + + prevAttempts, prevInterval := driveExportPollAttempts, driveExportPollInterval + driveExportPollAttempts, driveExportPollInterval = 1, 0 + t.Cleanup(func() { + driveExportPollAttempts, driveExportPollInterval = prevAttempts, prevInterval + }) + + err := mountAndRunDrive(t, DriveExport, []string{ + "+export", + "--token", "docx123", + "--doc-type", "docx", + "--file-extension", "pdf", + "--as", "bot", + }, f, stdout) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if !strings.Contains(stdout.String(), `"ticket": "tk_456"`) { + t.Fatalf("stdout missing ticket: %s", stdout.String()) + } + if !strings.Contains(stdout.String(), `"timed_out": true`) { + t.Fatalf("stdout missing timed_out=true: %s", stdout.String()) + } + if !strings.Contains(stdout.String(), `"failed": false`) { + t.Fatalf("stdout missing failed=false: %s", stdout.String()) + } + if !strings.Contains(stdout.String(), `"job_status": 2`) { + t.Fatalf("stdout missing numeric job_status: %s", stdout.String()) + } + if !strings.Contains(stdout.String(), `"job_status_label": "processing"`) { + t.Fatalf("stdout missing processing job_status_label: %s", stdout.String()) + } + if !strings.Contains(stdout.String(), `"next_command": "lark-cli drive +task_result --scenario export --ticket tk_456 
--file-token docx123"`) { + t.Fatalf("stdout missing follow-up command: %s", stdout.String()) + } + if _, err := os.Stat(filepath.Join(tmpDir, "report.pdf")); !os.IsNotExist(err) { + t.Fatalf("unexpected downloaded file, err=%v", err) + } +} + +func TestDriveExportPollErrorsReturnLastErrorWithRecoveryHint(t *testing.T) { + f, stdout, _, reg := cmdutil.TestFactory(t, driveTestConfig()) + registerDriveBotTokenStub(reg) + reg.Register(&httpmock.Stub{ + Method: "POST", + URL: "/open-apis/drive/v1/export_tasks", + Body: map[string]interface{}{ + "code": 0, + "data": map[string]interface{}{"ticket": "tk_poll_fail"}, + }, + }) + reg.Register(&httpmock.Stub{ + Method: "GET", + URL: "/open-apis/drive/v1/export_tasks/tk_poll_fail", + Status: 500, + Body: map[string]interface{}{ + "code": 999, + "msg": "temporary backend failure", + }, + }) + + prevAttempts, prevInterval := driveExportPollAttempts, driveExportPollInterval + driveExportPollAttempts, driveExportPollInterval = 1, 0 + t.Cleanup(func() { + driveExportPollAttempts, driveExportPollInterval = prevAttempts, prevInterval + }) + + err := mountAndRunDrive(t, DriveExport, []string{ + "+export", + "--token", "docx123", + "--doc-type", "docx", + "--file-extension", "pdf", + "--as", "bot", + }, f, stdout) + if err == nil { + t.Fatal("expected persistent poll error, got nil") + } + if stdout.Len() != 0 { + t.Fatalf("stdout should stay empty on persistent poll error: %s", stdout.String()) + } + + var exitErr *output.ExitError + if !errors.As(err, &exitErr) || exitErr.Detail == nil { + t.Fatalf("expected structured exit error, got %v", err) + } + if !strings.Contains(exitErr.Detail.Message, "temporary backend failure") { + t.Fatalf("message missing last poll error: %q", exitErr.Detail.Message) + } + if !strings.Contains(exitErr.Detail.Hint, "ticket=tk_poll_fail") { + t.Fatalf("hint missing ticket: %q", exitErr.Detail.Hint) + } + if !strings.Contains(exitErr.Detail.Hint, "lark-cli drive +task_result --scenario export --ticket 
tk_poll_fail --file-token docx123") { + t.Fatalf("hint missing recovery command: %q", exitErr.Detail.Hint) + } +} + +func TestDriveExportDownloadUsesProvidedFileName(t *testing.T) { + f, stdout, _, reg := cmdutil.TestFactory(t, driveTestConfig()) + registerDriveBotTokenStub(reg) + reg.Register(&httpmock.Stub{ + Method: "GET", + URL: "/open-apis/drive/v1/export_tasks/file/box_789/download", + Status: 200, + RawBody: []byte("csv"), + Headers: http.Header{ + "Content-Type": []string{"text/csv"}, + }, + }) + + tmpDir := t.TempDir() + withDriveWorkingDir(t, tmpDir) + + err := mountAndRunDrive(t, DriveExportDownload, []string{ + "+export-download", + "--file-token", "box_789", + "--file-name", "custom.csv", + "--as", "bot", + }, f, stdout) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + + data, err := os.ReadFile(filepath.Join(tmpDir, "custom.csv")) + if err != nil { + t.Fatalf("ReadFile() error: %v", err) + } + if string(data) != "csv" { + t.Fatalf("downloaded content = %q", string(data)) + } +} + +func TestDriveExportDownloadRejectsOverwriteWithoutFlag(t *testing.T) { + f, _, _, reg := cmdutil.TestFactory(t, driveTestConfig()) + registerDriveBotTokenStub(reg) + reg.Register(&httpmock.Stub{ + Method: "GET", + URL: "/open-apis/drive/v1/export_tasks/file/box_dup/download", + Status: 200, + RawBody: []byte("new"), + Headers: http.Header{ + "Content-Type": []string{"application/pdf"}, + "Content-Disposition": []string{`attachment; filename="dup.pdf"`}, + }, + }) + + tmpDir := t.TempDir() + withDriveWorkingDir(t, tmpDir) + if err := os.WriteFile("dup.pdf", []byte("old"), 0644); err != nil { + t.Fatalf("WriteFile() error: %v", err) + } + + err := mountAndRunDrive(t, DriveExportDownload, []string{ + "+export-download", + "--file-token", "box_dup", + "--as", "bot", + }, f, nil) + if err == nil { + t.Fatal("expected overwrite protection error, got nil") + } + if !strings.Contains(err.Error(), "already exists") { + t.Fatalf("unexpected error: %v", err) + } +} + 
+func TestSaveContentToOutputDirRejectsOverwriteWithoutFlag(t *testing.T) { + + tmpDir := t.TempDir() + target := filepath.Join(tmpDir, "exists.txt") + if err := os.WriteFile(target, []byte("old"), 0644); err != nil { + t.Fatalf("WriteFile() error: %v", err) + } + + cwd, err := os.Getwd() + if err != nil { + t.Fatalf("Getwd() error: %v", err) + } + if err := os.Chdir(tmpDir); err != nil { + t.Fatalf("Chdir() error: %v", err) + } + t.Cleanup(func() { _ = os.Chdir(cwd) }) + + _, err = saveContentToOutputDir(".", "exists.txt", []byte("new"), false) + if err == nil || !strings.Contains(err.Error(), "already exists") { + t.Fatalf("expected overwrite error, got %v", err) + } +} + +func TestDriveTaskResultExportIncludesReadyFlags(t *testing.T) { + f, stdout, _, reg := cmdutil.TestFactory(t, driveTestConfig()) + registerDriveBotTokenStub(reg) + reg.Register(&httpmock.Stub{ + Method: "GET", + URL: "/open-apis/drive/v1/export_tasks/tk_export", + Body: map[string]interface{}{ + "code": 0, + "data": map[string]interface{}{ + "result": map[string]interface{}{ + "job_status": 2, + }, + }, + }, + }) + + err := mountAndRunDrive(t, DriveTaskResult, []string{ + "+task_result", + "--scenario", "export", + "--ticket", "tk_export", + "--file-token", "docx123", + "--as", "bot", + }, f, stdout) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if !bytes.Contains(stdout.Bytes(), []byte(`"ready": false`)) { + t.Fatalf("stdout missing ready=false: %s", stdout.String()) + } + if !bytes.Contains(stdout.Bytes(), []byte(`"failed": false`)) { + t.Fatalf("stdout missing failed=false: %s", stdout.String()) + } + if !bytes.Contains(stdout.Bytes(), []byte(`"job_status_label": "processing"`)) { + t.Fatalf("stdout missing job_status_label: %s", stdout.String()) + } +} diff --git a/shortcuts/drive/drive_import.go b/shortcuts/drive/drive_import.go new file mode 100644 index 00000000..745be274 --- /dev/null +++ b/shortcuts/drive/drive_import.go @@ -0,0 +1,246 @@ +// Copyright (c) 2026 Lark 
Technologies Pte. Ltd. +// SPDX-License-Identifier: MIT + +package drive + +import ( + "context" + "encoding/json" + "errors" + "fmt" + "net/http" + "os" + "path/filepath" + "strings" + + larkcore "github.com/larksuite/oapi-sdk-go/v3/core" + + "github.com/larksuite/cli/internal/output" + "github.com/larksuite/cli/internal/validate" + "github.com/larksuite/cli/shortcuts/common" +) + +// DriveImport uploads a local file, creates an import task, and polls until +// the imported cloud document is ready or the local polling window expires. +var DriveImport = common.Shortcut{ + Service: "drive", + Command: "+import", + Description: "Import a local file to Drive as a cloud document (docx, sheet, bitable)", + Risk: "write", + Scopes: []string{ + "docs:document.media:upload", + "docs:document:import", + }, + AuthTypes: []string{"user", "bot"}, + Flags: []common.Flag{ + {Name: "file", Desc: "local file path (e.g. .docx, .xlsx, .md)", Required: true}, + {Name: "type", Desc: "target document type (docx, sheet, bitable)", Required: true}, + {Name: "folder-token", Desc: "target folder token (omit for root folder; API accepts empty mount_key as root)"}, + {Name: "name", Desc: "imported file name (default: local file name without extension)"}, + }, + Validate: func(ctx context.Context, runtime *common.RuntimeContext) error { + return validateDriveImportSpec(driveImportSpec{ + FilePath: runtime.Str("file"), + DocType: strings.ToLower(runtime.Str("type")), + FolderToken: runtime.Str("folder-token"), + Name: runtime.Str("name"), + }) + }, + DryRun: func(ctx context.Context, runtime *common.RuntimeContext) *common.DryRunAPI { + spec := driveImportSpec{ + FilePath: runtime.Str("file"), + DocType: strings.ToLower(runtime.Str("type")), + FolderToken: runtime.Str("folder-token"), + Name: runtime.Str("name"), + } + + dry := common.NewDryRunAPI() + dry.Desc("3-step orchestration: upload file -> create import task -> poll status") + + dry.POST("/open-apis/drive/v1/medias/upload_all"). 
+ Desc("[1] Upload file to get file_token"). + Body(map[string]interface{}{ + "file_name": spec.SourceFileName(), + "parent_type": "ccm_import_open", + "size": "", + "extra": fmt.Sprintf(`{"obj_type":"%s","file_extension":"%s"}`, spec.DocType, spec.FileExtension()), + "file": "@" + spec.FilePath, + }) + + dry.POST("/open-apis/drive/v1/import_tasks"). + Desc("[2] Create import task"). + Body(spec.CreateTaskBody("")) + + dry.GET("/open-apis/drive/v1/import_tasks/:ticket"). + Desc("[3] Poll import task result"). + Set("ticket", "") + + return dry + }, + Execute: func(ctx context.Context, runtime *common.RuntimeContext) error { + spec := driveImportSpec{ + FilePath: runtime.Str("file"), + DocType: strings.ToLower(runtime.Str("type")), + FolderToken: runtime.Str("folder-token"), + Name: runtime.Str("name"), + } + + // Normalize and validate the local input path before opening the file. + safeFilePath, err := validate.SafeInputPath(spec.FilePath) + if err != nil { + return output.ErrValidation("unsafe file path: %s", err) + } + spec.FilePath = safeFilePath + + // Step 1: Upload file as media + fileToken, uploadErr := uploadMediaForImport(ctx, runtime, spec.FilePath, spec.SourceFileName(), spec.DocType) + if uploadErr != nil { + return uploadErr + } + + fmt.Fprintf(runtime.IO().ErrOut, "Creating import task for %s as %s...\n", spec.TargetFileName(), spec.DocType) + + // Step 2: Create import task + ticket, err := createDriveImportTask(runtime, spec, fileToken) + if err != nil { + return err + } + + // Step 3: Poll task + fmt.Fprintf(runtime.IO().ErrOut, "Polling import task %s...\n", ticket) + + status, ready, err := pollDriveImportTask(runtime, ticket) + if err != nil { + return err + } + + // Some intermediate responses omit the final type, so fall back to the + // requested type to keep the output shape stable. 
+ resultType := status.DocType + if resultType == "" { + resultType = spec.DocType + } + out := map[string]interface{}{ + "ticket": ticket, + "type": resultType, + "ready": ready, + "job_status": status.JobStatus, + "job_status_label": status.StatusLabel(), + } + if status.Token != "" { + out["token"] = status.Token + } + if status.URL != "" { + out["url"] = status.URL + } + if status.JobErrorMsg != "" { + out["job_error_msg"] = status.JobErrorMsg + } + if status.Extra != nil { + out["extra"] = status.Extra + } + if !ready { + nextCommand := driveImportTaskResultCommand(ticket) + fmt.Fprintf(runtime.IO().ErrOut, "Import task is still in progress. Continue with: %s\n", nextCommand) + out["timed_out"] = true + out["next_command"] = nextCommand + } + + runtime.Out(out, nil) + return nil + }, +} + +// importTargetFileName returns the explicit import name when present, otherwise +// derives one from the local file name. +func importTargetFileName(filePath, explicitName string) string { + if explicitName != "" { + return explicitName + } + return importDefaultFileName(filePath) +} + +// importDefaultFileName strips only the last extension so names like +// "report.final.csv" become "report.final". +func importDefaultFileName(filePath string) string { + base := filepath.Base(filePath) + ext := filepath.Ext(base) + if ext == "" { + return base + } + name := strings.TrimSuffix(base, ext) + if name == "" { + return base + } + return name +} + +// uploadMediaForImport uploads the source file to the temporary import media +// endpoint and returns the file token consumed by import_tasks. 
+func uploadMediaForImport(ctx context.Context, runtime *common.RuntimeContext, filePath, fileName, docType string) (string, error) { + importInfo, err := os.Stat(filePath) + if err != nil { + return "", output.ErrValidation("cannot read file: %s", err) + } + fileSize := importInfo.Size() + if fileSize > maxDriveUploadFileSize { + return "", output.ErrValidation("file %.1fMB exceeds 20MB limit", float64(fileSize)/1024/1024) + } + + fmt.Fprintf(runtime.IO().ErrOut, "Uploading media for import: %s (%s)\n", fileName, common.FormatSize(fileSize)) + + f, err := os.Open(filePath) + if err != nil { + return "", err + } + defer f.Close() + + ext := strings.TrimPrefix(strings.ToLower(filepath.Ext(filePath)), ".") + extraMap := map[string]string{ + "obj_type": docType, + "file_extension": ext, + } + extraBytes, _ := json.Marshal(extraMap) + + // Build SDK Formdata + fd := larkcore.NewFormdata() + fd.AddField("file_name", fileName) + fd.AddField("parent_type", "ccm_import_open") + fd.AddField("size", fmt.Sprintf("%d", fileSize)) + fd.AddField("extra", string(extraBytes)) + fd.AddFile("file", f) + + apiResp, err := runtime.DoAPI(&larkcore.ApiReq{ + HttpMethod: http.MethodPost, + ApiPath: "/open-apis/drive/v1/medias/upload_all", + Body: fd, + }, larkcore.WithFileUpload()) + if err != nil { + var exitErr *output.ExitError + if errors.As(err, &exitErr) { + // Preserve already-classified CLI errors from lower layers instead of + // wrapping them as a generic network failure. 
+ return "", err + } + return "", output.ErrNetwork("upload media failed: %v", err) + } + + var result map[string]interface{} + if err := json.Unmarshal(apiResp.RawBody, &result); err != nil { + return "", output.Errorf(output.ExitAPI, "api_error", "upload media failed: invalid response JSON: %v", err) + } + + if larkCode := int(common.GetFloat(result, "code")); larkCode != 0 { + // Surface the backend error body so callers can see import-specific + // validation failures such as unsupported formats or permission issues. + msg, _ := result["msg"].(string) + return "", output.ErrAPI(larkCode, fmt.Sprintf("upload media failed: [%d] %s", larkCode, msg), result["error"]) + } + + data, _ := result["data"].(map[string]interface{}) + fileToken, _ := data["file_token"].(string) + if fileToken == "" { + return "", output.Errorf(output.ExitAPI, "api_error", "upload media failed: no file_token returned") + } + return fileToken, nil +} diff --git a/shortcuts/drive/drive_import_common.go b/shortcuts/drive/drive_import_common.go new file mode 100644 index 00000000..34eb7bf7 --- /dev/null +++ b/shortcuts/drive/drive_import_common.go @@ -0,0 +1,263 @@ +// Copyright (c) 2026 Lark Technologies Pte. Ltd. +// SPDX-License-Identifier: MIT + +package drive + +import ( + "fmt" + "path/filepath" + "strings" + "time" + + "github.com/larksuite/cli/internal/output" + "github.com/larksuite/cli/internal/validate" + "github.com/larksuite/cli/shortcuts/common" +) + +var ( + driveImportPollAttempts = 30 + driveImportPollInterval = 2 * time.Second +) + +// driveImportExtToDocTypes defines which source file extensions can be imported +// into which Drive-native document types. 
+var driveImportExtToDocTypes = map[string][]string{ + "docx": {"docx"}, + "doc": {"docx"}, + "txt": {"docx"}, + "md": {"docx"}, + "mark": {"docx"}, + "markdown": {"docx"}, + "html": {"docx"}, + "xlsx": {"sheet", "bitable"}, + "xls": {"sheet", "bitable"}, + "csv": {"sheet", "bitable"}, +} + +// driveImportSpec contains the user-facing import inputs after normalization. +type driveImportSpec struct { + FilePath string + DocType string + FolderToken string + Name string +} + +func (s driveImportSpec) FileExtension() string { + return strings.TrimPrefix(strings.ToLower(filepath.Ext(s.FilePath)), ".") +} + +func (s driveImportSpec) SourceFileName() string { + return filepath.Base(s.FilePath) +} + +func (s driveImportSpec) TargetFileName() string { + return importTargetFileName(s.FilePath, s.Name) +} + +// CreateTaskBody builds the request body expected by /drive/v1/import_tasks. +func (s driveImportSpec) CreateTaskBody(fileToken string) map[string]interface{} { + return map[string]interface{}{ + "file_extension": s.FileExtension(), + "file_token": fileToken, + "type": s.DocType, + "file_name": s.TargetFileName(), + "point": map[string]interface{}{ + "mount_type": 1, + // The import API treats an empty mount_key as "use the caller's root + // folder", so preserve the zero value when --folder-token is omitted. + "mount_key": s.FolderToken, + }, + } +} + +// validateDriveImportSpec enforces the CLI-level compatibility rules before any +// upload or import request is sent to the backend. +func validateDriveImportSpec(spec driveImportSpec) error { + ext := spec.FileExtension() + if ext == "" { + return output.ErrValidation("file must have an extension (e.g. .md, .docx, .xlsx)") + } + + switch spec.DocType { + case "docx", "sheet", "bitable": + default: + return output.ErrValidation("unsupported target document type: %s. 
Supported types are: docx, sheet, bitable", spec.DocType) + } + + supportedTypes, ok := driveImportExtToDocTypes[ext] + if !ok { + return output.ErrValidation("unsupported file extension: %s. Supported extensions are: docx, doc, txt, md, mark, markdown, html, xlsx, xls, csv", ext) + } + + typeAllowed := false + // Validate the extension/type pair locally so users get a precise error + // before the file upload step. + for _, allowedType := range supportedTypes { + if allowedType == spec.DocType { + typeAllowed = true + break + } + } + if !typeAllowed { + var hint string + switch ext { + case "xlsx", "xls", "csv": + hint = fmt.Sprintf(".%s files can only be imported as 'sheet' or 'bitable', not '%s'", ext, spec.DocType) + default: + hint = fmt.Sprintf(".%s files can only be imported as 'docx', not '%s'", ext, spec.DocType) + } + return output.ErrValidation("file type mismatch: %s", hint) + } + + if strings.TrimSpace(spec.FolderToken) != "" { + if err := validate.ResourceName(spec.FolderToken, "--folder-token"); err != nil { + return output.ErrValidation("%s", err) + } + } + + return nil +} + +// driveImportStatus captures the backend fields needed to decide whether the +// import can be surfaced immediately or requires a follow-up poll. +type driveImportStatus struct { + Ticket string + DocType string + Token string + URL string + JobErrorMsg string + Extra interface{} + JobStatus int +} + +func (s driveImportStatus) Ready() bool { + return s.Token != "" && s.JobStatus == 0 +} + +func (s driveImportStatus) Pending() bool { + return s.JobStatus == 1 || s.JobStatus == 2 || (s.JobStatus == 0 && s.Token == "") +} + +func (s driveImportStatus) Failed() bool { + return !s.Ready() && !s.Pending() && s.JobStatus != 0 +} + +func (s driveImportStatus) StatusLabel() string { + switch s.JobStatus { + case 0: + // Some responses report status=0 before the imported token is materialized. + // Treat that intermediate state as pending rather than completed. 
+ if s.Token == "" { + return "pending" + } + return "success" + case 1: + return "new" + case 2: + return "processing" + default: + return fmt.Sprintf("status_%d", s.JobStatus) + } +} + +// driveImportTaskResultCommand prints the resume command returned after bounded +// polling times out locally. +func driveImportTaskResultCommand(ticket string) string { + return fmt.Sprintf("lark-cli drive +task_result --scenario import --ticket %s", ticket) +} + +// createDriveImportTask creates the server-side import task after the media +// upload has produced a reusable file token. +func createDriveImportTask(runtime *common.RuntimeContext, spec driveImportSpec, fileToken string) (string, error) { + data, err := runtime.CallAPI("POST", "/open-apis/drive/v1/import_tasks", nil, spec.CreateTaskBody(fileToken)) + if err != nil { + return "", err + } + + ticket := common.GetString(data, "ticket") + if ticket == "" { + return "", output.Errorf(output.ExitAPI, "api_error", "no ticket returned from import_tasks") + } + return ticket, nil +} + +// getDriveImportStatus fetches the current state of an import task by ticket. +func getDriveImportStatus(runtime *common.RuntimeContext, ticket string) (driveImportStatus, error) { + if err := validate.ResourceName(ticket, "--ticket"); err != nil { + return driveImportStatus{}, output.ErrValidation("%s", err) + } + + data, err := runtime.CallAPI( + "GET", + fmt.Sprintf("/open-apis/drive/v1/import_tasks/%s", validate.EncodePathSegment(ticket)), + nil, + nil, + ) + if err != nil { + return driveImportStatus{}, err + } + + return parseDriveImportStatus(ticket, data), nil +} + +// parseDriveImportStatus accepts either the wrapped API response or an already +// extracted result object to keep the helper easy to test. +func parseDriveImportStatus(ticket string, data map[string]interface{}) driveImportStatus { + result := common.GetMap(data, "result") + if result == nil { + // Some tests and helper call sites already pass the unwrapped result body. 
+ result = data + } + + return driveImportStatus{ + Ticket: ticket, + DocType: common.GetString(result, "type"), + Token: common.GetString(result, "token"), + URL: common.GetString(result, "url"), + JobErrorMsg: common.GetString(result, "job_error_msg"), + Extra: result["extra"], + JobStatus: int(common.GetFloat(result, "job_status")), + } +} + +// pollDriveImportTask waits for the import to finish within a bounded window +// and returns the last observed status for resume-on-timeout flows. +func pollDriveImportTask(runtime *common.RuntimeContext, ticket string) (driveImportStatus, bool, error) { + lastStatus := driveImportStatus{Ticket: ticket} + var lastErr error + hadSuccessfulPoll := false + for attempt := 1; attempt <= driveImportPollAttempts; attempt++ { + if attempt > 1 { + time.Sleep(driveImportPollInterval) + } + + status, err := getDriveImportStatus(runtime, ticket) + if err != nil { + lastErr = err + // Log the error but continue polling. + fmt.Fprintf(runtime.IO().ErrOut, "Import status attempt %d/%d failed: %v\n", attempt, driveImportPollAttempts, err) + continue + } + lastStatus = status + hadSuccessfulPoll = true + + // Stop immediately on terminal states and otherwise return the last known + // status so the caller can expose a follow-up command on timeout. 
+ if status.Ready() { + fmt.Fprintf(runtime.IO().ErrOut, "Import completed successfully.\n") + return status, true, nil + } + if status.Failed() { + msg := strings.TrimSpace(status.JobErrorMsg) + if msg == "" { + msg = status.StatusLabel() + } + return status, false, output.Errorf(output.ExitAPI, "api_error", "import failed with status %d: %s", status.JobStatus, msg) + } + } + if !hadSuccessfulPoll && lastErr != nil { + return lastStatus, false, lastErr + } + + return lastStatus, false, nil +} diff --git a/shortcuts/drive/drive_import_common_test.go b/shortcuts/drive/drive_import_common_test.go new file mode 100644 index 00000000..83ea1c1c --- /dev/null +++ b/shortcuts/drive/drive_import_common_test.go @@ -0,0 +1,131 @@ +// Copyright (c) 2026 Lark Technologies Pte. Ltd. +// SPDX-License-Identifier: MIT + +package drive + +import ( + "bytes" + "os" + "strings" + "testing" + + "github.com/larksuite/cli/internal/cmdutil" + "github.com/larksuite/cli/internal/httpmock" +) + +func TestValidateDriveImportSpecRejectsMismatchedType(t *testing.T) { + t.Parallel() + + err := validateDriveImportSpec(driveImportSpec{ + FilePath: "./data.xlsx", + DocType: "docx", + }) + if err == nil || !strings.Contains(err.Error(), "file type mismatch") { + t.Fatalf("expected file type mismatch error, got %v", err) + } +} + +func TestParseDriveImportStatus(t *testing.T) { + t.Parallel() + + status := parseDriveImportStatus("tk_123", map[string]interface{}{ + "result": map[string]interface{}{ + "type": "sheet", + "job_status": 0, + "job_error_msg": "", + "token": "sheet_123", + "url": "https://example.com/sheets/sheet_123", + "extra": []interface{}{"2000"}, + }, + }) + + if !status.Ready() { + t.Fatal("expected import status to be ready") + } + if status.StatusLabel() != "success" { + t.Fatalf("status label = %q, want %q", status.StatusLabel(), "success") + } + if status.Token != "sheet_123" { + t.Fatalf("token = %q, want %q", status.Token, "sheet_123") + } +} + +func 
TestDriveImportStatusPendingWithoutToken(t *testing.T) { + t.Parallel() + + status := driveImportStatus{JobStatus: 0} + if status.Ready() { + t.Fatal("expected status without token to be not ready") + } + if !status.Pending() { + t.Fatal("expected status without token to be pending") + } + if got := status.StatusLabel(); got != "pending" { + t.Fatalf("StatusLabel() = %q, want %q", got, "pending") + } +} + +func TestDriveImportTimeoutReturnsFollowUpCommand(t *testing.T) { + f, stdout, _, reg := cmdutil.TestFactory(t, driveTestConfig()) + registerDriveBotTokenStub(reg) + reg.Register(&httpmock.Stub{ + Method: "POST", + URL: "/open-apis/drive/v1/medias/upload_all", + Body: map[string]interface{}{ + "code": 0, + "data": map[string]interface{}{"file_token": "file_123"}, + }, + }) + reg.Register(&httpmock.Stub{ + Method: "POST", + URL: "/open-apis/drive/v1/import_tasks", + Body: map[string]interface{}{ + "code": 0, + "data": map[string]interface{}{"ticket": "tk_import"}, + }, + }) + reg.Register(&httpmock.Stub{ + Method: "GET", + URL: "/open-apis/drive/v1/import_tasks/tk_import", + Body: map[string]interface{}{ + "code": 0, + "data": map[string]interface{}{ + "result": map[string]interface{}{ + "type": "sheet", + "job_status": 2, + }, + }, + }, + }) + + tmpDir := t.TempDir() + withDriveWorkingDir(t, tmpDir) + if err := os.WriteFile("data.xlsx", []byte("fake-xlsx"), 0644); err != nil { + t.Fatalf("WriteFile() error: %v", err) + } + + prevAttempts, prevInterval := driveImportPollAttempts, driveImportPollInterval + driveImportPollAttempts, driveImportPollInterval = 1, 0 + t.Cleanup(func() { + driveImportPollAttempts, driveImportPollInterval = prevAttempts, prevInterval + }) + + err := mountAndRunDrive(t, DriveImport, []string{ + "+import", + "--file", "data.xlsx", + "--type", "sheet", + "--as", "bot", + }, f, stdout) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if !bytes.Contains(stdout.Bytes(), []byte(`"ready": false`)) { + t.Fatalf("stdout missing 
ready=false: %s", stdout.String()) + } + if !bytes.Contains(stdout.Bytes(), []byte(`"timed_out": true`)) { + t.Fatalf("stdout missing timed_out=true: %s", stdout.String()) + } + if !bytes.Contains(stdout.Bytes(), []byte(`"next_command": "lark-cli drive +task_result --scenario import --ticket tk_import"`)) { + t.Fatalf("stdout missing follow-up command: %s", stdout.String()) + } +} diff --git a/shortcuts/drive/drive_import_test.go b/shortcuts/drive/drive_import_test.go new file mode 100644 index 00000000..ee4cbea6 --- /dev/null +++ b/shortcuts/drive/drive_import_test.go @@ -0,0 +1,155 @@ +// Copyright (c) 2026 Lark Technologies Pte. Ltd. +// SPDX-License-Identifier: MIT + +package drive + +import ( + "context" + "encoding/json" + "testing" + + "github.com/spf13/cobra" + + "github.com/larksuite/cli/shortcuts/common" +) + +func TestImportDefaultFileName(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + filePath string + want string + }{ + { + name: "strip xlsx extension", + filePath: "/tmp/base-import.xlsx", + want: "base-import", + }, + { + name: "strip last extension only", + filePath: "/tmp/report.final.csv", + want: "report.final", + }, + { + name: "keep name without extension", + filePath: "/tmp/README", + want: "README", + }, + { + name: "keep hidden file name when trim would be empty", + filePath: "/tmp/.env", + want: ".env", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + if got := importDefaultFileName(tt.filePath); got != tt.want { + t.Fatalf("importDefaultFileName(%q) = %q, want %q", tt.filePath, got, tt.want) + } + }) + } +} + +func TestImportTargetFileName(t *testing.T) { + t.Parallel() + + if got := importTargetFileName("/tmp/base-import.xlsx", "custom-name.xlsx"); got != "custom-name.xlsx" { + t.Fatalf("explicit name should win, got %q", got) + } + if got := importTargetFileName("/tmp/base-import.xlsx", ""); got != "base-import" { + t.Fatalf("default import name = %q, want %q", got, 
"base-import") + } +} + +func TestDriveImportDryRunUsesExtensionlessDefaultName(t *testing.T) { + t.Parallel() + + cmd := &cobra.Command{Use: "drive +import"} + cmd.Flags().String("file", "", "") + cmd.Flags().String("type", "", "") + cmd.Flags().String("folder-token", "", "") + cmd.Flags().String("name", "", "") + if err := cmd.Flags().Set("file", "./base-import.xlsx"); err != nil { + t.Fatalf("set --file: %v", err) + } + if err := cmd.Flags().Set("type", "bitable"); err != nil { + t.Fatalf("set --type: %v", err) + } + if err := cmd.Flags().Set("folder-token", "fld_test"); err != nil { + t.Fatalf("set --folder-token: %v", err) + } + + runtime := common.TestNewRuntimeContext(cmd, nil) + dry := DriveImport.DryRun(context.Background(), runtime) + if dry == nil { + t.Fatal("DryRun returned nil") + } + + data, err := json.Marshal(dry) + if err != nil { + t.Fatalf("marshal dry run: %v", err) + } + + var got struct { + API []struct { + Body map[string]interface{} `json:"body"` + } `json:"api"` + } + if err := json.Unmarshal(data, &got); err != nil { + t.Fatalf("unmarshal dry run json: %v", err) + } + if len(got.API) != 3 { + t.Fatalf("expected 3 API calls, got %d", len(got.API)) + } + + uploadName, _ := got.API[0].Body["file_name"].(string) + if uploadName != "base-import.xlsx" { + t.Fatalf("upload file_name = %q, want %q", uploadName, "base-import.xlsx") + } + + importName, _ := got.API[1].Body["file_name"].(string) + if importName != "base-import" { + t.Fatalf("import task file_name = %q, want %q", importName, "base-import") + } +} + +func TestDriveImportCreateTaskBodyKeepsEmptyMountKeyForRoot(t *testing.T) { + t.Parallel() + + spec := driveImportSpec{ + FilePath: "/tmp/README.md", + DocType: "docx", + } + + body := spec.CreateTaskBody("file_token_test") + point, ok := body["point"].(map[string]interface{}) + if !ok { + t.Fatalf("point = %#v, want map", body["point"]) + } + + raw, exists := point["mount_key"] + if !exists { + t.Fatal("mount_key missing; want empty 
string for root import") + } + got, ok := raw.(string) + if !ok { + t.Fatalf("mount_key type = %T, want string", raw) + } + if got != "" { + t.Fatalf("mount_key = %q, want empty string for root import", got) + } + + spec.FolderToken = "fld_test" + body = spec.CreateTaskBody("file_token_test") + point, ok = body["point"].(map[string]interface{}) + if !ok { + t.Fatalf("point = %#v, want map", body["point"]) + } + if got, _ := point["mount_key"].(string); got != "fld_test" { + t.Fatalf("mount_key = %q, want %q", got, "fld_test") + } +} diff --git a/shortcuts/drive/drive_io_test.go b/shortcuts/drive/drive_io_test.go index 66750d58..17f01ba2 100644 --- a/shortcuts/drive/drive_io_test.go +++ b/shortcuts/drive/drive_io_test.go @@ -5,9 +5,11 @@ package drive import ( "bytes" + "fmt" "net/http" "os" "strings" + "sync/atomic" "testing" "github.com/spf13/cobra" @@ -18,9 +20,11 @@ import ( "github.com/larksuite/cli/shortcuts/common" ) +var driveTestConfigSeq atomic.Int64 + func driveTestConfig() *core.CliConfig { return &core.CliConfig{ - AppID: "drive-test-app", AppSecret: "test-secret", Brand: core.BrandFeishu, + AppID: fmt.Sprintf("drive-test-app-%d", driveTestConfigSeq.Add(1)), AppSecret: "test-secret", Brand: core.BrandFeishu, } } diff --git a/shortcuts/drive/drive_move.go b/shortcuts/drive/drive_move.go new file mode 100644 index 00000000..2d2b5ed1 --- /dev/null +++ b/shortcuts/drive/drive_move.go @@ -0,0 +1,153 @@ +// Copyright (c) 2026 Lark Technologies Pte. Ltd. +// SPDX-License-Identifier: MIT + +package drive + +import ( + "context" + "fmt" + "strings" + + "github.com/larksuite/cli/internal/output" + "github.com/larksuite/cli/internal/validate" + "github.com/larksuite/cli/shortcuts/common" +) + +// DriveMove moves a Drive file or folder and handles the async task polling +// required by folder moves. 
+var DriveMove = common.Shortcut{ + Service: "drive", + Command: "+move", + Description: "Move a file or folder to another location in Drive", + Risk: "write", + Scopes: []string{"space:document:move"}, + AuthTypes: []string{"user", "bot"}, + Flags: []common.Flag{ + {Name: "file-token", Desc: "file or folder token to move", Required: true}, + {Name: "type", Desc: "file type (file, docx, bitable, doc, sheet, mindnote, folder, slides)", Required: true}, + {Name: "folder-token", Desc: "target folder token (default: root folder)"}, + }, + Validate: func(ctx context.Context, runtime *common.RuntimeContext) error { + return validateDriveMoveSpec(driveMoveSpec{ + FileToken: runtime.Str("file-token"), + FileType: strings.ToLower(runtime.Str("type")), + FolderToken: runtime.Str("folder-token"), + }) + }, + DryRun: func(ctx context.Context, runtime *common.RuntimeContext) *common.DryRunAPI { + spec := driveMoveSpec{ + FileToken: runtime.Str("file-token"), + FileType: strings.ToLower(runtime.Str("type")), + FolderToken: runtime.Str("folder-token"), + } + + dry := common.NewDryRunAPI(). + Desc("Move file or folder in Drive") + + dry.POST("/open-apis/drive/v1/files/:file_token/move"). + Desc("[1] Move file/folder"). + Set("file_token", spec.FileToken). + Body(spec.RequestBody()) + + // If moving a folder, show the async task check step + if spec.FileType == "folder" { + dry.GET("/open-apis/drive/v1/files/task_check"). + Desc("[2] Poll async task status (for folder move)"). + Params(driveTaskCheckParams("")) + } + + return dry + }, + Execute: func(ctx context.Context, runtime *common.RuntimeContext) error { + spec := driveMoveSpec{ + FileToken: runtime.Str("file-token"), + FileType: strings.ToLower(runtime.Str("type")), + FolderToken: runtime.Str("folder-token"), + } + + // Default to the caller's root folder so the command can move items + // without requiring an explicit destination in common cases. 
+ if spec.FolderToken == "" { + fmt.Fprintf(runtime.IO().ErrOut, "No target folder specified, getting root folder...\n") + rootToken, err := getRootFolderToken(ctx, runtime) + if err != nil { + return err + } + if rootToken == "" { + return output.Errorf(output.ExitAPI, "api_error", "get root folder token failed, root folder is empty") + } + spec.FolderToken = rootToken + } + + fmt.Fprintf(runtime.IO().ErrOut, "Moving %s %s to folder %s...\n", spec.FileType, common.MaskToken(spec.FileToken), common.MaskToken(spec.FolderToken)) + + data, err := runtime.CallAPI( + "POST", + fmt.Sprintf("/open-apis/drive/v1/files/%s/move", validate.EncodePathSegment(spec.FileToken)), + nil, + spec.RequestBody(), + ) + if err != nil { + return err + } + + // Folder moves are asynchronous; file moves complete in the initial call. + if spec.FileType == "folder" { + taskID := common.GetString(data, "task_id") + if taskID == "" { + return output.Errorf(output.ExitAPI, "api_error", "move folder returned no task_id") + } + + fmt.Fprintf(runtime.IO().ErrOut, "Folder move is async, polling task %s...\n", taskID) + + status, ready, err := pollDriveTaskCheck(runtime, taskID) + if err != nil { + return err + } + + // Include both the source and destination identifiers so a timed-out + // folder move can be resumed or inspected without reconstructing inputs. + out := map[string]interface{}{ + "task_id": taskID, + "status": status.StatusLabel(), + "file_token": spec.FileToken, + "folder_token": spec.FolderToken, + "ready": ready, + } + if !ready { + nextCommand := driveTaskCheckResultCommand(taskID) + fmt.Fprintf(runtime.IO().ErrOut, "Folder move task is still in progress. Continue with: %s\n", nextCommand) + out["timed_out"] = true + out["next_command"] = nextCommand + } + + runtime.Out(out, nil) + } else { + // Non-folder moves are synchronous, so the initial request is the final + // outcome and no follow-up task metadata is needed. 
+ runtime.Out(map[string]interface{}{ + "file_token": spec.FileToken, + "folder_token": spec.FolderToken, + "type": spec.FileType, + }, nil) + } + + return nil + }, +} + +// getRootFolderToken resolves the caller's Drive root folder token so other +// commands can safely use it as a default destination. +func getRootFolderToken(ctx context.Context, runtime *common.RuntimeContext) (string, error) { + data, err := runtime.CallAPI("GET", "/open-apis/drive/explorer/v2/root_folder/meta", nil, nil) + if err != nil { + return "", err + } + + token := common.GetString(data, "token") + if token == "" { + return "", output.Errorf(output.ExitAPI, "api_error", "root_folder/meta returned no token") + } + + return token, nil +} diff --git a/shortcuts/drive/drive_move_common.go b/shortcuts/drive/drive_move_common.go new file mode 100644 index 00000000..dfdaa0e6 --- /dev/null +++ b/shortcuts/drive/drive_move_common.go @@ -0,0 +1,160 @@ +// Copyright (c) 2026 Lark Technologies Pte. Ltd. +// SPDX-License-Identifier: MIT + +package drive + +import ( + "fmt" + "strings" + "time" + + "github.com/larksuite/cli/internal/output" + "github.com/larksuite/cli/internal/validate" + "github.com/larksuite/cli/shortcuts/common" +) + +var ( + driveMovePollAttempts = 30 + driveMovePollInterval = 2 * time.Second +) + +// driveMoveAllowedTypes mirrors the document kinds accepted by the Drive move +// endpoint that this shortcut wraps. +var driveMoveAllowedTypes = map[string]bool{ + "file": true, + "docx": true, + "bitable": true, + "doc": true, + "sheet": true, + "mindnote": true, + "folder": true, + "slides": true, +} + +// driveMoveSpec contains the normalized input needed to issue a move request. 
+type driveMoveSpec struct {
+	FileToken   string
+	FileType    string
+	FolderToken string
+}
+
+func (s driveMoveSpec) RequestBody() map[string]interface{} {
+	return map[string]interface{}{
+		"type":         s.FileType,
+		"folder_token": s.FolderToken,
+	}
+}
+
+func validateDriveMoveSpec(spec driveMoveSpec) error {
+	if err := validate.ResourceName(spec.FileToken, "--file-token"); err != nil {
+		return output.ErrValidation("%s", err)
+	}
+	if strings.TrimSpace(spec.FolderToken) != "" {
+		if err := validate.ResourceName(spec.FolderToken, "--folder-token"); err != nil {
+			return output.ErrValidation("%s", err)
+		}
+	}
+	if !driveMoveAllowedTypes[spec.FileType] {
+		return output.ErrValidation("unsupported file type: %s. Supported types: file, docx, bitable, doc, sheet, mindnote, folder, slides", spec.FileType)
+	}
+	return nil
+}
+
+// driveTaskCheckStatus represents the status payload returned by
+// /drive/v1/files/task_check for async folder operations.
+type driveTaskCheckStatus struct {
+	TaskID string
+	Status string
+}
+
+func (s driveTaskCheckStatus) Ready() bool {
+	return strings.EqualFold(strings.TrimSpace(s.Status), "success")
+}
+
+func (s driveTaskCheckStatus) Failed() bool {
+	return strings.EqualFold(strings.TrimSpace(s.Status), "failed")
+}
+
+func (s driveTaskCheckStatus) Pending() bool {
+	return !s.Ready() && !s.Failed()
+}
+
+func (s driveTaskCheckStatus) StatusLabel() string {
+	status := strings.TrimSpace(s.Status)
+	if status == "" {
+		// Empty status is treated as unknown so callers can still render a
+		// meaningful label instead of an empty string.
+		return "unknown"
+	}
+	return status
+}
+
+// driveTaskCheckResultCommand returns the resume command shown when bounded
+// polling ends before the backend task completes. 
+func driveTaskCheckResultCommand(taskID string) string { + return fmt.Sprintf("lark-cli drive +task_result --scenario task_check --task-id %s", taskID) +} + +// driveTaskCheckParams keeps the task_check query parameter shape in one place +// for both dry-run and execution paths. +func driveTaskCheckParams(taskID string) map[string]interface{} { + return map[string]interface{}{"task_id": taskID} +} + +// getDriveTaskCheckStatus fetches and validates the current state of an async +// folder move or delete task. +func getDriveTaskCheckStatus(runtime *common.RuntimeContext, taskID string) (driveTaskCheckStatus, error) { + if err := validate.ResourceName(taskID, "--task-id"); err != nil { + return driveTaskCheckStatus{}, output.ErrValidation("%s", err) + } + + data, err := runtime.CallAPI("GET", "/open-apis/drive/v1/files/task_check", driveTaskCheckParams(taskID), nil) + if err != nil { + return driveTaskCheckStatus{}, err + } + + return parseDriveTaskCheckStatus(taskID, data), nil +} + +// parseDriveTaskCheckStatus tolerates both wrapped and already-unwrapped +// response shapes used in tests and helpers. +func parseDriveTaskCheckStatus(taskID string, data map[string]interface{}) driveTaskCheckStatus { + result := common.GetMap(data, "result") + if result == nil { + result = data + } + + return driveTaskCheckStatus{ + TaskID: taskID, + Status: common.GetString(result, "status"), + } +} + +// pollDriveTaskCheck polls the backend for a bounded period and returns the +// last seen status so callers can emit a follow-up command when needed. 
+func pollDriveTaskCheck(runtime *common.RuntimeContext, taskID string) (driveTaskCheckStatus, bool, error) { + lastStatus := driveTaskCheckStatus{TaskID: taskID} + for attempt := 1; attempt <= driveMovePollAttempts; attempt++ { + if attempt > 1 { + time.Sleep(driveMovePollInterval) + } + + status, err := getDriveTaskCheckStatus(runtime, taskID) + if err != nil { + fmt.Fprintf(runtime.IO().ErrOut, "Error polling task %s: %s\n", taskID, err) + continue + } + lastStatus = status + // Success and failure are terminal backend states. Any other value is kept + // as pending so the caller can decide whether to continue or resume later. + if status.Ready() { + fmt.Fprintf(runtime.IO().ErrOut, "Folder move completed successfully.\n") + return status, true, nil + } + if status.Failed() { + return status, false, output.Errorf(output.ExitAPI, "api_error", "folder move task failed") + } + } + + return lastStatus, false, nil +} diff --git a/shortcuts/drive/drive_move_common_test.go b/shortcuts/drive/drive_move_common_test.go new file mode 100644 index 00000000..8221ada9 --- /dev/null +++ b/shortcuts/drive/drive_move_common_test.go @@ -0,0 +1,194 @@ +// Copyright (c) 2026 Lark Technologies Pte. Ltd. 
+// SPDX-License-Identifier: MIT + +package drive + +import ( + "bytes" + "context" + "encoding/json" + "testing" + + "github.com/spf13/cobra" + + "github.com/larksuite/cli/internal/cmdutil" + "github.com/larksuite/cli/internal/httpmock" + "github.com/larksuite/cli/shortcuts/common" +) + +func TestParseDriveTaskCheckStatusFallback(t *testing.T) { + t.Parallel() + + status := parseDriveTaskCheckStatus("task_123", map[string]interface{}{ + "status": "success", + }) + + if !status.Ready() { + t.Fatal("expected task check status to be ready") + } + if status.StatusLabel() != "success" { + t.Fatalf("status label = %q, want %q", status.StatusLabel(), "success") + } +} + +func TestDriveTaskCheckStatusPendingAndUnknownLabel(t *testing.T) { + t.Parallel() + + status := driveTaskCheckStatus{} + if !status.Pending() { + t.Fatal("expected empty status to be treated as pending") + } + if got := status.StatusLabel(); got != "unknown" { + t.Fatalf("StatusLabel() = %q, want %q", got, "unknown") + } +} + +func TestValidateDriveMoveSpecRejectsUnsupportedType(t *testing.T) { + t.Parallel() + + err := validateDriveMoveSpec(driveMoveSpec{ + FileToken: "file_token_test", + FileType: "unsupported_type", + }) + if err == nil { + t.Fatal("expected unsupported type error, got nil") + } + if got := err.Error(); !bytes.Contains([]byte(got), []byte("unsupported file type")) { + t.Fatalf("unexpected error: %v", err) + } +} + +func TestDriveMoveDryRunFolderIncludesTaskCheckParams(t *testing.T) { + t.Parallel() + + cmd := &cobra.Command{Use: "drive +move"} + cmd.Flags().String("file-token", "", "") + cmd.Flags().String("type", "", "") + cmd.Flags().String("folder-token", "", "") + if err := cmd.Flags().Set("file-token", "fld_src"); err != nil { + t.Fatalf("set --file-token: %v", err) + } + if err := cmd.Flags().Set("type", "folder"); err != nil { + t.Fatalf("set --type: %v", err) + } + if err := cmd.Flags().Set("folder-token", "fld_dst"); err != nil { + t.Fatalf("set --folder-token: %v", err) + } 
+ + runtime := common.TestNewRuntimeContext(cmd, nil) + dry := DriveMove.DryRun(context.Background(), runtime) + if dry == nil { + t.Fatal("DryRun returned nil") + } + + data, err := json.Marshal(dry) + if err != nil { + t.Fatalf("marshal dry run: %v", err) + } + + var got struct { + API []struct { + Params map[string]interface{} `json:"params"` + } `json:"api"` + } + if err := json.Unmarshal(data, &got); err != nil { + t.Fatalf("unmarshal dry run json: %v", err) + } + if len(got.API) != 2 { + t.Fatalf("expected 2 API calls, got %d", len(got.API)) + } + if got.API[1].Params["task_id"] != "" { + t.Fatalf("task check params = %#v", got.API[1].Params) + } +} + +func TestDriveMoveFolderSuccessUsesTaskCheckHelper(t *testing.T) { + f, stdout, _, reg := cmdutil.TestFactory(t, driveTestConfig()) + registerDriveBotTokenStub(reg) + reg.Register(&httpmock.Stub{ + Method: "POST", + URL: "/open-apis/drive/v1/files/fld_src/move", + Body: map[string]interface{}{ + "code": 0, + "data": map[string]interface{}{"task_id": "task_123"}, + }, + }) + reg.Register(&httpmock.Stub{ + Method: "GET", + URL: "/open-apis/drive/v1/files/task_check", + Body: map[string]interface{}{ + "code": 0, + "data": map[string]interface{}{"status": "success"}, + }, + }) + + prevAttempts, prevInterval := driveMovePollAttempts, driveMovePollInterval + driveMovePollAttempts, driveMovePollInterval = 1, 0 + t.Cleanup(func() { + driveMovePollAttempts, driveMovePollInterval = prevAttempts, prevInterval + }) + + err := mountAndRunDrive(t, DriveMove, []string{ + "+move", + "--file-token", "fld_src", + "--type", "folder", + "--folder-token", "fld_dst", + "--as", "bot", + }, f, stdout) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if !bytes.Contains(stdout.Bytes(), []byte(`"task_id": "task_123"`)) { + t.Fatalf("stdout missing task id: %s", stdout.String()) + } + if !bytes.Contains(stdout.Bytes(), []byte(`"ready": true`)) { + t.Fatalf("stdout missing ready=true: %s", stdout.String()) + } +} + +func 
TestDriveMoveFolderTimeoutReturnsFollowUpCommand(t *testing.T) { + f, stdout, _, reg := cmdutil.TestFactory(t, driveTestConfig()) + registerDriveBotTokenStub(reg) + reg.Register(&httpmock.Stub{ + Method: "POST", + URL: "/open-apis/drive/v1/files/fld_src/move", + Body: map[string]interface{}{ + "code": 0, + "data": map[string]interface{}{"task_id": "task_123"}, + }, + }) + reg.Register(&httpmock.Stub{ + Method: "GET", + URL: "/open-apis/drive/v1/files/task_check", + Body: map[string]interface{}{ + "code": 0, + "data": map[string]interface{}{"status": "pending"}, + }, + }) + + prevAttempts, prevInterval := driveMovePollAttempts, driveMovePollInterval + driveMovePollAttempts, driveMovePollInterval = 1, 0 + t.Cleanup(func() { + driveMovePollAttempts, driveMovePollInterval = prevAttempts, prevInterval + }) + + err := mountAndRunDrive(t, DriveMove, []string{ + "+move", + "--file-token", "fld_src", + "--type", "folder", + "--folder-token", "fld_dst", + "--as", "bot", + }, f, stdout) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if !bytes.Contains(stdout.Bytes(), []byte(`"ready": false`)) { + t.Fatalf("stdout missing ready=false: %s", stdout.String()) + } + if !bytes.Contains(stdout.Bytes(), []byte(`"timed_out": true`)) { + t.Fatalf("stdout missing timed_out=true: %s", stdout.String()) + } + if !bytes.Contains(stdout.Bytes(), []byte(`"next_command": "lark-cli drive +task_result --scenario task_check --task-id task_123"`)) { + t.Fatalf("stdout missing follow-up command: %s", stdout.String()) + } +} diff --git a/shortcuts/drive/drive_move_test.go b/shortcuts/drive/drive_move_test.go new file mode 100644 index 00000000..184b9e5e --- /dev/null +++ b/shortcuts/drive/drive_move_test.go @@ -0,0 +1,77 @@ +// Copyright (c) 2026 Lark Technologies Pte. Ltd. 
+// SPDX-License-Identifier: MIT + +package drive + +import ( + "strings" + "testing" + + "github.com/larksuite/cli/internal/cmdutil" + "github.com/larksuite/cli/internal/httpmock" +) + +func TestDriveMoveUsesRootFolderWhenFolderTokenMissing(t *testing.T) { + f, stdout, _, reg := cmdutil.TestFactory(t, driveTestConfig()) + registerDriveBotTokenStub(reg) + reg.Register(&httpmock.Stub{ + Method: "GET", + URL: "/open-apis/drive/explorer/v2/root_folder/meta", + Body: map[string]interface{}{ + "code": 0, + "data": map[string]interface{}{ + "token": "folder_root_token_test", + }, + }, + }) + reg.Register(&httpmock.Stub{ + Method: "POST", + URL: "/open-apis/drive/v1/files/file_token_test/move", + Body: map[string]interface{}{ + "code": 0, + "data": map[string]interface{}{}, + }, + }) + + err := mountAndRunDrive(t, DriveMove, []string{ + "+move", + "--file-token", "file_token_test", + "--type", "file", + "--as", "bot", + }, f, stdout) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if !strings.Contains(stdout.String(), `"folder_token": "folder_root_token_test"`) { + t.Fatalf("stdout missing resolved root folder token: %s", stdout.String()) + } + if !strings.Contains(stdout.String(), `"file_token": "file_token_test"`) { + t.Fatalf("stdout missing file token: %s", stdout.String()) + } +} + +func TestDriveMoveRootFolderLookupRequiresToken(t *testing.T) { + f, _, _, reg := cmdutil.TestFactory(t, driveTestConfig()) + registerDriveBotTokenStub(reg) + reg.Register(&httpmock.Stub{ + Method: "GET", + URL: "/open-apis/drive/explorer/v2/root_folder/meta", + Body: map[string]interface{}{ + "code": 0, + "data": map[string]interface{}{}, + }, + }) + + err := mountAndRunDrive(t, DriveMove, []string{ + "+move", + "--file-token", "file_token_test", + "--type", "file", + "--as", "bot", + }, f, nil) + if err == nil { + t.Fatal("expected missing root folder token error, got nil") + } + if !strings.Contains(err.Error(), "root_folder/meta returned no token") { + 
t.Fatalf("unexpected error: %v", err) + } +} diff --git a/shortcuts/drive/drive_task_result.go b/shortcuts/drive/drive_task_result.go new file mode 100644 index 00000000..7b52c3d3 --- /dev/null +++ b/shortcuts/drive/drive_task_result.go @@ -0,0 +1,190 @@ +// Copyright (c) 2026 Lark Technologies Pte. Ltd. +// SPDX-License-Identifier: MIT + +package drive + +import ( + "context" + "fmt" + "strings" + + "github.com/larksuite/cli/internal/output" + "github.com/larksuite/cli/internal/validate" + "github.com/larksuite/cli/shortcuts/common" +) + +// DriveTaskResult exposes a unified read path for the async task types produced +// by Drive import, export, and folder move flows. +var DriveTaskResult = common.Shortcut{ + Service: "drive", + Command: "+task_result", + Description: "Poll async task result for import, export, move, or delete operations", + Risk: "read", + Scopes: []string{"drive:drive.metadata:readonly"}, + AuthTypes: []string{"user", "bot"}, + Flags: []common.Flag{ + {Name: "ticket", Desc: "async task ticket (for import/export tasks)", Required: false}, + {Name: "task-id", Desc: "async task ID (for move/delete folder tasks)", Required: false}, + {Name: "scenario", Desc: "task scenario: import, export, or task_check", Required: true}, + {Name: "file-token", Desc: "source document token used for export task status lookup", Required: false}, + }, + Validate: func(ctx context.Context, runtime *common.RuntimeContext) error { + scenario := strings.ToLower(runtime.Str("scenario")) + validScenarios := map[string]bool{ + "import": true, + "export": true, + "task_check": true, + } + if !validScenarios[scenario] { + return output.ErrValidation("unsupported scenario: %s. 
Supported scenarios: import, export, task_check", scenario) + } + + // Validate required params based on scenario + switch scenario { + case "import", "export": + if runtime.Str("ticket") == "" { + return output.ErrValidation("--ticket is required for %s scenario", scenario) + } + if err := validate.ResourceName(runtime.Str("ticket"), "--ticket"); err != nil { + return output.ErrValidation("%s", err) + } + case "task_check": + if runtime.Str("task-id") == "" { + return output.ErrValidation("--task-id is required for task_check scenario") + } + if err := validate.ResourceName(runtime.Str("task-id"), "--task-id"); err != nil { + return output.ErrValidation("%s", err) + } + } + + // For export scenario, file-token is required + if scenario == "export" && runtime.Str("file-token") == "" { + return output.ErrValidation("--file-token is required for export scenario") + } + if scenario == "export" { + if err := validate.ResourceName(runtime.Str("file-token"), "--file-token"); err != nil { + return output.ErrValidation("%s", err) + } + } + + return nil + }, + DryRun: func(ctx context.Context, runtime *common.RuntimeContext) *common.DryRunAPI { + scenario := strings.ToLower(runtime.Str("scenario")) + ticket := runtime.Str("ticket") + taskID := runtime.Str("task-id") + fileToken := runtime.Str("file-token") + + dry := common.NewDryRunAPI() + dry.Desc(fmt.Sprintf("Poll async task result for %s scenario", scenario)) + + switch scenario { + case "import": + dry.GET("/open-apis/drive/v1/import_tasks/:ticket"). + Desc("[1] Query import task result"). + Set("ticket", ticket) + case "export": + dry.GET("/open-apis/drive/v1/export_tasks/:ticket"). + Desc("[1] Query export task result"). + Set("ticket", ticket). + Params(map[string]interface{}{"token": fileToken}) + case "task_check": + dry.GET("/open-apis/drive/v1/files/task_check"). + Desc("[1] Query move/delete folder task status"). 
+ Params(driveTaskCheckParams(taskID)) + } + + return dry + }, + Execute: func(ctx context.Context, runtime *common.RuntimeContext) error { + scenario := strings.ToLower(runtime.Str("scenario")) + ticket := runtime.Str("ticket") + taskID := runtime.Str("task-id") + fileToken := runtime.Str("file-token") + + fmt.Fprintf(runtime.IO().ErrOut, "Querying %s task result...\n", scenario) + + var result map[string]interface{} + var err error + + // Each scenario maps to a different backend API, but this shortcut keeps + // the CLI surface uniform for resume-on-timeout workflows. + switch scenario { + case "import": + result, err = queryImportTask(runtime, ticket) + case "export": + result, err = queryExportTask(runtime, ticket, fileToken) + case "task_check": + result, err = queryTaskCheck(runtime, taskID) + } + + if err != nil { + return err + } + + runtime.Out(result, nil) + return nil + }, +} + +// queryImportTask returns a stable, shortcut-friendly view of the import task. +func queryImportTask(runtime *common.RuntimeContext, ticket string) (map[string]interface{}, error) { + status, err := getDriveImportStatus(runtime, ticket) + if err != nil { + return nil, err + } + + return map[string]interface{}{ + "scenario": "import", + "ticket": status.Ticket, + "type": status.DocType, + "ready": status.Ready(), + "failed": status.Failed(), + "job_status": status.JobStatus, + "job_status_label": status.StatusLabel(), + "job_error_msg": status.JobErrorMsg, + "token": status.Token, + "url": status.URL, + "extra": status.Extra, + }, nil +} + +// queryExportTask returns the export task status together with download metadata +// once the backend has produced the exported file. 
+func queryExportTask(runtime *common.RuntimeContext, ticket, fileToken string) (map[string]interface{}, error) { + status, err := getDriveExportStatus(runtime, fileToken, ticket) + if err != nil { + return nil, err + } + + return map[string]interface{}{ + "scenario": "export", + "ticket": status.Ticket, + "ready": status.Ready(), + "failed": status.Failed(), + "file_extension": status.FileExtension, + "type": status.DocType, + "file_name": status.FileName, + "file_token": status.FileToken, + "file_size": status.FileSize, + "job_error_msg": status.JobErrorMsg, + "job_status": status.JobStatus, + "job_status_label": status.StatusLabel(), + }, nil +} + +// queryTaskCheck returns the normalized status of a folder move/delete task. +func queryTaskCheck(runtime *common.RuntimeContext, taskID string) (map[string]interface{}, error) { + status, err := getDriveTaskCheckStatus(runtime, taskID) + if err != nil { + return nil, err + } + + return map[string]interface{}{ + "scenario": "task_check", + "task_id": status.TaskID, + "status": status.StatusLabel(), + "ready": status.Ready(), + "failed": status.Failed(), + }, nil +} diff --git a/shortcuts/drive/drive_task_result_test.go b/shortcuts/drive/drive_task_result_test.go new file mode 100644 index 00000000..cb11ec75 --- /dev/null +++ b/shortcuts/drive/drive_task_result_test.go @@ -0,0 +1,192 @@ +// Copyright (c) 2026 Lark Technologies Pte. Ltd. 
+// SPDX-License-Identifier: MIT + +package drive + +import ( + "bytes" + "context" + "encoding/json" + "strings" + "testing" + + "github.com/spf13/cobra" + + "github.com/larksuite/cli/internal/cmdutil" + "github.com/larksuite/cli/internal/httpmock" + "github.com/larksuite/cli/shortcuts/common" +) + +func TestDriveTaskResultValidateErrorsByScenario(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + flags map[string]string + wantErr string + }{ + { + name: "unsupported scenario", + flags: map[string]string{ + "scenario": "unknown", + }, + wantErr: "unsupported scenario", + }, + { + name: "import missing ticket", + flags: map[string]string{ + "scenario": "import", + }, + wantErr: "--ticket is required", + }, + { + name: "export missing file token", + flags: map[string]string{ + "scenario": "export", + "ticket": "ticket_export_test", + }, + wantErr: "--file-token is required", + }, + { + name: "task check missing task id", + flags: map[string]string{ + "scenario": "task_check", + }, + wantErr: "--task-id is required", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + cmd := &cobra.Command{Use: "drive +task_result"} + cmd.Flags().String("scenario", "", "") + cmd.Flags().String("ticket", "", "") + cmd.Flags().String("task-id", "", "") + cmd.Flags().String("file-token", "", "") + for key, value := range tt.flags { + if err := cmd.Flags().Set(key, value); err != nil { + t.Fatalf("set --%s: %v", key, err) + } + } + + runtime := common.TestNewRuntimeContext(cmd, nil) + err := DriveTaskResult.Validate(context.Background(), runtime) + if err == nil || !strings.Contains(err.Error(), tt.wantErr) { + t.Fatalf("expected error containing %q, got %v", tt.wantErr, err) + } + }) + } +} + +func TestDriveTaskResultDryRunExportIncludesTokenParam(t *testing.T) { + t.Parallel() + + cmd := &cobra.Command{Use: "drive +task_result"} + cmd.Flags().String("scenario", "", "") + cmd.Flags().String("ticket", "", "") + 
cmd.Flags().String("task-id", "", "") + cmd.Flags().String("file-token", "", "") + if err := cmd.Flags().Set("scenario", "export"); err != nil { + t.Fatalf("set --scenario: %v", err) + } + if err := cmd.Flags().Set("ticket", "tk_export"); err != nil { + t.Fatalf("set --ticket: %v", err) + } + if err := cmd.Flags().Set("file-token", "doc_123"); err != nil { + t.Fatalf("set --file-token: %v", err) + } + + runtime := common.TestNewRuntimeContext(cmd, nil) + dry := DriveTaskResult.DryRun(context.Background(), runtime) + if dry == nil { + t.Fatal("DryRun returned nil") + } + + data, err := json.Marshal(dry) + if err != nil { + t.Fatalf("marshal dry run: %v", err) + } + + var got struct { + API []struct { + Params map[string]interface{} `json:"params"` + } `json:"api"` + } + if err := json.Unmarshal(data, &got); err != nil { + t.Fatalf("unmarshal dry run json: %v", err) + } + if len(got.API) != 1 { + t.Fatalf("expected 1 API call, got %d", len(got.API)) + } + if got.API[0].Params["token"] != "doc_123" { + t.Fatalf("export status params = %#v", got.API[0].Params) + } +} + +func TestDriveTaskResultImportIncludesReadyFlags(t *testing.T) { + f, stdout, _, reg := cmdutil.TestFactory(t, driveTestConfig()) + registerDriveBotTokenStub(reg) + reg.Register(&httpmock.Stub{ + Method: "GET", + URL: "/open-apis/drive/v1/import_tasks/tk_import", + Body: map[string]interface{}{ + "code": 0, + "data": map[string]interface{}{ + "result": map[string]interface{}{ + "type": "sheet", + "job_status": 2, + }, + }, + }, + }) + + err := mountAndRunDrive(t, DriveTaskResult, []string{ + "+task_result", + "--scenario", "import", + "--ticket", "tk_import", + "--as", "bot", + }, f, stdout) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if !bytes.Contains(stdout.Bytes(), []byte(`"ready": false`)) { + t.Fatalf("stdout missing ready=false: %s", stdout.String()) + } + if !bytes.Contains(stdout.Bytes(), []byte(`"job_status_label": "processing"`)) { + t.Fatalf("stdout missing 
job_status_label: %s", stdout.String()) + } +} + +func TestDriveTaskResultTaskCheckIncludesReadyFlags(t *testing.T) { + f, stdout, _, reg := cmdutil.TestFactory(t, driveTestConfig()) + registerDriveBotTokenStub(reg) + reg.Register(&httpmock.Stub{ + Method: "GET", + URL: "/open-apis/drive/v1/files/task_check", + Body: map[string]interface{}{ + "code": 0, + "data": map[string]interface{}{"status": "pending"}, + }, + }) + + err := mountAndRunDrive(t, DriveTaskResult, []string{ + "+task_result", + "--scenario", "task_check", + "--task-id", "task_123", + "--as", "bot", + }, f, stdout) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if !bytes.Contains(stdout.Bytes(), []byte(`"status": "pending"`)) { + t.Fatalf("stdout missing pending status: %s", stdout.String()) + } + if !bytes.Contains(stdout.Bytes(), []byte(`"ready": false`)) { + t.Fatalf("stdout missing ready=false: %s", stdout.String()) + } + if !bytes.Contains(stdout.Bytes(), []byte(`"failed": false`)) { + t.Fatalf("stdout missing failed=false: %s", stdout.String()) + } +} diff --git a/shortcuts/drive/shortcuts.go b/shortcuts/drive/shortcuts.go index fb12d6c6..e8fbad3c 100644 --- a/shortcuts/drive/shortcuts.go +++ b/shortcuts/drive/shortcuts.go @@ -11,5 +11,10 @@ func Shortcuts() []common.Shortcut { DriveUpload, DriveDownload, DriveAddComment, + DriveExport, + DriveExportDownload, + DriveImport, + DriveMove, + DriveTaskResult, } } diff --git a/shortcuts/drive/shortcuts_test.go b/shortcuts/drive/shortcuts_test.go new file mode 100644 index 00000000..1fbfe019 --- /dev/null +++ b/shortcuts/drive/shortcuts_test.go @@ -0,0 +1,40 @@ +// Copyright (c) 2026 Lark Technologies Pte. Ltd. 
+// SPDX-License-Identifier: MIT + +package drive + +import "testing" + +func TestShortcutsIncludesExpectedCommands(t *testing.T) { + t.Parallel() + + got := Shortcuts() + want := []string{ + "+upload", + "+download", + "+add-comment", + "+export", + "+export-download", + "+import", + "+move", + "+task_result", + } + + if len(got) != len(want) { + t.Fatalf("len(Shortcuts()) = %d, want %d", len(got), len(want)) + } + + seen := make(map[string]bool, len(got)) + for _, shortcut := range got { + if seen[shortcut.Command] { + t.Fatalf("duplicate shortcut command: %s", shortcut.Command) + } + seen[shortcut.Command] = true + } + + for _, command := range want { + if !seen[command] { + t.Fatalf("missing shortcut command %q in Shortcuts()", command) + } + } +} diff --git a/shortcuts/im/builders_test.go b/shortcuts/im/builders_test.go index 36e06521..a4a54de0 100644 --- a/shortcuts/im/builders_test.go +++ b/shortcuts/im/builders_test.go @@ -447,6 +447,17 @@ func TestShortcutValidateBranches(t *testing.T) { } }) + t.Run("ImMessagesSearch invalid page limit", func(t *testing.T) { + runtime := newTestRuntimeContext(t, map[string]string{ + "query": "incident", + "page-limit": "41", + }, nil) + err := ImMessagesSearch.Validate(context.Background(), runtime) + if err == nil || !strings.Contains(err.Error(), "--page-limit must be an integer between 1 and 40") { + t.Fatalf("ImMessagesSearch.Validate() error = %v", err) + } + }) + t.Run("ImMessagesSearch invalid sender id", func(t *testing.T) { runtime := newTestRuntimeContext(t, map[string]string{ "sender": "user_1", @@ -479,6 +490,45 @@ func TestShortcutValidateBranches(t *testing.T) { }) } +func TestMessagesSearchPaginationConfig(t *testing.T) { + t.Run("default single page", func(t *testing.T) { + runtime := newTestRuntimeContext(t, nil, nil) + autoPaginate, pageLimit := messagesSearchPaginationConfig(runtime) + if autoPaginate { + t.Fatal("messagesSearchPaginationConfig() autoPaginate = true, want false") + } + if pageLimit != 
messagesSearchDefaultPageLimit { + t.Fatalf("messagesSearchPaginationConfig() pageLimit = %d, want %d", pageLimit, messagesSearchDefaultPageLimit) + } + }) + + t.Run("page all uses max limit", func(t *testing.T) { + runtime := newTestRuntimeContext(t, nil, map[string]bool{ + "page-all": true, + }) + autoPaginate, pageLimit := messagesSearchPaginationConfig(runtime) + if !autoPaginate { + t.Fatal("messagesSearchPaginationConfig() autoPaginate = false, want true") + } + if pageLimit != messagesSearchMaxPageLimit { + t.Fatalf("messagesSearchPaginationConfig() pageLimit = %d, want %d", pageLimit, messagesSearchMaxPageLimit) + } + }) + + t.Run("explicit page limit enables auto pagination", func(t *testing.T) { + runtime := newTestRuntimeContext(t, map[string]string{ + "page-limit": "3", + }, nil) + autoPaginate, pageLimit := messagesSearchPaginationConfig(runtime) + if !autoPaginate { + t.Fatal("messagesSearchPaginationConfig() autoPaginate = false, want true") + } + if pageLimit != 3 { + t.Fatalf("messagesSearchPaginationConfig() pageLimit = %d, want 3", pageLimit) + } + }) +} + func TestShortcutDryRunShapes(t *testing.T) { t.Run("ImChatCreate dry run includes params and body", func(t *testing.T) { runtime := newTestRuntimeContext(t, map[string]string{ diff --git a/shortcuts/im/coverage_additional_test.go b/shortcuts/im/coverage_additional_test.go index c4389b7c..f9c931fd 100644 --- a/shortcuts/im/coverage_additional_test.go +++ b/shortcuts/im/coverage_additional_test.go @@ -101,6 +101,9 @@ func TestResolveMarkdownAsPost(t *testing.T) { if !strings.Contains(got, `#### Title`) || !strings.Contains(got, `##### Subtitle`) { t.Fatalf("resolveMarkdownAsPost() = %q, want optimized heading levels", got) } + if strings.Contains(got, `
`) { + t.Fatalf("resolveMarkdownAsPost() = %q, want no literal
", got) + } } func TestValidateContentFlags(t *testing.T) { diff --git a/shortcuts/im/helpers.go b/shortcuts/im/helpers.go index 15d12773..32a0a33f 100644 --- a/shortcuts/im/helpers.go +++ b/shortcuts/im/helpers.go @@ -619,31 +619,22 @@ func readMp4Duration(f *os.File, fileSize int64) int64 { // Steps: // 1. Extract code blocks with placeholders to protect them // 2. Downgrade headings: H1 → H4, H2~H6 → H5 (only when H1~H3 present) -// 3. Add
between consecutive headings -// 4. Add spacing around tables with
-// 5. Restore code blocks with
wrappers -// 6. Compress excess blank lines -// 7. Strip invalid image references (keep only img_xxx keys) +// 3. Normalize spacing between consecutive headings and tables with blank lines +// 4. Restore code blocks +// 5. Compress excess blank lines +// 6. Strip invalid image references (keep only img_xxx keys) var ( - reH2toH6 = regexp.MustCompile(`(?m)^#{2,6} (.+)$`) - reH1 = regexp.MustCompile(`(?m)^# (.+)$`) - reHasH1toH3 = regexp.MustCompile(`(?m)^#{1,3} `) - reConsecH = regexp.MustCompile(`(?m)^(#{4,5} .+)\n{1,2}(#{4,5} )`) - reTableNoGap = regexp.MustCompile(`(?m)^([^|\n].*)\n(\|.+\|)`) - reTableBefore = regexp.MustCompile(`\n\n((?:\|.+\|[^\S\n]*\n?)+)`) - reTableAfter = regexp.MustCompile(`(?m)((?:^\|.+\|[^\S\n]*\n?)+)`) - reTableTxtPre = regexp.MustCompile(`(?m)^([^\n]+)\n\n(
)\n\n(\|)`) - reTableBoldPre = regexp.MustCompile(`(?m)^(\*\*.+)\n\n(
)\n\n(\|)`) - reTableTxtPost = regexp.MustCompile(`(?m)(\|[^\n]*\n)\n(
\n)([^\n]+)`) - reExcessNL = regexp.MustCompile(`\n{3,}`) - reInvalidImg = regexp.MustCompile(`!\[[^\]]*\]\(([^)\s]+)\)`) - reCodeBlock = regexp.MustCompile("```[\\s\\S]*?```") + reH2toH6 = regexp.MustCompile(`(?m)^#{2,6} (.+)$`) + reH1 = regexp.MustCompile(`(?m)^# (.+)$`) + reHasH1toH3 = regexp.MustCompile(`(?m)^#{1,3} `) + reConsecH = regexp.MustCompile(`(?m)^(#{4,5} .+)\n{1,2}(#{4,5} )`) + reTableNoGap = regexp.MustCompile(`(?m)^([^|\n].*)\n(\|.+\|)`) + reTableAfter = regexp.MustCompile(`(?m)((?:^\|.+\|[^\S\n]*\n?)+)`) + reExcessNL = regexp.MustCompile(`\n{3,}`) + reInvalidImg = regexp.MustCompile(`!\[[^\]]*\]\(([^)\s]+)\)`) + reCodeBlock = regexp.MustCompile("```[\\s\\S]*?```") ) -func isTableSpacingProtectedLine(line string) bool { - return strings.HasPrefix(line, "#### ") || strings.HasPrefix(line, "##### ") || strings.HasPrefix(line, "**") -} - func optimizeMarkdownStyle(text string) string { const mark = "___CB_" var codeBlocks []string @@ -659,29 +650,13 @@ func optimizeMarkdownStyle(text string) string { r = reH1.ReplaceAllString(r, "#### $1") } - r = reConsecH.ReplaceAllString(r, "$1\n
\n$2") + r = reConsecH.ReplaceAllString(r, "$1\n\n$2") r = reTableNoGap.ReplaceAllString(r, "$1\n\n$2") - r = reTableBefore.ReplaceAllString(r, "\n\n
\n\n$1") - r = reTableAfter.ReplaceAllString(r, "$1\n
\n") - r = reTableTxtPre.ReplaceAllStringFunc(r, func(m string) string { - sub := reTableTxtPre.FindStringSubmatch(m) - if len(sub) != 4 || isTableSpacingProtectedLine(sub[1]) { - return m - } - return sub[1] + "\n" + sub[2] + "\n" + sub[3] - }) - r = reTableBoldPre.ReplaceAllString(r, "$1\n$2\n\n$3") - r = reTableTxtPost.ReplaceAllStringFunc(r, func(m string) string { - sub := reTableTxtPost.FindStringSubmatch(m) - if len(sub) != 4 || isTableSpacingProtectedLine(sub[3]) { - return m - } - return sub[1] + sub[2] + sub[3] - }) + r = reTableAfter.ReplaceAllString(r, "$1\n") for i, block := range codeBlocks { - r = strings.Replace(r, fmt.Sprintf("%s%d___", mark, i), "\n
\n"+block+"\n
\n", 1) + r = strings.Replace(r, fmt.Sprintf("%s%d___", mark, i), block, 1) } r = reExcessNL.ReplaceAllString(r, "\n\n") @@ -901,7 +876,7 @@ func uploadImageToIM(ctx context.Context, runtime *common.RuntimeContext, filePa fd.AddField("image_type", imageType) fd.AddFile("image", f) - apiResp, err := runtime.DoAPI(&larkcore.ApiReq{ + apiResp, err := runtime.DoAPIAsBot(&larkcore.ApiReq{ HttpMethod: http.MethodPost, ApiPath: "/open-apis/im/v1/images", Body: fd, @@ -947,7 +922,7 @@ func uploadFileToIM(ctx context.Context, runtime *common.RuntimeContext, filePat } fd.AddFile("file", f) - apiResp, err := runtime.DoAPI(&larkcore.ApiReq{ + apiResp, err := runtime.DoAPIAsBot(&larkcore.ApiReq{ HttpMethod: http.MethodPost, ApiPath: "/open-apis/im/v1/files", Body: fd, diff --git a/shortcuts/im/helpers_network_test.go b/shortcuts/im/helpers_network_test.go index 3321b683..b09b45e0 100644 --- a/shortcuts/im/helpers_network_test.go +++ b/shortcuts/im/helpers_network_test.go @@ -263,7 +263,7 @@ func TestDownloadIMResourceToPathSuccess(t *testing.T) { })) target := filepath.Join(t.TempDir(), "nested", "resource.bin") - size, err := downloadIMResourceToPath(context.Background(), runtime, "om_123", "file_123", "file", target) + _, size, err := downloadIMResourceToPath(context.Background(), runtime, "om_123", "file_123", "file", target) if err != nil { t.Fatalf("downloadIMResourceToPath() error = %v", err) } @@ -307,7 +307,7 @@ func TestDownloadIMResourceToPathHTTPErrorBody(t *testing.T) { } })) - _, err := downloadIMResourceToPath(context.Background(), runtime, "om_403", "file_403", "file", filepath.Join(t.TempDir(), "out.bin")) + _, _, err := downloadIMResourceToPath(context.Background(), runtime, "om_403", "file_403", "file", filepath.Join(t.TempDir(), "out.bin")) if err == nil || !strings.Contains(err.Error(), "HTTP 403: denied") { t.Fatalf("downloadIMResourceToPath() error = %v", err) } diff --git a/shortcuts/im/helpers_test.go b/shortcuts/im/helpers_test.go index 
e5cbf7e0..bf9bf870 100644 --- a/shortcuts/im/helpers_test.go +++ b/shortcuts/im/helpers_test.go @@ -282,7 +282,7 @@ func TestOptimizeMarkdownStyle(t *testing.T) { { name: "heading downgrade H1 and H2", input: "# Title\n## Section\ntext", - want: "#### Title\n
\n##### Section\ntext", + want: "#### Title\n\n##### Section\ntext", }, { name: "no downgrade when no H1-H3", @@ -292,17 +292,17 @@ func TestOptimizeMarkdownStyle(t *testing.T) { { name: "code block protected", input: "# Title\n```\n# not a heading\n```\ntext", - want: "#### Title\n\n
\n```\n# not a heading\n```\n
\n\ntext", + want: "#### Title\n```\n# not a heading\n```\ntext", }, { name: "table spacing", input: "text\n| A | B |\n| - | - |\n| 1 | 2 |\nafter", - want: "text\n
\n| A | B |\n| - | - |\n| 1 | 2 |\n
\nafter", + want: "text\n\n| A | B |\n| - | - |\n| 1 | 2 |\n\nafter", }, { name: "table spacing keeps heading separation", input: "# Title\n| A | B |\n| - | - |\n| 1 | 2 |\n## Next", - want: "#### Title\n\n
\n\n| A | B |\n| - | - |\n| 1 | 2 |\n\n
\n##### Next", + want: "#### Title\n\n| A | B |\n| - | - |\n| 1 | 2 |\n\n##### Next", }, { name: "excess blank lines compressed", @@ -483,7 +483,7 @@ func TestDownloadIMResourceToPathHTTPClientError(t *testing.T) { }, } - _, err := downloadIMResourceToPath(context.Background(), runtime, "om_123", "img_123", "image", "out.bin") + _, _, err := downloadIMResourceToPath(context.Background(), runtime, "om_123", "img_123", "image", "out.bin") if err == nil || !strings.Contains(err.Error(), "http client unavailable") { t.Fatalf("downloadIMResourceToPath() error = %v", err) } diff --git a/shortcuts/im/im_messages_reply.go b/shortcuts/im/im_messages_reply.go index d7aab26b..30087d58 100644 --- a/shortcuts/im/im_messages_reply.go +++ b/shortcuts/im/im_messages_reply.go @@ -17,10 +17,12 @@ import ( var ImMessagesReply = common.Shortcut{ Service: "im", Command: "+messages-reply", - Description: "Reply to a message (supports thread replies) with bot identity; bot-only; supports text/markdown/post/media replies, reply-in-thread, idempotency key", + Description: "Reply to a message (supports thread replies); user/bot; supports text/markdown/post/media replies, reply-in-thread, idempotency key", Risk: "write", Scopes: []string{"im:message:send_as_bot"}, - AuthTypes: []string{"bot"}, + UserScopes: []string{"im:message.send_as_user"}, + BotScopes: []string{"im:message:send_as_bot"}, + AuthTypes: []string{"bot", "user"}, Flags: []common.Flag{ {Name: "message-id", Desc: "message ID (om_xxx)", Required: true}, {Name: "msg-type", Default: "text", Desc: "message type for --content JSON; when using --text/--markdown/--image/--file/--video/--audio, the effective type is inferred automatically", Enum: []string{"text", "post", "image", "file", "audio", "media", "interactive", "share_chat", "share_user"}}, diff --git a/shortcuts/im/im_messages_resources_download.go b/shortcuts/im/im_messages_resources_download.go index 259c1391..beeaacd8 100644 --- 
a/shortcuts/im/im_messages_resources_download.go +++ b/shortcuts/im/im_messages_resources_download.go @@ -72,12 +72,12 @@ var ImMessagesResourcesDownload = common.Shortcut{ return output.ErrValidation("unsafe output path: %s", err) } - sizeBytes, err := downloadIMResourceToPath(ctx, runtime, messageId, fileKey, fileType, safePath) + finalPath, sizeBytes, err := downloadIMResourceToPath(ctx, runtime, messageId, fileKey, fileType, safePath) if err != nil { return err } - runtime.Out(map[string]interface{}{"saved_path": safePath, "size_bytes": sizeBytes}, nil) + runtime.Out(map[string]interface{}{"saved_path": finalPath, "size_bytes": sizeBytes}, nil) return nil }, } @@ -108,7 +108,38 @@ func normalizeDownloadOutputPath(fileKey, outputPath string) (string, error) { const defaultIMResourceDownloadTimeout = 120 * time.Second -func downloadIMResourceToPath(ctx context.Context, runtime *common.RuntimeContext, messageID, fileKey, fileType, safePath string) (int64, error) { +var imMimeToExt = map[string]string{ + "image/png": ".png", + "image/jpeg": ".jpg", + "image/gif": ".gif", + "image/webp": ".webp", + "image/svg+xml": ".svg", + "application/pdf": ".pdf", + "video/mp4": ".mp4", + "video/3gpp": ".3gp", + "video/x-msvideo": ".avi", + "audio/mpeg": ".mp3", + "audio/ogg": ".ogg", + "audio/wav": ".wav", + "text/plain": ".txt", + "text/html": ".html", + "text/css": ".css", + "text/csv": ".csv", + "application/zip": ".zip", + "application/x-zip-compressed": ".zip", + "application/x-rar-compressed": ".rar", + "application/json": ".json", + "application/xml": ".xml", + "application/octet-stream": ".bin", + "application/msword": ".doc", + "application/vnd.openxmlformats-officedocument.wordprocessingml.document": ".docx", + "application/vnd.ms-excel": ".xls", + "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet": ".xlsx", + "application/vnd.ms-powerpoint": ".ppt", + "application/vnd.openxmlformats-officedocument.presentationml.presentation": ".pptx", +} + +func 
downloadIMResourceToPath(ctx context.Context, runtime *common.RuntimeContext, messageID, fileKey, fileType, safePath string) (string, int64, error) { query := larkcore.QueryParams{} query.Set("type", fileType) downloadResp, err := runtime.DoAPIStream(ctx, &larkcore.ApiReq{ @@ -121,24 +152,36 @@ func downloadIMResourceToPath(ctx context.Context, runtime *common.RuntimeContex QueryParams: query, }, defaultIMResourceDownloadTimeout) if err != nil { - return 0, err + return "", 0, err } defer downloadResp.Body.Close() if downloadResp.StatusCode >= 400 { body, _ := io.ReadAll(io.LimitReader(downloadResp.Body, 4096)) if len(body) > 0 { - return 0, output.ErrNetwork("download failed: HTTP %d: %s", downloadResp.StatusCode, strings.TrimSpace(string(body))) + return "", 0, output.ErrNetwork("download failed: HTTP %d: %s", downloadResp.StatusCode, strings.TrimSpace(string(body))) } - return 0, output.ErrNetwork("download failed: HTTP %d", downloadResp.StatusCode) + return "", 0, output.ErrNetwork("download failed: HTTP %d", downloadResp.StatusCode) } if err := os.MkdirAll(filepath.Dir(safePath), 0700); err != nil { - return 0, output.Errorf(output.ExitInternal, "api_error", "cannot create parent directory: %s", err) + return "", 0, output.Errorf(output.ExitInternal, "api_error", "cannot create parent directory: %s", err) + } + + // Auto-detect extension from Content-Type if missing + finalPath := safePath + if filepath.Ext(safePath) == "" { + contentType := downloadResp.Header.Get("Content-Type") + mimeType := strings.Split(contentType, ";")[0] + mimeType = strings.TrimSpace(mimeType) + if ext, ok := imMimeToExt[mimeType]; ok { + finalPath = safePath + ext + } } - sizeBytes, err := validate.AtomicWriteFromReader(safePath, downloadResp.Body, 0600) + + sizeBytes, err := validate.AtomicWriteFromReader(finalPath, downloadResp.Body, 0600) if err != nil { - return 0, output.Errorf(output.ExitInternal, "api_error", "cannot create file: %s", err) + return "", 0, 
output.Errorf(output.ExitInternal, "api_error", "cannot create file: %s", err) } - return sizeBytes, nil + return finalPath, sizeBytes, nil } diff --git a/shortcuts/im/im_messages_search.go b/shortcuts/im/im_messages_search.go index 3b0f7dfc..d122cc23 100644 --- a/shortcuts/im/im_messages_search.go +++ b/shortcuts/im/im_messages_search.go @@ -9,6 +9,7 @@ import ( "io" "net/http" "strconv" + "strings" "github.com/larksuite/cli/internal/output" "github.com/larksuite/cli/shortcuts/common" @@ -16,6 +17,15 @@ import ( larkcore "github.com/larksuite/oapi-sdk-go/v3/core" ) +const ( + messagesSearchDefaultPageSize = 20 + messagesSearchMaxPageSize = 50 + messagesSearchDefaultPageLimit = 20 + messagesSearchMaxPageLimit = 40 + messagesSearchMGetBatchSize = 50 + messagesSearchChatBatchSize = 50 +) + var ImMessagesSearch = common.Shortcut{ Service: "im", Command: "+messages-search", @@ -37,6 +47,8 @@ var ImMessagesSearch = common.Shortcut{ {Name: "end", Desc: "end time(ISO 8601) with local timezone offset (e.g. 2026-03-25T23:59:59+08:00)"}, {Name: "page-size", Default: "20", Desc: "page size (1-50)"}, {Name: "page-token", Desc: "page token"}, + {Name: "page-all", Type: "bool", Desc: "automatically paginate search results"}, + {Name: "page-limit", Type: "int", Default: "20", Desc: "max search pages when auto-pagination is enabled (default 20, max 40)"}, }, DryRun: func(ctx context.Context, runtime *common.RuntimeContext) *common.DryRunAPI { req, err := buildMessagesSearchRequest(runtime) @@ -49,8 +61,14 @@ var ImMessagesSearch = common.Shortcut{ dryParams[k] = vs[0] } } - return common.NewDryRunAPI(). - Desc("Step 1: search messages"). + autoPaginate, pageLimit := messagesSearchPaginationConfig(runtime) + d := common.NewDryRunAPI() + if autoPaginate { + d = d.Desc(fmt.Sprintf("Step 1: search messages (auto-paginates up to %d page(s))", pageLimit)) + } else { + d = d.Desc("Step 1: search messages") + } + return d. POST("/open-apis/im/v1/messages/search"). Params(dryParams). 
Body(req.body). @@ -67,12 +85,10 @@ var ImMessagesSearch = common.Shortcut{ return err } - searchData, err := runtime.DoAPIJSON(http.MethodPost, "/open-apis/im/v1/messages/search", req.params, req.body) + rawItems, hasMore, nextPageToken, truncatedByLimit, pageLimit, err := searchMessages(runtime, req) if err != nil { return err } - rawItems, _ := searchData["items"].([]interface{}) - hasMore, nextPageToken := common.PaginationMeta(searchData) if len(rawItems) == 0 { outData := map[string]interface{}{ @@ -99,8 +115,7 @@ var ImMessagesSearch = common.Shortcut{ } // ── Step 2: Batch fetch message details (mget) ── - mgetURL := buildMGetURL(messageIds) - mgetData, err := runtime.DoAPIJSON(http.MethodGet, mgetURL, nil, nil) + msgItems, err := batchMGetMessages(runtime, messageIds) if err != nil { // Fallback when mget fails: return ID list only outData := map[string]interface{}{ @@ -118,37 +133,22 @@ var ImMessagesSearch = common.Shortcut{ }) return nil } - msgItems, _ := mgetData["items"].([]interface{}) // ── Step 3: Batch fetch chat info ── - chatIdSet := map[string]bool{} + chatIds := make([]string, 0, len(msgItems)) + chatSeen := make(map[string]bool) for _, item := range msgItems { m, _ := item.(map[string]interface{}) if chatId, _ := m["chat_id"].(string); chatId != "" { - chatIdSet[chatId] = true + if !chatSeen[chatId] { + chatSeen[chatId] = true + chatIds = append(chatIds, chatId) + } } } chatContexts := map[string]map[string]interface{}{} - if len(chatIdSet) > 0 { - chatIds := make([]string, 0, len(chatIdSet)) - for id := range chatIdSet { - chatIds = append(chatIds, id) - } - chatRes, chatErr := runtime.DoAPIJSON( - http.MethodPost, "/open-apis/im/v1/chats/batch_query", - larkcore.QueryParams{"user_id_type": []string{"open_id"}}, - map[string]interface{}{"chat_ids": chatIds}, - ) - if chatErr == nil { - if chatItems, ok := chatRes["items"].([]interface{}); ok { - for _, ci := range chatItems { - cm, _ := ci.(map[string]interface{}) - if cid, _ := 
cm["chat_id"].(string); cid != "" { - chatContexts[cid] = cm - } - } - } - } + if len(chatIds) > 0 { + chatContexts = batchQueryChatContexts(runtime, chatIds) } // ── Step 4: Format message content + attach chat context ── @@ -225,6 +225,9 @@ var ImMessagesSearch = common.Shortcut{ moreHint = " (more available, use --page-token to fetch next page)" } fmt.Fprintf(w, "\n%d search result(s)%s\n", len(enriched), moreHint) + if truncatedByLimit { + fmt.Fprintf(w, "warning: stopped after fetching %d page(s); use --page-limit, --page-all, or --page-token to continue\n", pageLimit) + } }) return nil }, @@ -247,6 +250,14 @@ func buildMessagesSearchRequest(runtime *common.RuntimeContext) (*messagesSearch endFlag := runtime.Str("end") pageSizeStr := runtime.Str("page-size") pageToken := runtime.Str("page-token") + pageLimitStr := strings.TrimSpace(runtime.Str("page-limit")) + + if runtime.Cmd != nil && runtime.Cmd.Flags().Changed("page-limit") { + pageLimit, err := strconv.Atoi(pageLimitStr) + if err != nil || pageLimit < 1 || pageLimit > messagesSearchMaxPageLimit { + return nil, output.ErrValidation("--page-limit must be an integer between 1 and 40") + } + } filter := map[string]interface{}{} timeRange := map[string]interface{}{} @@ -322,14 +333,14 @@ func buildMessagesSearchRequest(runtime *common.RuntimeContext) (*messagesSearch body["filter"] = filter } - pageSize := 20 + pageSize := messagesSearchDefaultPageSize if pageSizeStr != "" { n, err := strconv.Atoi(pageSizeStr) if err != nil || n < 1 { return nil, output.ErrValidation("--page-size must be an integer between 1 and 50") } - if n > 50 { - n = 50 + if n > messagesSearchMaxPageSize { + n = messagesSearchMaxPageSize } pageSize = n } @@ -346,3 +357,124 @@ func buildMessagesSearchRequest(runtime *common.RuntimeContext) (*messagesSearch body: body, }, nil } + +func messagesSearchPaginationConfig(runtime *common.RuntimeContext) (autoPaginate bool, pageLimit int) { + autoPaginate = runtime.Bool("page-all") + if 
runtime.Cmd != nil && runtime.Cmd.Flags().Changed("page-limit") { + autoPaginate = true + } + + pageLimit = messagesSearchDefaultPageLimit + if runtime.Cmd != nil && runtime.Cmd.Flags().Changed("page-limit") { + if n, err := strconv.Atoi(strings.TrimSpace(runtime.Str("page-limit"))); err == nil && n > 0 { + pageLimit = min(n, messagesSearchMaxPageLimit) + } + } else if runtime.Bool("page-all") { + pageLimit = messagesSearchMaxPageLimit + } + return autoPaginate, pageLimit +} + +func searchMessages(runtime *common.RuntimeContext, req *messagesSearchRequest) ([]interface{}, bool, string, bool, int, error) { + autoPaginate, pageLimit := messagesSearchPaginationConfig(runtime) + pageToken := "" + if tokens := req.params["page_token"]; len(tokens) > 0 { + pageToken = tokens[0] + } + + pageSize := strconv.Itoa(messagesSearchDefaultPageSize) + if sizes := req.params["page_size"]; len(sizes) > 0 { + pageSize = sizes[0] + } + + var ( + allItems []interface{} + lastHasMore bool + lastPageToken string + truncatedByLimit bool + pageCount int + ) + + for { + pageCount++ + params := larkcore.QueryParams{ + "page_size": []string{pageSize}, + } + if pageToken != "" { + params["page_token"] = []string{pageToken} + } + + searchData, err := runtime.DoAPIJSON(http.MethodPost, "/open-apis/im/v1/messages/search", params, req.body) + if err != nil { + return nil, false, "", false, pageLimit, err + } + + items, _ := searchData["items"].([]interface{}) + allItems = append(allItems, items...) 
+ lastHasMore, lastPageToken = common.PaginationMeta(searchData) + + if !autoPaginate || !lastHasMore || lastPageToken == "" { + break + } + if pageCount >= pageLimit { + truncatedByLimit = true + break + } + + pageToken = lastPageToken + } + + return allItems, lastHasMore, lastPageToken, truncatedByLimit, pageLimit, nil +} + +func batchMGetMessages(runtime *common.RuntimeContext, messageIds []string) ([]interface{}, error) { + var items []interface{} + for _, batch := range chunkStrings(messageIds, messagesSearchMGetBatchSize) { + mgetData, err := runtime.DoAPIJSON(http.MethodGet, buildMGetURL(batch), nil, nil) + if err != nil { + return nil, err + } + batchItems, _ := mgetData["items"].([]interface{}) + items = append(items, batchItems...) + } + return items, nil +} + +func batchQueryChatContexts(runtime *common.RuntimeContext, chatIds []string) map[string]map[string]interface{} { + chatContexts := map[string]map[string]interface{}{} + for _, batch := range chunkStrings(chatIds, messagesSearchChatBatchSize) { + chatRes, chatErr := runtime.DoAPIJSON( + http.MethodPost, "/open-apis/im/v1/chats/batch_query", + larkcore.QueryParams{"user_id_type": []string{"open_id"}}, + map[string]interface{}{"chat_ids": batch}, + ) + if chatErr != nil { + continue + } + if chatItems, ok := chatRes["items"].([]interface{}); ok { + for _, ci := range chatItems { + cm, _ := ci.(map[string]interface{}) + if cid, _ := cm["chat_id"].(string); cid != "" { + chatContexts[cid] = cm + } + } + } + } + return chatContexts +} + +func chunkStrings(items []string, chunkSize int) [][]string { + if len(items) == 0 || chunkSize <= 0 { + return nil + } + + chunks := make([][]string, 0, (len(items)+chunkSize-1)/chunkSize) + for start := 0; start < len(items); start += chunkSize { + end := start + chunkSize + if end > len(items) { + end = len(items) + } + chunks = append(chunks, items[start:end]) + } + return chunks +} diff --git a/shortcuts/im/im_messages_search_execute_test.go 
b/shortcuts/im/im_messages_search_execute_test.go new file mode 100644 index 00000000..6cccd184 --- /dev/null +++ b/shortcuts/im/im_messages_search_execute_test.go @@ -0,0 +1,285 @@ +// Copyright (c) 2026 Lark Technologies Pte. Ltd. +// SPDX-License-Identifier: MIT + +package im + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "io" + "net/http" + "reflect" + "strings" + "testing" + + "github.com/larksuite/cli/shortcuts/common" + "github.com/spf13/cobra" +) + +func newMessagesSearchRuntime(t *testing.T, stringFlags map[string]string, boolFlags map[string]bool, rt http.RoundTripper) *common.RuntimeContext { + t.Helper() + + runtime := newBotShortcutRuntime(t, rt) + cmd := &cobra.Command{Use: "test"} + + stringFlagNames := []string{ + "query", + "page-size", + "page-token", + "page-limit", + } + for _, name := range stringFlagNames { + cmd.Flags().String(name, "", "") + } + boolFlagNames := []string{"page-all"} + for _, name := range boolFlagNames { + cmd.Flags().Bool(name, false, "") + } + if err := cmd.ParseFlags(nil); err != nil { + t.Fatalf("ParseFlags() error = %v", err) + } + for name, value := range stringFlags { + if err := cmd.Flags().Set(name, value); err != nil { + t.Fatalf("Flags().Set(%q) error = %v", name, err) + } + } + for name, value := range boolFlags { + if err := cmd.Flags().Set(name, map[bool]string{true: "true", false: "false"}[value]); err != nil { + t.Fatalf("Flags().Set(%q) error = %v", name, err) + } + } + runtime.Cmd = cmd + runtime.Format = "pretty" + return runtime +} + +func TestImMessagesSearchExecuteAutoPaginationBatches(t *testing.T) { + var ( + searchPageTokens []string + mgetBatchSizes []int + chatBatchSizes []int + ) + + runtime := newMessagesSearchRuntime(t, map[string]string{ + "query": "incident", + "page-limit": "2", + }, map[string]bool{ + "page-all": true, + }, shortcutRoundTripFunc(func(req *http.Request) (*http.Response, error) { + switch { + case strings.Contains(req.URL.Path, "tenant_access_token"): + return 
shortcutJSONResponse(200, map[string]interface{}{ + "code": 0, + "tenant_access_token": "tenant-token", + "expire": 7200, + }), nil + case strings.Contains(req.URL.Path, "/open-apis/im/v1/messages/search"): + pageToken := req.URL.Query().Get("page_token") + searchPageTokens = append(searchPageTokens, pageToken) + switch pageToken { + case "": + return shortcutJSONResponse(200, map[string]interface{}{ + "code": 0, + "data": map[string]interface{}{ + "items": buildSearchResultItems(1, 50), + "has_more": true, + "page_token": "tok_p2", + }, + }), nil + case "tok_p2": + return shortcutJSONResponse(200, map[string]interface{}{ + "code": 0, + "data": map[string]interface{}{ + "items": buildSearchResultItems(51, 55), + "has_more": true, + "page_token": "tok_p3", + }, + }), nil + default: + return nil, fmt.Errorf("unexpected search page_token: %q", pageToken) + } + case strings.Contains(req.URL.Path, "/open-apis/im/v1/messages/mget"): + ids := req.URL.Query()["message_ids"] + mgetBatchSizes = append(mgetBatchSizes, len(ids)) + return shortcutJSONResponse(200, map[string]interface{}{ + "code": 0, + "data": map[string]interface{}{ + "items": buildMessageDetails(ids), + }, + }), nil + case strings.Contains(req.URL.Path, "/open-apis/im/v1/chats/batch_query"): + var body struct { + ChatIDs []string `json:"chat_ids"` + } + rawBody, err := io.ReadAll(req.Body) + if err != nil { + t.Fatalf("ReadAll() error = %v", err) + } + if err := json.Unmarshal(rawBody, &body); err != nil { + t.Fatalf("json.Unmarshal() error = %v", err) + } + chatBatchSizes = append(chatBatchSizes, len(body.ChatIDs)) + return shortcutJSONResponse(200, map[string]interface{}{ + "code": 0, + "data": map[string]interface{}{ + "items": buildChatContexts(body.ChatIDs), + }, + }), nil + default: + return nil, fmt.Errorf("unexpected request: %s", req.URL.String()) + } + })) + + if err := ImMessagesSearch.Execute(context.Background(), runtime); err != nil { + t.Fatalf("ImMessagesSearch.Execute() error = %v", err) + } 
+ + if !reflect.DeepEqual(searchPageTokens, []string{"", "tok_p2"}) { + t.Fatalf("search page tokens = %#v, want %#v", searchPageTokens, []string{"", "tok_p2"}) + } + if !reflect.DeepEqual(mgetBatchSizes, []int{50, 5}) { + t.Fatalf("mget batch sizes = %#v, want %#v", mgetBatchSizes, []int{50, 5}) + } + if !reflect.DeepEqual(chatBatchSizes, []int{50, 5}) { + t.Fatalf("chat batch sizes = %#v, want %#v", chatBatchSizes, []int{50, 5}) + } + + outBuf, _ := runtime.Factory.IOStreams.Out.(*bytes.Buffer) + if outBuf == nil { + t.Fatal("stdout buffer missing") + } + output := outBuf.String() + if !strings.Contains(output, "55 search result(s)") { + t.Fatalf("stdout = %q, want search results summary", output) + } + if !strings.Contains(output, "warning: stopped after fetching 2 page(s)") { + t.Fatalf("stdout = %q, want page limit warning", output) + } +} + +func TestImMessagesSearchExecuteExplicitPageLimitWithoutPageAll(t *testing.T) { + var searchCalls int + + runtime := newMessagesSearchRuntime(t, map[string]string{ + "query": "incident", + "page-limit": "2", + }, nil, shortcutRoundTripFunc(func(req *http.Request) (*http.Response, error) { + switch { + case strings.Contains(req.URL.Path, "tenant_access_token"): + return shortcutJSONResponse(200, map[string]interface{}{ + "code": 0, + "tenant_access_token": "tenant-token", + "expire": 7200, + }), nil + case strings.Contains(req.URL.Path, "/open-apis/im/v1/messages/search"): + searchCalls++ + pageToken := req.URL.Query().Get("page_token") + if searchCalls == 1 { + if pageToken != "" { + return nil, fmt.Errorf("unexpected first page token: %q", pageToken) + } + return shortcutJSONResponse(200, map[string]interface{}{ + "code": 0, + "data": map[string]interface{}{ + "items": buildSearchResultItems(1, 1), + "has_more": true, + "page_token": "tok_p2", + }, + }), nil + } + if pageToken != "tok_p2" { + return nil, fmt.Errorf("unexpected second page token: %q", pageToken) + } + return shortcutJSONResponse(200, 
map[string]interface{}{ + "code": 0, + "data": map[string]interface{}{ + "items": buildSearchResultItems(2, 2), + "has_more": false, + "page_token": "", + }, + }), nil + case strings.Contains(req.URL.Path, "/open-apis/im/v1/messages/mget"): + ids := req.URL.Query()["message_ids"] + return shortcutJSONResponse(200, map[string]interface{}{ + "code": 0, + "data": map[string]interface{}{ + "items": buildMessageDetails(ids), + }, + }), nil + case strings.Contains(req.URL.Path, "/open-apis/im/v1/chats/batch_query"): + var body struct { + ChatIDs []string `json:"chat_ids"` + } + rawBody, err := io.ReadAll(req.Body) + if err != nil { + t.Fatalf("ReadAll() error = %v", err) + } + if err := json.Unmarshal(rawBody, &body); err != nil { + t.Fatalf("json.Unmarshal() error = %v", err) + } + return shortcutJSONResponse(200, map[string]interface{}{ + "code": 0, + "data": map[string]interface{}{ + "items": buildChatContexts(body.ChatIDs), + }, + }), nil + default: + return nil, fmt.Errorf("unexpected request: %s", req.URL.String()) + } + })) + + if err := ImMessagesSearch.Execute(context.Background(), runtime); err != nil { + t.Fatalf("ImMessagesSearch.Execute() error = %v", err) + } + if searchCalls != 2 { + t.Fatalf("searchCalls = %d, want 2", searchCalls) + } +} + +func buildSearchResultItems(start, end int) []interface{} { + items := make([]interface{}, 0, end-start+1) + for i := start; i <= end; i++ { + items = append(items, map[string]interface{}{ + "meta_data": map[string]interface{}{ + "message_id": fmt.Sprintf("om_%03d", i), + }, + }) + } + return items +} + +func buildMessageDetails(ids []string) []interface{} { + items := make([]interface{}, 0, len(ids)) + for _, id := range ids { + suffix := strings.TrimPrefix(id, "om_") + items = append(items, map[string]interface{}{ + "message_id": id, + "msg_type": "text", + "create_time": "1710000000", + "chat_id": "oc_" + suffix, + "sender": map[string]interface{}{ + "id": "cli_bot", + "name": "Bot", + "sender_type": "bot", + }, + 
"body": map[string]interface{}{ + "content": fmt.Sprintf(`{"text":"message %s"}`, suffix), + }, + }) + } + return items +} + +func buildChatContexts(chatIDs []string) []interface{} { + items := make([]interface{}, 0, len(chatIDs)) + for _, chatID := range chatIDs { + items = append(items, map[string]interface{}{ + "chat_id": chatID, + "chat_mode": "group", + "name": "Chat " + strings.TrimPrefix(chatID, "oc_"), + }) + } + return items +} diff --git a/shortcuts/im/im_messages_send.go b/shortcuts/im/im_messages_send.go index 98609316..f3364c54 100644 --- a/shortcuts/im/im_messages_send.go +++ b/shortcuts/im/im_messages_send.go @@ -18,10 +18,12 @@ import ( var ImMessagesSend = common.Shortcut{ Service: "im", Command: "+messages-send", - Description: "Send a message to a chat or direct message with bot identity; bot-only; sends to chat-id or user-id with text/markdown/post/media, supports idempotency key", + Description: "Send a message to a chat or direct message; user/bot; sends to chat-id or user-id with text/markdown/post/media, supports idempotency key", Risk: "write", Scopes: []string{"im:message:send_as_bot"}, - AuthTypes: []string{"bot"}, + UserScopes: []string{"im:message.send_as_user"}, + BotScopes: []string{"im:message:send_as_bot"}, + AuthTypes: []string{"bot", "user"}, Flags: []common.Flag{ {Name: "chat-id", Desc: "(required, mutually exclusive with --user-id) chat ID (oc_xxx)"}, {Name: "user-id", Desc: "(required, mutually exclusive with --chat-id) user open_id (ou_xxx)"}, @@ -188,7 +190,7 @@ var ImMessagesSend = common.Shortcut{ return output.ErrValidation("%v", err) } } - + // Resolve content type if markdown != "" { msgType, content = "post", resolveMarkdownAsPost(ctx, runtime, markdown) } else if mt, c, err := resolveMediaContent(ctx, runtime, text, imageVal, fileVal, videoVal, videoCoverVal, audioVal); err != nil { diff --git a/shortcuts/mail/helpers.go b/shortcuts/mail/helpers.go index 4c8c3fcf..193f4f46 100644 --- a/shortcuts/mail/helpers.go +++ 
b/shortcuts/mail/helpers.go @@ -18,6 +18,7 @@ import ( "strconv" "strings" + "github.com/larksuite/cli/internal/auth" "github.com/larksuite/cli/internal/output" "github.com/larksuite/cli/internal/validate" "github.com/larksuite/cli/shortcuts/common" @@ -217,24 +218,24 @@ func mailboxPath(mailboxID string, segments ...string) string { } // fetchMailboxPrimaryEmail retrieves mailbox primary_email_address from -// user_mailboxes.profile. Returns empty string on failure (non-fatal). -func fetchMailboxPrimaryEmail(runtime *common.RuntimeContext, mailboxID string) string { +// user_mailboxes.profile. Returns the email address or an error. +func fetchMailboxPrimaryEmail(runtime *common.RuntimeContext, mailboxID string) (string, error) { if mailboxID == "" { mailboxID = "me" } data, err := runtime.CallAPI("GET", mailboxPath(mailboxID, "profile"), nil, nil) if err != nil { - return "" + return "", err } if email := extractPrimaryEmail(data); email != "" { - return email + return email, nil } if nested, ok := data["data"].(map[string]interface{}); ok { if email := extractPrimaryEmail(nested); email != "" { - return email + return email, nil } } - return "" + return "", fmt.Errorf("profile API returned no primary_email_address") } func extractPrimaryEmail(data map[string]interface{}) string { @@ -251,7 +252,8 @@ func extractPrimaryEmail(data map[string]interface{}) string { // fetchCurrentUserEmail retrieves the current mailbox primary email. 
func fetchCurrentUserEmail(runtime *common.RuntimeContext) string { - return fetchMailboxPrimaryEmail(runtime, "me") + email, _ := fetchMailboxPrimaryEmail(runtime, "me") + return email } // fetchSelfEmailSet returns a set containing the primary email of the given @@ -263,7 +265,7 @@ func fetchSelfEmailSet(runtime *common.RuntimeContext, mailboxID string) map[str mailboxID = "me" } set := make(map[string]bool) - if email := fetchMailboxPrimaryEmail(runtime, mailboxID); email != "" { + if email, _ := fetchMailboxPrimaryEmail(runtime, mailboxID); email != "" { set[strings.ToLower(email)] = true } return set @@ -679,6 +681,9 @@ func addUniqueID(dst *[]string, seen map[string]bool, id string) { } func listMailboxFolders(runtime *common.RuntimeContext, mailboxID string) ([]folderInfo, error) { + if err := validateFolderReadScope(runtime); err != nil { + return nil, err + } data, err := runtime.CallAPI("GET", mailboxPath(mailboxID, "folders"), nil, nil) if err != nil { return nil, output.ErrValidation("unable to resolve --folder: failed to list folders (%v). %s", err, resolveLookupHint("folder", mailboxID)) @@ -700,6 +705,9 @@ func listMailboxFolders(runtime *common.RuntimeContext, mailboxID string) ([]fol } func listMailboxLabels(runtime *common.RuntimeContext, mailboxID string) ([]labelInfo, error) { + if err := validateLabelReadScope(runtime); err != nil { + return nil, err + } data, err := runtime.CallAPI("GET", mailboxPath(mailboxID, "labels"), nil, nil) if err != nil { return nil, output.ErrValidation("unable to resolve --label: failed to list labels (%v). %s", err, resolveLookupHint("label", mailboxID)) @@ -1854,6 +1862,79 @@ func checkAttachmentSizeLimit(filePaths []string, extraBytes int64, extraCount . return nil } +// validateConfirmSendScope checks that the user's token includes the +// mail:user_mailbox.message:send scope when --confirm-send is set. 
+// This scope is not declared in the shortcut's static Scopes (to keep the +// default draft-only path accessible without the sensitive send permission), +// so we validate it dynamically here. +func validateConfirmSendScope(runtime *common.RuntimeContext) error { + if !runtime.Bool("confirm-send") { + return nil + } + appID := runtime.Config.AppID + userOpenId := runtime.UserOpenId() + if appID == "" || userOpenId == "" { + return nil + } + stored := auth.GetStoredToken(appID, userOpenId) + if stored == nil { + return nil + } + required := []string{"mail:user_mailbox.message:send"} + if missing := auth.MissingScopes(stored.Scope, required); len(missing) > 0 { + return output.ErrWithHint(output.ExitAuth, "missing_scope", + fmt.Sprintf("--confirm-send requires scope: %s", strings.Join(missing, ", ")), + fmt.Sprintf("run `lark-cli auth login --scope \"%s\"` to grant the send permission", strings.Join(missing, " "))) + } + return nil +} + +// validateFolderReadScope checks that the user's token includes the +// mail:user_mailbox.folder:read scope. Called on-demand by listMailboxFolders +// before hitting the folders API. System folders are resolved locally and +// never reach this check. 
+func validateFolderReadScope(runtime *common.RuntimeContext) error { + appID := runtime.Config.AppID + userOpenId := runtime.UserOpenId() + if appID == "" || userOpenId == "" { + return nil + } + stored := auth.GetStoredToken(appID, userOpenId) + if stored == nil { + return nil + } + required := []string{"mail:user_mailbox.folder:read"} + if missing := auth.MissingScopes(stored.Scope, required); len(missing) > 0 { + return output.ErrWithHint(output.ExitAuth, "missing_scope", + fmt.Sprintf("folder resolution requires scope: %s", strings.Join(missing, ", ")), + fmt.Sprintf("run `lark-cli auth login --scope \"%s\"` to grant folder read permission", strings.Join(missing, " "))) + } + return nil +} + +// validateLabelReadScope checks that the user's token includes the +// mail:user_mailbox.message:modify scope. Called on-demand by listMailboxLabels +// before hitting the labels API. System labels are resolved locally and +// never reach this check. +func validateLabelReadScope(runtime *common.RuntimeContext) error { + appID := runtime.Config.AppID + userOpenId := runtime.UserOpenId() + if appID == "" || userOpenId == "" { + return nil + } + stored := auth.GetStoredToken(appID, userOpenId) + if stored == nil { + return nil + } + required := []string{"mail:user_mailbox.message:modify"} + if missing := auth.MissingScopes(stored.Scope, required); len(missing) > 0 { + return output.ErrWithHint(output.ExitAuth, "missing_scope", + fmt.Sprintf("label resolution requires scope: %s", strings.Join(missing, ", ")), + fmt.Sprintf("run `lark-cli auth login --scope \"%s\"` to grant label access permission", strings.Join(missing, " "))) + } + return nil +} + func validateComposeHasAtLeastOneRecipient(to, cc, bcc string) error { if strings.TrimSpace(to) == "" && strings.TrimSpace(cc) == "" && strings.TrimSpace(bcc) == "" { return fmt.Errorf("at least one recipient (--to, --cc, or --bcc) is required") diff --git a/shortcuts/mail/mail_confirm_send_scope_test.go 
b/shortcuts/mail/mail_confirm_send_scope_test.go new file mode 100644 index 00000000..e93fb215 --- /dev/null +++ b/shortcuts/mail/mail_confirm_send_scope_test.go @@ -0,0 +1,52 @@ +// Copyright (c) 2026 Lark Technologies Pte. Ltd. +// SPDX-License-Identifier: MIT + +package mail + +import ( + "errors" + "testing" + + "github.com/larksuite/cli/internal/output" +) + +func TestConfirmSendMissingScopeReply(t *testing.T) { + f, stdout, _, _ := mailShortcutTestFactory(t) + err := runMountedMailShortcut(t, MailReply, []string{ + "+reply", "--message-id", "msg_001", "--body", "hello", "--confirm-send", + }, f, stdout) + assertMissingSendScope(t, err) +} + +func TestConfirmSendMissingScopeReplyAll(t *testing.T) { + f, stdout, _, _ := mailShortcutTestFactory(t) + err := runMountedMailShortcut(t, MailReplyAll, []string{ + "+reply-all", "--message-id", "msg_001", "--body", "hello", "--confirm-send", + }, f, stdout) + assertMissingSendScope(t, err) +} + +func TestConfirmSendMissingScopeForward(t *testing.T) { + f, stdout, _, _ := mailShortcutTestFactory(t) + err := runMountedMailShortcut(t, MailForward, []string{ + "+forward", "--message-id", "msg_001", "--to", "alice@example.com", "--confirm-send", + }, f, stdout) + assertMissingSendScope(t, err) +} + +func assertMissingSendScope(t *testing.T, err error) { + t.Helper() + if err == nil { + t.Fatal("expected error when token lacks send scope with --confirm-send, got nil") + } + var exitErr *output.ExitError + if !errors.As(err, &exitErr) { + t.Fatalf("expected ExitError, got %T: %v", err, err) + } + if exitErr.Code != output.ExitAuth { + t.Errorf("expected exit code %d (ExitAuth), got %d", output.ExitAuth, exitErr.Code) + } + if exitErr.Detail == nil || exitErr.Detail.Type != "missing_scope" { + t.Errorf("expected detail type missing_scope, got %+v", exitErr.Detail) + } +} diff --git a/shortcuts/mail/mail_draft_create.go b/shortcuts/mail/mail_draft_create.go index 8b932795..d6f70690 100644 --- 
a/shortcuts/mail/mail_draft_create.go +++ b/shortcuts/mail/mail_draft_create.go @@ -43,8 +43,8 @@ var MailDraftCreate = common.Shortcut{ {Name: "cc", Desc: "Optional. Full Cc recipient list. Separate multiple addresses with commas. Display-name format is supported."}, {Name: "bcc", Desc: "Optional. Full Bcc recipient list. Separate multiple addresses with commas. Display-name format is supported."}, {Name: "plain-text", Type: "bool", Desc: "Force plain-text mode, ignoring HTML auto-detection. Cannot be used with --inline."}, - {Name: "attach", Desc: "Optional. Regular attachment file paths. Separate multiple paths with commas. Each path must point to a readable local file."}, - {Name: "inline", Desc: "Optional. Inline images as a JSON array. Each entry: {\"cid\":\"\",\"file_path\":\"\"}. Cannot be used with --plain-text. CID images are embedded via in the HTML body. CID is a unique identifier, e.g. a random hex string like \"a1b2c3d4e5f6a7b8c9d0\"."}, + {Name: "attach", Desc: "Optional. Regular attachment file paths (relative path only). Separate multiple paths with commas. Each path must point to a readable local file."}, + {Name: "inline", Desc: "Optional. Inline images as a JSON array. Each entry: {\"cid\":\"\",\"file_path\":\"\"}. All file_path values must be relative paths. Cannot be used with --plain-text. CID images are embedded via in the HTML body. CID is a unique identifier, e.g. a random hex string like \"a1b2c3d4e5f6a7b8c9d0\"."}, }, DryRun: func(ctx context.Context, runtime *common.RuntimeContext) *common.DryRunAPI { input, err := parseDraftCreateInput(runtime) diff --git a/shortcuts/mail/mail_draft_edit.go b/shortcuts/mail/mail_draft_edit.go index b28d69d1..99061b8b 100644 --- a/shortcuts/mail/mail_draft_edit.go +++ b/shortcuts/mail/mail_draft_edit.go @@ -32,7 +32,7 @@ var MailDraftEdit = common.Shortcut{ {Name: "set-to", Desc: "Replace the entire To recipient list with the addresses provided here. Separate multiple addresses with commas. 
Display-name format is supported."}, {Name: "set-cc", Desc: "Replace the entire Cc recipient list with the addresses provided here. Separate multiple addresses with commas. Display-name format is supported."}, {Name: "set-bcc", Desc: "Replace the entire Bcc recipient list with the addresses provided here. Separate multiple addresses with commas. Display-name format is supported."}, - {Name: "patch-file", Desc: "Edit entry point for body edits, incremental recipient changes, header edits, attachment changes, or inline-image changes. All body edits MUST go through --patch-file. Two body ops: set_body (full replacement including quote) and set_reply_body (replaces only user-authored content, auto-preserves quote block). Run --inspect first to check has_quoted_content, then --print-patch-template for the JSON structure."}, + {Name: "patch-file", Desc: "Edit entry point for body edits, incremental recipient changes, header edits, attachment changes, or inline-image changes. All body edits MUST go through --patch-file. Two body ops: set_body (full replacement including quote) and set_reply_body (replaces only user-authored content, auto-preserves quote block). Run --inspect first to check has_quoted_content, then --print-patch-template for the JSON structure. Relative path only."}, {Name: "print-patch-template", Type: "bool", Desc: "Print the JSON template and supported operations for the --patch-file flag. Recommended first step before generating a patch file. No draft read or write is performed."}, {Name: "inspect", Type: "bool", Desc: "Inspect the draft without modifying it. Returns the draft projection including subject, recipients, body summary, has_quoted_content (whether the draft contains a reply/forward quote block), attachments_summary (with part_id and cid for each attachment), and inline_summary. 
Run this BEFORE editing body to check has_quoted_content: if true, use set_reply_body in --patch-file to preserve the quote; if false, use set_body."}, }, @@ -307,10 +307,10 @@ func buildDraftEditPatchTemplate() map[string]interface{} { {"op": "set_reply_body", "shape": map[string]interface{}{"value": "string (user-authored content only, WITHOUT the quote block; the quote block is re-appended automatically)"}}, {"op": "set_header", "shape": map[string]interface{}{"name": "string", "value": "string"}}, {"op": "remove_header", "shape": map[string]interface{}{"name": "string"}}, - {"op": "add_attachment", "shape": map[string]interface{}{"path": "string"}}, + {"op": "add_attachment", "shape": map[string]interface{}{"path": "string(relative path)"}}, {"op": "remove_attachment", "shape": map[string]interface{}{"target": map[string]interface{}{"part_id": "string(optional)", "cid": "string(optional)"}}}, - {"op": "add_inline", "shape": map[string]interface{}{"path": "string", "cid": "string", "filename": "string(optional)", "content_type": "string(optional)"}}, - {"op": "replace_inline", "shape": map[string]interface{}{"target": map[string]interface{}{"part_id": "string(optional)", "cid": "string(optional)"}, "path": "string", "cid": "string(optional)", "filename": "string(optional)", "content_type": "string(optional)"}}, + {"op": "add_inline", "shape": map[string]interface{}{"path": "string(relative path)", "cid": "string", "filename": "string(optional)", "content_type": "string(optional)"}}, + {"op": "replace_inline", "shape": map[string]interface{}{"target": map[string]interface{}{"part_id": "string(optional)", "cid": "string(optional)"}, "path": "string(relative path)", "cid": "string(optional)", "filename": "string(optional)", "content_type": "string(optional)"}}, {"op": "remove_inline", "shape": map[string]interface{}{"target": map[string]interface{}{"part_id": "string(optional)", "cid": "string(optional)"}}}, }, "supported_ops_by_group": []map[string]interface{}{ @@ 
-340,10 +340,10 @@ func buildDraftEditPatchTemplate() map[string]interface{} { { "group": "attachments_and_inline", "ops": []map[string]interface{}{ - {"op": "add_attachment", "shape": map[string]interface{}{"path": "string"}}, + {"op": "add_attachment", "shape": map[string]interface{}{"path": "string(relative path)"}}, {"op": "remove_attachment", "shape": map[string]interface{}{"target": map[string]interface{}{"part_id": "string(optional)", "cid": "string(optional)"}}}, - {"op": "add_inline", "shape": map[string]interface{}{"path": "string", "cid": "string", "filename": "string(optional)", "content_type": "string(optional)"}}, - {"op": "replace_inline", "shape": map[string]interface{}{"target": map[string]interface{}{"part_id": "string(optional)", "cid": "string(optional)"}, "path": "string", "cid": "string(optional)", "filename": "string(optional)", "content_type": "string(optional)"}}, + {"op": "add_inline", "shape": map[string]interface{}{"path": "string(relative path)", "cid": "string", "filename": "string(optional)", "content_type": "string(optional)"}}, + {"op": "replace_inline", "shape": map[string]interface{}{"target": map[string]interface{}{"part_id": "string(optional)", "cid": "string(optional)"}, "path": "string(relative path)", "cid": "string(optional)", "filename": "string(optional)", "content_type": "string(optional)"}}, {"op": "remove_inline", "shape": map[string]interface{}{"target": map[string]interface{}{"part_id": "string(optional)", "cid": "string(optional)"}}}, }, }, @@ -360,6 +360,7 @@ func buildDraftEditPatchTemplate() map[string]interface{} { }, "notes": []string{ "`ops` is executed in order", + "all file paths (--patch-file and `path` fields in ops) must be relative — no absolute paths or .. 
traversal", "all body edits MUST go through --patch-file; there is no --set-body flag", "`set_body` replaces the ENTIRE body including any reply/forward quote block; when the draft has both text/plain and text/html, it updates the HTML body and regenerates the plain-text summary, so the input should be HTML", "`set_reply_body` replaces only the user-authored portion of the body and automatically re-appends the trailing reply/forward quote block (generated by +reply or +forward); the value you pass should contain ONLY the new user-authored content WITHOUT the quote block — the quote block will be re-inserted automatically; if the user wants to modify content INSIDE the quote block, use `set_body` instead for full replacement; if the draft has no quote block, it behaves identically to `set_body`", diff --git a/shortcuts/mail/mail_forward.go b/shortcuts/mail/mail_forward.go index 7af87421..905bc8af 100644 --- a/shortcuts/mail/mail_forward.go +++ b/shortcuts/mail/mail_forward.go @@ -20,7 +20,7 @@ var MailForward = common.Shortcut{ Command: "+forward", Description: "Forward a message and save as draft (default). Use --confirm-send to send immediately after user confirmation. Original message block included automatically.", Risk: "write", - Scopes: []string{"mail:user_mailbox.message:send", "mail:user_mailbox.message:modify", "mail:user_mailbox.message:readonly", "mail:user_mailbox:readonly", "mail:user_mailbox.message.address:read", "mail:user_mailbox.message.subject:read", "mail:user_mailbox.message.body:read"}, + Scopes: []string{"mail:user_mailbox.message:modify", "mail:user_mailbox.message:readonly", "mail:user_mailbox:readonly", "mail:user_mailbox.message.address:read", "mail:user_mailbox.message.subject:read", "mail:user_mailbox.message.body:read"}, AuthTypes: []string{"user"}, Flags: []common.Flag{ {Name: "message-id", Desc: "Required. 
Message ID to forward", Required: true}, @@ -30,8 +30,8 @@ var MailForward = common.Shortcut{ {Name: "cc", Desc: "CC email address(es), comma-separated"}, {Name: "bcc", Desc: "BCC email address(es), comma-separated"}, {Name: "plain-text", Type: "bool", Desc: "Force plain-text mode, ignoring all HTML auto-detection. Cannot be used with --inline."}, - {Name: "attach", Desc: "Attachment file path(s), comma-separated (appended after original attachments)"}, - {Name: "inline", Desc: "Inline images as a JSON array. Each entry: {\"cid\":\"\",\"file_path\":\"\"}. Cannot be used with --plain-text. CID images are embedded via in the HTML body. CID is a unique identifier, e.g. a random hex string like \"a1b2c3d4e5f6a7b8c9d0\"."}, + {Name: "attach", Desc: "Attachment file path(s), comma-separated, appended after original attachments (relative path only)"}, + {Name: "inline", Desc: "Inline images as a JSON array. Each entry: {\"cid\":\"\",\"file_path\":\"\"}. All file_path values must be relative paths. Cannot be used with --plain-text. CID images are embedded via in the HTML body. CID is a unique identifier, e.g. a random hex string like \"a1b2c3d4e5f6a7b8c9d0\"."}, {Name: "confirm-send", Type: "bool", Desc: "Send the forward immediately instead of saving as draft. 
Only use after the user has explicitly confirmed recipients and content."}, }, DryRun: func(ctx context.Context, runtime *common.RuntimeContext) *common.DryRunAPI { @@ -55,6 +55,9 @@ var MailForward = common.Shortcut{ return api }, Validate: func(ctx context.Context, runtime *common.RuntimeContext) error { + if err := validateConfirmSendScope(runtime); err != nil { + return err + } if runtime.Bool("confirm-send") { if err := validateComposeHasAtLeastOneRecipient(runtime.Str("to"), runtime.Str("cc"), runtime.Str("bcc")); err != nil { return err diff --git a/shortcuts/mail/mail_reply.go b/shortcuts/mail/mail_reply.go index 465b6b60..b89bc5d6 100644 --- a/shortcuts/mail/mail_reply.go +++ b/shortcuts/mail/mail_reply.go @@ -18,7 +18,7 @@ var MailReply = common.Shortcut{ Command: "+reply", Description: "Reply to a message and save as draft (default). Use --confirm-send to send immediately after user confirmation. Sets Re: subject, In-Reply-To, and References headers automatically.", Risk: "write", - Scopes: []string{"mail:user_mailbox.message:send", "mail:user_mailbox.message:modify", "mail:user_mailbox.message:readonly", "mail:user_mailbox:readonly", "mail:user_mailbox.message.address:read", "mail:user_mailbox.message.subject:read", "mail:user_mailbox.message.body:read"}, + Scopes: []string{"mail:user_mailbox.message:modify", "mail:user_mailbox.message:readonly", "mail:user_mailbox:readonly", "mail:user_mailbox.message.address:read", "mail:user_mailbox.message.subject:read", "mail:user_mailbox.message.body:read"}, AuthTypes: []string{"user"}, Flags: []common.Flag{ {Name: "message-id", Desc: "Required. Message ID to reply to", Required: true}, @@ -28,8 +28,8 @@ var MailReply = common.Shortcut{ {Name: "cc", Desc: "Additional CC email address(es), comma-separated"}, {Name: "bcc", Desc: "BCC email address(es), comma-separated"}, {Name: "plain-text", Type: "bool", Desc: "Force plain-text mode, ignoring all HTML auto-detection. 
Cannot be used with --inline."}, - {Name: "attach", Desc: "Attachment file path(s), comma-separated"}, - {Name: "inline", Desc: "Inline images as a JSON array. Each entry: {\"cid\":\"\",\"file_path\":\"\"}. Cannot be used with --plain-text. CID images are embedded via in the HTML body. CID is a unique identifier, e.g. a random hex string like \"a1b2c3d4e5f6a7b8c9d0\"."}, + {Name: "attach", Desc: "Attachment file path(s), comma-separated (relative path only)"}, + {Name: "inline", Desc: "Inline images as a JSON array. Each entry: {\"cid\":\"\",\"file_path\":\"\"}. All file_path values must be relative paths. Cannot be used with --plain-text. CID images are embedded via in the HTML body. CID is a unique identifier, e.g. a random hex string like \"a1b2c3d4e5f6a7b8c9d0\"."}, {Name: "confirm-send", Type: "bool", Desc: "Send the reply immediately instead of saving as draft. Only use after the user has explicitly confirmed recipients and content."}, }, DryRun: func(ctx context.Context, runtime *common.RuntimeContext) *common.DryRunAPI { @@ -52,6 +52,9 @@ var MailReply = common.Shortcut{ return api }, Validate: func(ctx context.Context, runtime *common.RuntimeContext) error { + if err := validateConfirmSendScope(runtime); err != nil { + return err + } return validateComposeInlineAndAttachments(runtime.Str("attach"), runtime.Str("inline"), runtime.Bool("plain-text"), "") }, Execute: func(ctx context.Context, runtime *common.RuntimeContext) error { diff --git a/shortcuts/mail/mail_reply_all.go b/shortcuts/mail/mail_reply_all.go index adfb81ae..6b82365e 100644 --- a/shortcuts/mail/mail_reply_all.go +++ b/shortcuts/mail/mail_reply_all.go @@ -18,7 +18,7 @@ var MailReplyAll = common.Shortcut{ Command: "+reply-all", Description: "Reply to all recipients and save as draft (default). Use --confirm-send to send immediately after user confirmation. 
Includes all original To and CC automatically.", Risk: "write", - Scopes: []string{"mail:user_mailbox.message:send", "mail:user_mailbox.message:modify", "mail:user_mailbox.message:readonly", "mail:user_mailbox:readonly", "mail:user_mailbox.message.address:read", "mail:user_mailbox.message.subject:read", "mail:user_mailbox.message.body:read"}, + Scopes: []string{"mail:user_mailbox.message:modify", "mail:user_mailbox.message:readonly", "mail:user_mailbox:readonly", "mail:user_mailbox.message.address:read", "mail:user_mailbox.message.subject:read", "mail:user_mailbox.message.body:read"}, AuthTypes: []string{"user"}, Flags: []common.Flag{ {Name: "message-id", Desc: "Required. Message ID to reply to all recipients", Required: true}, @@ -29,8 +29,8 @@ var MailReplyAll = common.Shortcut{ {Name: "bcc", Desc: "BCC email address(es), comma-separated"}, {Name: "remove", Desc: "Address(es) to exclude from the outgoing reply, comma-separated"}, {Name: "plain-text", Type: "bool", Desc: "Force plain-text mode, ignoring all HTML auto-detection. Cannot be used with --inline."}, - {Name: "attach", Desc: "Attachment file path(s), comma-separated"}, - {Name: "inline", Desc: "Inline images as a JSON array. Each entry: {\"cid\":\"\",\"file_path\":\"\"}. Cannot be used with --plain-text. CID images are embedded via in the HTML body. CID is a unique identifier, e.g. a random hex string like \"a1b2c3d4e5f6a7b8c9d0\"."}, + {Name: "attach", Desc: "Attachment file path(s), comma-separated (relative path only)"}, + {Name: "inline", Desc: "Inline images as a JSON array. Each entry: {\"cid\":\"\",\"file_path\":\"\"}. All file_path values must be relative paths. Cannot be used with --plain-text. CID images are embedded via in the HTML body. CID is a unique identifier, e.g. a random hex string like \"a1b2c3d4e5f6a7b8c9d0\"."}, {Name: "confirm-send", Type: "bool", Desc: "Send the reply immediately instead of saving as draft. 
Only use after the user has explicitly confirmed recipients and content."}, }, DryRun: func(ctx context.Context, runtime *common.RuntimeContext) *common.DryRunAPI { @@ -53,6 +53,9 @@ var MailReplyAll = common.Shortcut{ return api }, Validate: func(ctx context.Context, runtime *common.RuntimeContext) error { + if err := validateConfirmSendScope(runtime); err != nil { + return err + } return validateComposeInlineAndAttachments(runtime.Str("attach"), runtime.Str("inline"), runtime.Bool("plain-text"), "") }, Execute: func(ctx context.Context, runtime *common.RuntimeContext) error { diff --git a/shortcuts/mail/mail_send.go b/shortcuts/mail/mail_send.go index 71c429df..43b63826 100644 --- a/shortcuts/mail/mail_send.go +++ b/shortcuts/mail/mail_send.go @@ -28,8 +28,8 @@ var MailSend = common.Shortcut{ {Name: "cc", Desc: "CC email address(es), comma-separated"}, {Name: "bcc", Desc: "BCC email address(es), comma-separated"}, {Name: "plain-text", Type: "bool", Desc: "Force plain-text mode, ignoring HTML auto-detection. Cannot be used with --inline."}, - {Name: "attach", Desc: "Attachment file path(s), comma-separated"}, - {Name: "inline", Desc: "Inline images as a JSON array. Each entry: {\"cid\":\"\",\"file_path\":\"\"}. Cannot be used with --plain-text. CID images are embedded via in the HTML body. CID is a unique identifier, e.g. a random hex string like \"a1b2c3d4e5f6a7b8c9d0\"."}, + {Name: "attach", Desc: "Attachment file path(s), comma-separated (relative path only)"}, + {Name: "inline", Desc: "Inline images as a JSON array. Each entry: {\"cid\":\"\",\"file_path\":\"\"}. All file_path values must be relative paths. Cannot be used with --plain-text. CID images are embedded via in the HTML body. CID is a unique identifier, e.g. a random hex string like \"a1b2c3d4e5f6a7b8c9d0\"."}, {Name: "confirm-send", Type: "bool", Desc: "Send the email immediately instead of saving as draft. 
Only use after the user has explicitly confirmed recipients and content."}, }, DryRun: func(ctx context.Context, runtime *common.RuntimeContext) *common.DryRunAPI { diff --git a/shortcuts/mail/mail_shortcut_test.go b/shortcuts/mail/mail_shortcut_test.go index 2c9d389a..88fa89e0 100644 --- a/shortcuts/mail/mail_shortcut_test.go +++ b/shortcuts/mail/mail_shortcut_test.go @@ -42,7 +42,7 @@ func mailShortcutTestFactory(t *testing.T) (*cmdutil.Factory, *bytes.Buffer, *by RefreshToken: "test-refresh-token", ExpiresAt: time.Now().Add(1 * time.Hour).UnixMilli(), RefreshExpiresAt: time.Now().Add(24 * time.Hour).UnixMilli(), - Scope: "mail:user_mailbox.messages:write mail:user_mailbox.messages:read mail:user_mailbox.message:send mail:user_mailbox.message:modify mail:user_mailbox.message:readonly mail:user_mailbox.message.address:read mail:user_mailbox.message.subject:read mail:user_mailbox.message.body:read mail:user_mailbox:readonly", + Scope: "mail:user_mailbox.messages:write mail:user_mailbox.messages:read mail:user_mailbox.message:modify mail:user_mailbox.message:readonly mail:user_mailbox.message.address:read mail:user_mailbox.message.subject:read mail:user_mailbox.message.body:read mail:user_mailbox:readonly", GrantedAt: time.Now().Add(-1 * time.Hour).UnixMilli(), } if err := auth.SetStoredToken(token); err != nil { diff --git a/shortcuts/mail/mail_watch.go b/shortcuts/mail/mail_watch.go index 21386b7e..c5699427 100644 --- a/shortcuts/mail/mail_watch.go +++ b/shortcuts/mail/mail_watch.go @@ -7,6 +7,7 @@ import ( "bytes" "context" "encoding/json" + "errors" "fmt" "io" "net/http" @@ -16,6 +17,7 @@ import ( "regexp" "sort" "strings" + "sync" "syscall" larkcore "github.com/larksuite/oapi-sdk-go/v3/core" @@ -79,8 +81,8 @@ var MailWatch = common.Shortcut{ Command: "+watch", Description: "Watch for incoming mail events via WebSocket (requires scope mail:event and bot event mail.user_mailbox.event.message_received_v1 added). 
Run with --print-output-schema to see per-format field reference before parsing output.", Risk: "read", - Scopes: []string{"mail:event", "mail:user_mailbox.message:readonly", "mail:user_mailbox.folder:read", "mail:user_mailbox.message.address:read", "mail:user_mailbox.message.subject:read", "mail:user_mailbox.message.body:read"}, - AuthTypes: []string{"user", "bot"}, + Scopes: []string{"mail:event", "mail:user_mailbox.message:readonly", "mail:user_mailbox.message.address:read", "mail:user_mailbox.message.subject:read", "mail:user_mailbox.message.body:read"}, + AuthTypes: []string{"user"}, Flags: []common.Flag{ {Name: "format", Default: "data", Desc: "json: NDJSON stream with ok/data envelope; data: bare NDJSON stream"}, {Name: "msg-format", Default: "metadata", Desc: "message payload mode: metadata(headers + meta, for triage/notification) | minimal(IDs and state only, no headers, for tracking read/folder changes) | plain_text_full(all metadata fields + full plain-text body) | event(raw WebSocket event, no API call, for debug) | full(full message including HTML body and attachments)"}, @@ -138,6 +140,11 @@ var MailWatch = common.Shortcut{ Desc(fmt.Sprintf("Subscribe mailbox events (effective_folder_ids=%s, effective_label_ids=%s)", effectiveFolderDisplay, effectiveLabelDisplay)). Body(map[string]interface{}{"event_type": 1}) + if mailbox == "me" { + d.GET(mailboxPath("me", "profile")). 
+ Desc("Resolve mailbox address for event filtering (requires scope mail:user_mailbox:readonly)") + } + if len(resolvedLabelIDs) > 0 { d.Set("filter_label_ids", strings.Join(resolvedLabelIDs, ",")) } @@ -244,11 +251,24 @@ var MailWatch = common.Shortcut{ } info("Mailbox subscribed.") - // mailboxFilter: only apply event-level filtering when an explicit email address is given - // "me" is a server-side alias and cannot be matched against event.mail_address - mailboxFilter := "" - if mailbox != "me" { - mailboxFilter = mailbox + var unsubOnce sync.Once + var unsubErr error + unsubscribe := func() error { + unsubOnce.Do(func() { + _, unsubErr = runtime.CallAPI("POST", mailboxPath(mailbox, "event", "unsubscribe"), nil, map[string]interface{}{"event_type": 1}) + }) + return unsubErr + } + + // Resolve "me" to the actual email address so we can filter events. + mailboxFilter := mailbox + if mailbox == "me" { + resolved, profileErr := fetchMailboxPrimaryEmail(runtime, "me") + if profileErr != nil { + unsubscribe() //nolint:errcheck // best-effort cleanup; primary error is profileErr + return enhanceProfileError(profileErr) + } + mailboxFilter = resolved } eventCount := 0 @@ -257,10 +277,10 @@ var MailWatch = common.Shortcut{ // Extract event body eventBody := extractMailEventBody(data) - // Filter by --mailbox (only when an explicit email address was provided) + // Filter by --mailbox if mailboxFilter != "" { mailAddr, _ := eventBody["mail_address"].(string) - if mailAddr != mailboxFilter { + if !strings.EqualFold(mailAddr, mailboxFilter) { return } } @@ -414,12 +434,19 @@ var MailWatch = common.Shortcut{ }() <-sigCh info(fmt.Sprintf("\nShutting down... (received %d events)", eventCount)) + info("Unsubscribing mailbox events...") + if unsubErr := unsubscribe(); unsubErr != nil { + fmt.Fprintf(errOut, "Warning: unsubscribe failed: %v\n", unsubErr) + } else { + info("Mailbox unsubscribed.") + } signal.Stop(sigCh) os.Exit(0) }() info("Connected. Waiting for mail events... 
(Ctrl+C to stop)") if err := cli.Start(ctx); err != nil { + unsubscribe() //nolint:errcheck // best-effort cleanup return output.ErrNetwork("WebSocket connection failed: %v", err) } return nil @@ -692,6 +719,25 @@ func wrapWatchSubscribeError(err error) error { return output.ErrWithHint(output.ExitAPI, "api_error", fmt.Sprintf("subscribe mailbox events failed: %v", err), hint) } +// enhanceProfileError wraps a profile API error with actionable hints. +// Permission errors get a scope-specific hint; other errors (network, 5xx) +// are reported as-is so diagnostics aren't misleading. +func enhanceProfileError(err error) error { + var exitErr *output.ExitError + if errors.As(err, &exitErr) && exitErr.Detail != nil { + errType := exitErr.Detail.Type + lower := strings.ToLower(exitErr.Detail.Message) + if errType == "permission" || errType == "missing_scope" || + strings.Contains(lower, "permission") || strings.Contains(lower, "scope") { + return output.ErrWithHint(output.ExitAuth, "missing_scope", + "unable to resolve mailbox address: "+exitErr.Detail.Message, + "run `lark-cli auth login --scope \"mail:user_mailbox:readonly\"` to grant mailbox profile access") + } + } + // Preserve original error (and its exit code) for non-permission failures. + return err +} + // decodeBodyFieldsForFile returns a shallow copy of outputData with body_html and // body_plain_text decoded from base64url, so that files saved via --output-dir contain // human-readable content instead of raw base64 strings. 
diff --git a/shortcuts/mail/mail_watch_test.go b/shortcuts/mail/mail_watch_test.go index 9717de3b..02476fbd 100644 --- a/shortcuts/mail/mail_watch_test.go +++ b/shortcuts/mail/mail_watch_test.go @@ -87,8 +87,8 @@ func TestMailWatchDryRunDefaultMetadataFetchesMessage(t *testing.T) { runtime := runtimeForMailWatchTest(t, map[string]string{}) apis := dryRunAPIsForMailWatchTest(t, MailWatch.DryRun(context.Background(), runtime)) - if len(apis) != 2 { - t.Fatalf("expected 2 dry-run apis, got %d", len(apis)) + if len(apis) != 3 { + t.Fatalf("expected 3 dry-run apis, got %d", len(apis)) } if apis[0].Method != "POST" { t.Fatalf("unexpected method: %s", apis[0].Method) @@ -96,10 +96,13 @@ func TestMailWatchDryRunDefaultMetadataFetchesMessage(t *testing.T) { if apis[0].URL != mailboxPath("me", "event", "subscribe") { t.Fatalf("unexpected url: %s", apis[0].URL) } - if apis[1].URL != mailboxPath("me", "messages", "{message_id}") { - t.Fatalf("unexpected fetch url: %s", apis[1].URL) + if apis[1].Method != "GET" || apis[1].URL != mailboxPath("me", "profile") { + t.Fatalf("unexpected profile api: %s %s", apis[1].Method, apis[1].URL) } - if got := apis[1].Params["format"]; got != "metadata" { + if apis[2].URL != mailboxPath("me", "messages", "{message_id}") { + t.Fatalf("unexpected fetch url: %s", apis[2].URL) + } + if got := apis[2].Params["format"]; got != "metadata" { t.Fatalf("unexpected fetch format: %#v", got) } } @@ -110,16 +113,16 @@ func TestMailWatchDryRunMetadataFormatFetchesMessage(t *testing.T) { }) apis := dryRunAPIsForMailWatchTest(t, MailWatch.DryRun(context.Background(), runtime)) - if len(apis) != 2 { - t.Fatalf("expected 2 dry-run apis, got %d", len(apis)) + if len(apis) != 3 { + t.Fatalf("expected 3 dry-run apis, got %d", len(apis)) } - if apis[1].Method != "GET" { - t.Fatalf("unexpected fetch method: %s", apis[1].Method) + if apis[2].Method != "GET" { + t.Fatalf("unexpected fetch method: %s", apis[2].Method) } - if apis[1].URL != mailboxPath("me", "messages", 
"{message_id}") { - t.Fatalf("unexpected fetch url: %s", apis[1].URL) + if apis[2].URL != mailboxPath("me", "messages", "{message_id}") { + t.Fatalf("unexpected fetch url: %s", apis[2].URL) } - if got := apis[1].Params["format"]; got != "metadata" { + if got := apis[2].Params["format"]; got != "metadata" { t.Fatalf("unexpected fetch format: %#v", got) } } @@ -130,10 +133,10 @@ func TestMailWatchDryRunMinimalFormatFetchesMessage(t *testing.T) { }) apis := dryRunAPIsForMailWatchTest(t, MailWatch.DryRun(context.Background(), runtime)) - if len(apis) != 2 { - t.Fatalf("expected 2 dry-run apis, got %d", len(apis)) + if len(apis) != 3 { + t.Fatalf("expected 3 dry-run apis, got %d", len(apis)) } - if got := apis[1].Params["format"]; got != "metadata" { + if got := apis[2].Params["format"]; got != "metadata" { t.Fatalf("unexpected fetch format: %#v", got) } } @@ -173,10 +176,10 @@ func TestMailWatchDryRunPlainTextFullFormatFetchesMessage(t *testing.T) { }) apis := dryRunAPIsForMailWatchTest(t, MailWatch.DryRun(context.Background(), runtime)) - if len(apis) != 2 { - t.Fatalf("expected 2 dry-run apis, got %d", len(apis)) + if len(apis) != 3 { + t.Fatalf("expected 3 dry-run apis, got %d", len(apis)) } - if got := apis[1].Params["format"]; got != "plain_text_full" { + if got := apis[2].Params["format"]; got != "plain_text_full" { t.Fatalf("unexpected fetch format: %#v", got) } } @@ -187,10 +190,10 @@ func TestMailWatchDryRunFullFormatUsesFull(t *testing.T) { }) apis := dryRunAPIsForMailWatchTest(t, MailWatch.DryRun(context.Background(), runtime)) - if len(apis) != 2 { - t.Fatalf("expected 2 dry-run apis, got %d", len(apis)) + if len(apis) != 3 { + t.Fatalf("expected 3 dry-run apis, got %d", len(apis)) } - if got := apis[1].Params["format"]; got != "full" { + if got := apis[2].Params["format"]; got != "full" { t.Fatalf("unexpected fetch format: %#v", got) } } @@ -202,13 +205,13 @@ func TestMailWatchDryRunEventFormatWithLabelFilterFetchesMessage(t *testing.T) { }) apis := 
dryRunAPIsForMailWatchTest(t, MailWatch.DryRun(context.Background(), runtime)) - if len(apis) != 2 { - t.Fatalf("expected 2 dry-run apis, got %d", len(apis)) + if len(apis) != 3 { + t.Fatalf("expected 3 dry-run apis, got %d", len(apis)) } - if apis[1].URL != mailboxPath("me", "messages", "{message_id}") { - t.Fatalf("unexpected fetch url: %s", apis[1].URL) + if apis[2].URL != mailboxPath("me", "messages", "{message_id}") { + t.Fatalf("unexpected fetch url: %s", apis[2].URL) } - if got := apis[1].Params["format"]; got != "metadata" { + if got := apis[2].Params["format"]; got != "metadata" { t.Fatalf("unexpected fetch format: %#v", got) } } diff --git a/shortcuts/minutes/minutes_download.go b/shortcuts/minutes/minutes_download.go new file mode 100644 index 00000000..9a8c5545 --- /dev/null +++ b/shortcuts/minutes/minutes_download.go @@ -0,0 +1,339 @@ +// Copyright (c) 2026 Lark Technologies Pte. Ltd. +// SPDX-License-Identifier: MIT + +package minutes + +import ( + "context" + "fmt" + "io" + "mime" + "net/http" + "os" + "path/filepath" + "regexp" + "strings" + "time" + + "github.com/larksuite/cli/internal/output" + "github.com/larksuite/cli/internal/validate" + "github.com/larksuite/cli/shortcuts/common" +) + +const ( + // disableClientTimeout removes the global 30s client timeout for large media downloads. + // The download is bounded by the caller's context (e.g. Ctrl+C). A fixed timeout + // would cut off legitimate large file transfers. + disableClientTimeout = 0 + + maxBatchSize = 50 + maxDownloadRedirects = 5 +) + +// validMinuteToken matches minute tokens: lowercase alphanumeric characters only. 
+var validMinuteToken = regexp.MustCompile(`^[a-z0-9]+$`) + +var MinutesDownload = common.Shortcut{ + Service: "minutes", + Command: "+download", + Description: "Download audio/video media file of a minute", + Risk: "read", + Scopes: []string{"minutes:minutes.media:export"}, + AuthTypes: []string{"user", "bot"}, + HasFormat: true, + Flags: []common.Flag{ + {Name: "minute-tokens", Desc: "minute tokens, comma-separated for batch download (max 50)", Required: true}, + {Name: "output", Desc: "output path: file path for single token, directory for batch (default: current dir)"}, + {Name: "overwrite", Type: "bool", Desc: "overwrite existing output file"}, + {Name: "url-only", Type: "bool", Desc: "only print the download URL(s) without downloading"}, + }, + Validate: func(ctx context.Context, runtime *common.RuntimeContext) error { + tokens := common.SplitCSV(runtime.Str("minute-tokens")) + if len(tokens) == 0 { + return output.ErrValidation("--minute-tokens is required") + } + if len(tokens) > maxBatchSize { + return output.ErrValidation("--minute-tokens: too many tokens (%d), maximum is %d", len(tokens), maxBatchSize) + } + for _, token := range tokens { + if !validMinuteToken.MatchString(token) { + return output.ErrValidation("invalid minute token %q: must contain only lowercase alphanumeric characters (e.g. obcnq3b9jl72l83w4f149w9c)", token) + } + } + return nil + }, + DryRun: func(ctx context.Context, runtime *common.RuntimeContext) *common.DryRunAPI { + tokens := common.SplitCSV(runtime.Str("minute-tokens")) + return common.NewDryRunAPI(). + GET("/open-apis/minutes/v1/minutes/:minute_token/media"). 
+ Set("minute_tokens", tokens) + }, + Execute: func(ctx context.Context, runtime *common.RuntimeContext) error { + tokens := common.SplitCSV(runtime.Str("minute-tokens")) + outputPath := runtime.Str("output") + overwrite := runtime.Bool("overwrite") + urlOnly := runtime.Bool("url-only") + errOut := runtime.IO().ErrOut + single := len(tokens) == 1 + + // Batch mode: --output must be a directory, not an existing file. + if !single && outputPath != "" { + if fi, err := os.Stat(outputPath); err == nil && !fi.IsDir() { + return output.ErrValidation("--output %q is a file; batch mode expects a directory path", outputPath) + } + } + + if !single { + fmt.Fprintf(errOut, "[minutes +download] batch: %d token(s)\n", len(tokens)) + } + + type result struct { + MinuteToken string `json:"minute_token"` + SavedPath string `json:"saved_path,omitempty"` + SizeBytes int64 `json:"size_bytes,omitempty"` + DownloadURL string `json:"download_url,omitempty"` + Error string `json:"error,omitempty"` + } + + results := make([]result, len(tokens)) + seen := make(map[string]int) + usedNames := make(map[string]bool) + + // Clone the factory client for download use. We clone the struct (not the + // pointer) to avoid mutating the shared singleton's Timeout. The original + // transport chain is preserved so security headers and test mocks still work. + // SSRF protection: ValidateDownloadSourceURL (URL-level) + CheckRedirect + // (redirect-level). Transport-level IP check is intentionally omitted because + // download URLs originate from the trusted Lark API, not user input. 
+ baseClient, err := runtime.Factory.HttpClient() + if err != nil { + return output.ErrNetwork("failed to get HTTP client: %s", err) + } + clonedClient := *baseClient + clonedClient.Timeout = disableClientTimeout + clonedClient.CheckRedirect = func(req *http.Request, via []*http.Request) error { + if len(via) >= maxDownloadRedirects { + return fmt.Errorf("too many redirects") + } + if len(via) > 0 { + prev := via[len(via)-1] + if strings.EqualFold(prev.URL.Scheme, "https") && strings.EqualFold(req.URL.Scheme, "http") { + return fmt.Errorf("redirect from https to http is not allowed") + } + } + return validate.ValidateDownloadSourceURL(req.Context(), req.URL.String()) + } + dlClient := &clonedClient + + ticker := time.NewTicker(time.Second / 5) // rate-limit to 5 req/s + defer ticker.Stop() + + for i, token := range tokens { + if i > 0 { + select { + case <-ctx.Done(): + return ctx.Err() + case <-ticker.C: + } + } + + if err := validate.ResourceName(token, "--minute-tokens"); err != nil { + results[i] = result{MinuteToken: token, Error: err.Error()} + continue + } + if firstIdx, dup := seen[token]; dup { + results[i] = result{MinuteToken: token, Error: fmt.Sprintf("duplicate token, same as index %d", firstIdx)} + continue + } + seen[token] = i + + downloadURL, err := fetchDownloadURL(ctx, runtime, token) + if err != nil { + results[i] = result{MinuteToken: token, Error: err.Error()} + continue + } + + if urlOnly { + results[i] = result{MinuteToken: token, DownloadURL: downloadURL} + continue + } + + fmt.Fprintf(errOut, "Downloading media: %s\n", common.MaskToken(token)) + + // single token: --output is a file path; batch: --output is a directory + opts := downloadOpts{overwrite: overwrite, usedNames: usedNames} + if single { + opts.outputPath = outputPath + } else { + opts.outputDir = outputPath + } + + dl, err := downloadMediaFile(ctx, dlClient, downloadURL, token, opts) + if err != nil { + results[i] = result{MinuteToken: token, Error: err.Error()} + continue + } 
+ results[i] = result{MinuteToken: token, SavedPath: dl.savedPath, SizeBytes: dl.sizeBytes} + } + + // output + if single { + r := results[0] + if r.Error != "" { + return output.ErrAPI(0, r.Error, nil) + } + if urlOnly { + runtime.Out(map[string]interface{}{"download_url": r.DownloadURL}, nil) + } else { + runtime.Out(map[string]interface{}{"saved_path": r.SavedPath, "size_bytes": r.SizeBytes}, nil) + } + return nil + } + + // batch output + successCount := 0 + for _, r := range results { + if r.Error == "" { + successCount++ + } + } + fmt.Fprintf(errOut, "[minutes +download] done: %d total, %d succeeded, %d failed\n", len(results), successCount, len(results)-successCount) + + runtime.OutFormat(map[string]interface{}{"downloads": results}, &output.Meta{Count: len(results)}, nil) + if successCount == 0 && len(results) > 0 { + return output.ErrAPI(0, fmt.Sprintf("all %d downloads failed", len(results)), nil) + } + return nil + }, +} + +// fetchDownloadURL retrieves the pre-signed download URL for a minute token. +func fetchDownloadURL(ctx context.Context, runtime *common.RuntimeContext, minuteToken string) (string, error) { + data, err := runtime.DoAPIJSON(http.MethodGet, + fmt.Sprintf("/open-apis/minutes/v1/minutes/%s/media", validate.EncodePathSegment(minuteToken)), + nil, nil) + if err != nil { + return "", err + } + downloadURL := common.GetString(data, "download_url") + if downloadURL == "" { + return "", output.Errorf(output.ExitAPI, "api_error", "API returned empty download_url for %s", minuteToken) + } + return downloadURL, nil +} + +type downloadResult struct { + savedPath string + sizeBytes int64 +} + +type downloadOpts struct { + outputPath string // explicit output file path (single mode only) + outputDir string // output directory (batch mode) + overwrite bool + usedNames map[string]bool // tracks used filenames to deduplicate in batch mode +} + +// downloadMediaFile streams a media file from a pre-signed URL to disk. 
+// Filename resolution: opts.outputPath > Content-Disposition filename > Content-Type ext > .media. +func downloadMediaFile(ctx context.Context, client *http.Client, downloadURL, minuteToken string, opts downloadOpts) (*downloadResult, error) { + if err := validate.ValidateDownloadSourceURL(ctx, downloadURL); err != nil { + return nil, output.ErrValidation("blocked download URL: %s", err) + } + + req, err := http.NewRequestWithContext(ctx, http.MethodGet, downloadURL, nil) + if err != nil { + return nil, output.ErrNetwork("invalid download URL: %s", err) + } + + resp, err := client.Do(req) + if err != nil { + return nil, output.ErrNetwork("download failed: %s", err) + } + defer resp.Body.Close() + + if resp.StatusCode < 200 || resp.StatusCode >= 300 { + body, _ := io.ReadAll(io.LimitReader(resp.Body, 4096)) + if len(body) > 0 { + return nil, output.ErrNetwork("download failed: HTTP %d: %s", resp.StatusCode, strings.TrimSpace(string(body))) + } + return nil, output.ErrNetwork("download failed: HTTP %d", resp.StatusCode) + } + + // resolve output path + outputPath := opts.outputPath + if outputPath == "" { + filename := resolveFilenameFromResponse(resp, minuteToken) + // Deduplicate filenames in batch mode: prefix with token on collision. 
+ if opts.usedNames != nil { + if opts.usedNames[filename] { + filename = minuteToken + "-" + filename + } + opts.usedNames[filename] = true + } + outputPath = filepath.Join(opts.outputDir, filename) + } + + safePath, err := validate.SafeOutputPath(outputPath) + if err != nil { + return nil, output.ErrValidation("unsafe output path: %s", err) + } + if err := common.EnsureWritableFile(safePath, opts.overwrite); err != nil { + return nil, err + } + if err := os.MkdirAll(filepath.Dir(safePath), 0700); err != nil { + return nil, output.Errorf(output.ExitInternal, "api_error", "cannot create parent directory: %s", err) + } + + sizeBytes, err := validate.AtomicWriteFromReader(safePath, resp.Body, 0600) + if err != nil { + return nil, output.Errorf(output.ExitInternal, "api_error", "cannot create file: %s", err) + } + return &downloadResult{savedPath: safePath, sizeBytes: sizeBytes}, nil +} + +// resolveFilenameFromResponse derives the filename from HTTP response headers. +// Priority: Content-Disposition filename > Content-Type extension > .media. +func resolveFilenameFromResponse(resp *http.Response, minuteToken string) string { + if cd := resp.Header.Get("Content-Disposition"); cd != "" { + if _, params, err := mime.ParseMediaType(cd); err == nil { + if filename := params["filename"]; filename != "" { + return filename + } + } + } + if ext := extFromContentType(resp.Header.Get("Content-Type")); ext != "" { + return minuteToken + ext + } + return minuteToken + ".media" +} + +// preferredExt overrides Go's mime.ExtensionsByType which returns alphabetically sorted +// results (e.g. .m4v before .mp4 for video/mp4). +var preferredExt = map[string]string{ + "video/mp4": ".mp4", + "audio/mp4": ".m4a", + "audio/mpeg": ".mp3", +} + +// Extension lookup order for Content-Type-derived filenames: the +// preferredExt table first (deterministic choice), then Go's +// mime.ExtensionsByType; unknown or
unparsable media types yield "", +// so resolveFilenameFromResponse falls back to the ".media" suffix. +// extFromContentType returns a file extension for the given Content-Type, or "" if unknown. +func extFromContentType(contentType string) string { + if contentType == "" { + return "" + } + mediaType, _, err := mime.ParseMediaType(contentType) + if err != nil { + return "" + } + if ext, ok := preferredExt[mediaType]; ok { + return ext + } + if exts, err := mime.ExtensionsByType(mediaType); err == nil && len(exts) > 0 { + return exts[0] + } + return "" +} diff --git a/shortcuts/minutes/minutes_download_test.go b/shortcuts/minutes/minutes_download_test.go new file mode 100644 index 00000000..5e6a4738 --- /dev/null +++ b/shortcuts/minutes/minutes_download_test.go @@ -0,0 +1,439 @@ +// Copyright (c) 2026 Lark Technologies Pte. Ltd. +// SPDX-License-Identifier: MIT + +package minutes + +import ( + "bytes" + "context" + "encoding/json" + "net/http" + "os" + "strings" + "sync" + "testing" + + "github.com/spf13/cobra" + + "github.com/larksuite/cli/internal/cmdutil" + "github.com/larksuite/cli/internal/core" + "github.com/larksuite/cli/internal/httpmock" + "github.com/larksuite/cli/shortcuts/common" +) + +// --------------------------------------------------------------------------- +// helpers +// --------------------------------------------------------------------------- + +var warmOnce sync.Once + +func warmTokenCache(t *testing.T) { + t.Helper() + warmOnce.Do(func() { + f, _, _, reg := cmdutil.TestFactory(t, defaultConfig()) + reg.Register(&httpmock.Stub{ + URL: "/open-apis/auth/v3/tenant_access_token/internal", + Body: map[string]interface{}{ + "code": 0, "msg": "ok", + "tenant_access_token": "t-test-token", "expire": 7200, + }, + }) + reg.Register(&httpmock.Stub{ + URL: "/open-apis/test/v1/warm", + Body: map[string]interface{}{"code": 0, "msg": "ok", "data": map[string]interface{}{}}, + }) + s := common.Shortcut{ + Service: "test", + Command: "+warm", + AuthTypes:
[]string{"bot"}, + Execute: func(_ context.Context, rctx *common.RuntimeContext) error { + _, err := rctx.CallAPI("GET", "/open-apis/test/v1/warm", nil, nil) + return err + }, + } + parent := &cobra.Command{Use: "test"} + s.Mount(parent, f) + parent.SetArgs([]string{"+warm"}) + parent.SilenceErrors = true + parent.SilenceUsage = true + parent.Execute() + }) +} + +func mountAndRun(t *testing.T, s common.Shortcut, args []string, f *cmdutil.Factory, stdout *bytes.Buffer) error { + t.Helper() + warmTokenCache(t) + parent := &cobra.Command{Use: "minutes"} + s.Mount(parent, f) + parent.SetArgs(args) + parent.SilenceErrors = true + parent.SilenceUsage = true + if stdout != nil { + stdout.Reset() + } + return parent.Execute() +} + +func defaultConfig() *core.CliConfig { + return &core.CliConfig{ + AppID: "test-app", AppSecret: "test-secret", Brand: core.BrandFeishu, + UserOpenId: "ou_testuser", + } +} + +func mediaStub(token, downloadURL string) *httpmock.Stub { + return &httpmock.Stub{ + Method: "GET", + URL: "/open-apis/minutes/v1/minutes/" + token + "/media", + Body: map[string]interface{}{ + "code": 0, "msg": "ok", + "data": map[string]interface{}{"download_url": downloadURL}, + }, + } +} + +func downloadStub(url string, body []byte, contentType string) *httpmock.Stub { + return &httpmock.Stub{ + URL: url, + RawBody: body, + Headers: http.Header{"Content-Type": []string{contentType}}, + } +} + +// chdir changes the working directory and restores it when the test finishes. 
+func chdir(t *testing.T, dir string) { + t.Helper() + orig, err := os.Getwd() + if err != nil { + t.Fatalf("failed to get cwd: %v", err) + } + if err := os.Chdir(dir); err != nil { + t.Fatalf("failed to chdir to %s: %v", dir, err) + } + t.Cleanup(func() { os.Chdir(orig) }) +} + +// --------------------------------------------------------------------------- +// Unit tests: resolveOutputFromResponse +// --------------------------------------------------------------------------- + +func TestResolveFilenameFromResponse_ContentDisposition(t *testing.T) { + resp := &http.Response{ + Header: http.Header{ + "Content-Disposition": []string{`attachment; filename="meeting_recording.mp4"`}, + "Content-Type": []string{"video/mp4"}, + }, + } + got := resolveFilenameFromResponse(resp, "tok001") + if got != "meeting_recording.mp4" { + t.Errorf("expected Content-Disposition filename, got %q", got) + } +} + +func TestResolveFilenameFromResponse_ContentType(t *testing.T) { + resp := &http.Response{ + Header: http.Header{ + "Content-Type": []string{"video/mp4"}, + }, + } + got := resolveFilenameFromResponse(resp, "tok001") + if !strings.HasPrefix(got, "tok001") { + t.Errorf("expected token prefix, got %q", got) + } + if ext := got[len("tok001"):]; ext == "" { + t.Errorf("expected extension after token, got %q", got) + } +} + +func TestResolveFilenameFromResponse_Fallback(t *testing.T) { + resp := &http.Response{Header: http.Header{}} + got := resolveFilenameFromResponse(resp, "tok001") + if got != "tok001.media" { + t.Errorf("expected fallback %q, got %q", "tok001.media", got) + } +} + +func TestResolveFilenameFromResponse_InvalidContentDisposition(t *testing.T) { + resp := &http.Response{ + Header: http.Header{ + "Content-Disposition": []string{"invalid;;;"}, + "Content-Type": []string{"audio/mpeg"}, + }, + } + got := resolveFilenameFromResponse(resp, "tok001") + if !strings.HasPrefix(got, "tok001") { + t.Errorf("expected token prefix from Content-Type fallback, got %q", got) + } +} 
+ +func TestResolveFilenameFromResponse_EmptyDispositionFilename(t *testing.T) { + resp := &http.Response{ + Header: http.Header{ + "Content-Disposition": []string{"attachment"}, + "Content-Type": []string{"video/mp4"}, + }, + } + got := resolveFilenameFromResponse(resp, "tok001") + if got == "" { + t.Error("expected non-empty filename") + } + if !strings.HasPrefix(got, "tok001") { + t.Errorf("expected token prefix, got %q", got) + } +} + +// --------------------------------------------------------------------------- +// Validation tests +// --------------------------------------------------------------------------- + +func TestDownload_Validation_NoFlags(t *testing.T) { + f, _, _, _ := cmdutil.TestFactory(t, defaultConfig()) + err := mountAndRun(t, MinutesDownload, []string{"+download", "--as", "user"}, f, nil) + if err == nil { + t.Fatal("expected validation error for no flags") + } +} + +func TestDownload_Validation_InvalidToken(t *testing.T) { + f, _, _, _ := cmdutil.TestFactory(t, defaultConfig()) + err := mountAndRun(t, MinutesDownload, []string{ + "+download", "--minute-tokens", "obcn***invalid", "--as", "user", + }, f, nil) + if err == nil { + t.Fatal("expected validation error for invalid token") + } + if !strings.Contains(err.Error(), "invalid minute token") { + t.Errorf("expected 'invalid minute token' error, got: %v", err) + } +} + +func TestDownload_Validation_OutputWithBatch(t *testing.T) { + f, _, _, _ := cmdutil.TestFactory(t, defaultConfig()) + err := mountAndRun(t, MinutesDownload, []string{ + "+download", "--minute-tokens", "t1,t2", "--output", "file.mp4", "--as", "user", + }, f, nil) + if err == nil { + t.Fatal("expected validation error for --output with --minute-tokens") + } +} + +// --------------------------------------------------------------------------- +// Integration tests: single mode +// --------------------------------------------------------------------------- + +func TestDownload_DryRun(t *testing.T) { + f, stdout, _, _ := 
cmdutil.TestFactory(t, defaultConfig()) + err := mountAndRun(t, MinutesDownload, []string{ + "+download", "--minute-tokens", "tok001", "--dry-run", "--as", "user", + }, f, stdout) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + out := stdout.String() + if !strings.Contains(out, "media") { + t.Errorf("dry-run should show media API path, got: %s", out) + } + if !strings.Contains(out, "tok001") { + t.Errorf("dry-run should show minute_token, got: %s", out) + } +} + +func TestDownload_UrlOnly(t *testing.T) { + f, stdout, _, reg := cmdutil.TestFactory(t, defaultConfig()) + reg.Register(mediaStub("tok001", "https://example.com/presigned/download")) + + err := mountAndRun(t, MinutesDownload, []string{ + "+download", "--minute-tokens", "tok001", "--url-only", "--as", "bot", + }, f, stdout) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if !strings.Contains(stdout.String(), "https://example.com/presigned/download") { + t.Errorf("url-only should output download URL, got: %s", stdout.String()) + } +} + +func TestDownload_FullDownload(t *testing.T) { + chdir(t, t.TempDir()) + + f, stdout, _, reg := cmdutil.TestFactory(t, defaultConfig()) + reg.Register(mediaStub("tok001", "https://example.com/presigned/download")) + reg.Register(downloadStub("example.com/presigned/download", []byte("fake-video-content"), "video/mp4")) + + err := mountAndRun(t, MinutesDownload, []string{ + "+download", "--minute-tokens", "tok001", "--output", "output.mp4", "--as", "bot", + }, f, stdout) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + + data, err := os.ReadFile("output.mp4") + if err != nil { + t.Fatalf("failed to read output file: %v", err) + } + if string(data) != "fake-video-content" { + t.Errorf("file content = %q, want %q", string(data), "fake-video-content") + } +} + +func TestDownload_OverwriteProtection(t *testing.T) { + chdir(t, t.TempDir()) + if err := os.WriteFile("existing.mp4", []byte("old"), 0644); err != nil { + t.Fatalf("setup 
failed: %v", err) + } + + f, _, _, reg := cmdutil.TestFactory(t, defaultConfig()) + reg.Register(mediaStub("tok001", "https://example.com/presigned/download")) + reg.Register(downloadStub("example.com/presigned/download", []byte("new-content"), "video/mp4")) + + err := mountAndRun(t, MinutesDownload, []string{ + "+download", "--minute-tokens", "tok001", "--output", "existing.mp4", "--as", "bot", + }, f, nil) + if err == nil { + t.Fatal("expected error for existing file without --overwrite") + } + if !strings.Contains(err.Error(), "exists") { + t.Errorf("error should mention file exists, got: %v", err) + } + + data, _ := os.ReadFile("existing.mp4") + if string(data) != "old" { + t.Errorf("original file should be preserved, got %q", string(data)) + } +} + +func TestDownload_HttpError(t *testing.T) { + chdir(t, t.TempDir()) + + f, _, _, reg := cmdutil.TestFactory(t, defaultConfig()) + reg.Register(mediaStub("tok001", "https://example.com/presigned/download")) + reg.Register(&httpmock.Stub{ + URL: "example.com/presigned/download", + Status: 403, + RawBody: []byte("Forbidden"), + }) + + err := mountAndRun(t, MinutesDownload, []string{ + "+download", "--minute-tokens", "tok001", "--output", "output.mp4", "--as", "bot", + }, f, nil) + if err == nil { + t.Fatal("expected error for HTTP 403") + } + if !strings.Contains(err.Error(), "403") { + t.Errorf("error should contain status code, got: %v", err) + } +} + +// --------------------------------------------------------------------------- +// Integration tests: batch mode +// --------------------------------------------------------------------------- + +func TestDownload_Batch_UrlOnly(t *testing.T) { + f, stdout, _, reg := cmdutil.TestFactory(t, defaultConfig()) + reg.Register(mediaStub("tok001", "https://example.com/download/1")) + reg.Register(mediaStub("tok002", "https://example.com/download/2")) + + err := mountAndRun(t, MinutesDownload, []string{ + "+download", "--minute-tokens", "tok001,tok002", "--url-only", "--as", 
"bot", + }, f, stdout) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + out := stdout.String() + if !strings.Contains(out, "download/1") || !strings.Contains(out, "download/2") { + t.Errorf("batch url-only should show both URLs, got: %s", out) + } +} + +func TestDownload_Batch_Download(t *testing.T) { + chdir(t, t.TempDir()) + + f, stdout, _, reg := cmdutil.TestFactory(t, defaultConfig()) + reg.Register(mediaStub("tok001", "https://example.com/download/1")) + reg.Register(mediaStub("tok002", "https://example.com/download/2")) + reg.Register(downloadStub("example.com/download/1", []byte("content-1"), "video/mp4")) + reg.Register(downloadStub("example.com/download/2", []byte("content-2"), "video/mp4")) + + err := mountAndRun(t, MinutesDownload, []string{ + "+download", "--minute-tokens", "tok001,tok002", "--as", "bot", + }, f, stdout) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + + // verify output structure + var result struct { + Data struct { + Downloads []struct { + MinuteToken string `json:"minute_token"` + SavedPath string `json:"saved_path"` + } `json:"downloads"` + } `json:"data"` + } + if err := json.Unmarshal(stdout.Bytes(), &result); err != nil { + t.Fatalf("failed to parse output: %v\nraw: %s", err, stdout.String()) + } + if len(result.Data.Downloads) != 2 { + t.Fatalf("expected 2 downloads, got %d", len(result.Data.Downloads)) + } +} + +func TestDownload_Batch_PartialFailure(t *testing.T) { + chdir(t, t.TempDir()) + + f, stdout, _, reg := cmdutil.TestFactory(t, defaultConfig()) + reg.Register(mediaStub("tok001", "https://example.com/download/1")) + reg.Register(downloadStub("example.com/download/1", []byte("content-1"), "video/mp4")) + reg.Register(&httpmock.Stub{ + Method: "GET", + URL: "/open-apis/minutes/v1/minutes/tok002/media", + Status: 200, + Body: map[string]interface{}{ + "code": 99999, "msg": "permission denied", + "data": map[string]interface{}{}, + }, + }) + + err := mountAndRun(t, MinutesDownload, []string{ + 
"+download", "--minute-tokens", "tok001,tok002", "--as", "bot", + }, f, stdout) + // partial failure should not cause an overall error + if err != nil { + t.Fatalf("partial failure should not return error, got: %v", err) + } + out := stdout.String() + if !strings.Contains(out, "tok001") || !strings.Contains(out, "tok002") { + t.Errorf("output should contain both tokens, got: %s", out) + } +} + +func TestDownload_Batch_DuplicateToken(t *testing.T) { + f, stdout, _, reg := cmdutil.TestFactory(t, defaultConfig()) + // register media stub only once — dedup means only one API call + reg.Register(mediaStub("tok001", "https://example.com/download/1")) + + err := mountAndRun(t, MinutesDownload, []string{ + "+download", "--minute-tokens", "tok001,tok001", "--url-only", "--as", "bot", + }, f, stdout) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + out := stdout.String() + if !strings.Contains(out, "duplicate") { + t.Errorf("second token should report duplicate, got: %s", out) + } +} + +func TestDownload_Batch_DryRun(t *testing.T) { + f, stdout, _, _ := cmdutil.TestFactory(t, defaultConfig()) + err := mountAndRun(t, MinutesDownload, []string{ + "+download", "--minute-tokens", "tok001,tok002", "--dry-run", "--as", "user", + }, f, stdout) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + out := stdout.String() + if !strings.Contains(out, "tok001") || !strings.Contains(out, "tok002") { + t.Errorf("dry-run should show tokens, got: %s", out) + } +} diff --git a/shortcuts/minutes/shortcuts.go b/shortcuts/minutes/shortcuts.go new file mode 100644 index 00000000..9c1431f2 --- /dev/null +++ b/shortcuts/minutes/shortcuts.go @@ -0,0 +1,13 @@ +// Copyright (c) 2026 Lark Technologies Pte. Ltd. +// SPDX-License-Identifier: MIT + +package minutes + +import "github.com/larksuite/cli/shortcuts/common" + +// Shortcuts returns all minutes shortcuts. 
+func Shortcuts() []common.Shortcut { + return []common.Shortcut{ + MinutesDownload, + } +} diff --git a/shortcuts/register.go b/shortcuts/register.go index 30fd506d..3f8048fb 100644 --- a/shortcuts/register.go +++ b/shortcuts/register.go @@ -17,6 +17,7 @@ import ( "github.com/larksuite/cli/shortcuts/event" "github.com/larksuite/cli/shortcuts/im" "github.com/larksuite/cli/shortcuts/mail" + "github.com/larksuite/cli/shortcuts/minutes" "github.com/larksuite/cli/shortcuts/sheets" "github.com/larksuite/cli/shortcuts/task" "github.com/larksuite/cli/shortcuts/vc" @@ -36,6 +37,7 @@ func init() { allShortcuts = append(allShortcuts, base.Shortcuts()...) allShortcuts = append(allShortcuts, event.Shortcuts()...) allShortcuts = append(allShortcuts, mail.Shortcuts()...) + allShortcuts = append(allShortcuts, minutes.Shortcuts()...) allShortcuts = append(allShortcuts, task.Shortcuts()...) allShortcuts = append(allShortcuts, vc.Shortcuts()...) allShortcuts = append(allShortcuts, whiteboard.Shortcuts()...) 
diff --git a/shortcuts/register_test.go b/shortcuts/register_test.go index 48b72c39..2d169617 100644 --- a/shortcuts/register_test.go +++ b/shortcuts/register_test.go @@ -4,6 +4,10 @@ package shortcuts import ( + "encoding/json" + "os" + "path/filepath" + "strings" "testing" "github.com/larksuite/cli/internal/cmdutil" @@ -88,3 +92,39 @@ func TestRegisterShortcutsReusesExistingServiceCommand(t *testing.T) { t.Fatal("base workspace shortcut not mounted on existing service command") } } + +func TestGenerateShortcutsJSON(t *testing.T) { + output := os.Getenv("SHORTCUTS_OUTPUT") + if output == "" { + t.Skip("set SHORTCUTS_OUTPUT env to generate shortcuts.json") + } + + shortcuts := AllShortcuts() + + type entry struct { + Verb string `json:"verb"` + Description string `json:"description"` + Scopes []string `json:"scopes"` + } + grouped := make(map[string][]entry) + for _, s := range shortcuts { + verb := strings.TrimPrefix(s.Command, "+") + grouped[s.Service] = append(grouped[s.Service], entry{ + Verb: verb, + Description: s.Description, + Scopes: s.ScopesForIdentity("user"), + }) + } + + data, err := json.MarshalIndent(grouped, "", " ") + if err != nil { + t.Fatalf("marshal shortcuts: %v", err) + } + if err := os.MkdirAll(filepath.Dir(output), 0o755); err != nil { + t.Fatalf("mkdir: %v", err) + } + if err := os.WriteFile(output, data, 0o644); err != nil { + t.Fatalf("write file: %v", err) + } + t.Logf("wrote %d bytes to %s", len(data), output) +} diff --git a/shortcuts/sheets/helpers.go b/shortcuts/sheets/helpers.go index 8604cbee..ba1ca642 100644 --- a/shortcuts/sheets/helpers.go +++ b/shortcuts/sheets/helpers.go @@ -23,6 +23,8 @@ var ( cellRefPattern = regexp.MustCompile(`^([A-Za-z]+)([1-9][0-9]*)$`) ) +var sheetRangeSeparatorReplacer = strings.NewReplacer(`\!`, "!", `\！`, "!", "！", "!") + // getFirstSheetID queries the spreadsheet and returns the first sheet's ID.
func getFirstSheetID(runtime *common.RuntimeContext, spreadsheetToken string) (string, error) { data, err := runtime.CallAPI("GET", fmt.Sprintf("/open-apis/sheets/v3/spreadsheets/%s/sheets/query", validate.EncodePathSegment(spreadsheetToken)), nil, nil) @@ -56,7 +58,7 @@ func extractSpreadsheetToken(input string) string { } func normalizeSheetRange(sheetID, input string) string { - input = strings.TrimSpace(input) + input = normalizeSheetRangeSeparators(input) if input == "" || strings.Contains(input, "!") || sheetID == "" { return input } @@ -80,7 +82,7 @@ func normalizePointRange(sheetID, input string) string { func normalizeWriteRange(sheetID, input string, values interface{}) string { rows, cols := matrixDimensions(values) - input = strings.TrimSpace(input) + input = normalizeSheetRangeSeparators(input) if input == "" { return buildRectRange(sheetID, "A1", rows, cols) } @@ -97,7 +99,7 @@ func normalizeWriteRange(sheetID, input string, values interface{}) string { } func validateSheetRangeInput(sheetID, input string) error { - input = strings.TrimSpace(input) + input = normalizeSheetRangeSeparators(input) if input == "" || strings.Contains(input, "!") || sheetID != "" { return nil } @@ -108,7 +110,7 @@ func validateSheetRangeInput(sheetID, input string) error { } func looksLikeRelativeRange(input string) bool { - input = strings.TrimSpace(input) + input = normalizeSheetRangeSeparators(input) if input == "" { return false } @@ -120,13 +122,21 @@ func looksLikeRelativeRange(input string) bool { } func splitSheetRange(input string) (sheetID, subRange string, ok bool) { - parts := strings.SplitN(strings.TrimSpace(input), "!", 2) + parts := strings.SplitN(normalizeSheetRangeSeparators(input), "!", 2) if len(parts) != 2 || parts[0] == "" || parts[1] == "" { return "", "", false } return parts[0], parts[1], true } +func normalizeSheetRangeSeparators(input string) string { + input = strings.TrimSpace(input) + if input == "" { + return input + } + return 
sheetRangeSeparatorReplacer.Replace(input) +} + func buildRectRange(sheetID, anchor string, rows, cols int) string { if sheetID == "" { return "" diff --git a/shortcuts/sheets/sheet_ranges_test.go b/shortcuts/sheets/sheet_ranges_test.go new file mode 100644 index 00000000..b5eb2b6e --- /dev/null +++ b/shortcuts/sheets/sheet_ranges_test.go @@ -0,0 +1,148 @@ +// Copyright (c) 2026 Lark Technologies Pte. Ltd. +// SPDX-License-Identifier: MIT + +package sheets + +import ( + "context" + "encoding/json" + "strings" + "testing" + + "github.com/larksuite/cli/shortcuts/common" + "github.com/spf13/cobra" +) + +func mustMarshalSheetsDryRun(t *testing.T, v interface{}) string { + t.Helper() + + b, err := json.Marshal(v) + if err != nil { + t.Fatalf("json.Marshal() error = %v", err) + } + return string(b) +} + +func newSheetsTestRuntime(t *testing.T, stringFlags map[string]string, boolFlags map[string]bool) *common.RuntimeContext { + t.Helper() + + cmd := &cobra.Command{Use: "test"} + for name := range stringFlags { + cmd.Flags().String(name, "", "") + } + for name := range boolFlags { + cmd.Flags().Bool(name, false, "") + } + if err := cmd.ParseFlags(nil); err != nil { + t.Fatalf("ParseFlags() error = %v", err) + } + for name, value := range stringFlags { + if err := cmd.Flags().Set(name, value); err != nil { + t.Fatalf("Flags().Set(%q) error = %v", name, err) + } + } + for name, value := range boolFlags { + if err := cmd.Flags().Set(name, map[bool]string{true: "true", false: "false"}[value]); err != nil { + t.Fatalf("Flags().Set(%q) error = %v", name, err) + } + } + return &common.RuntimeContext{Cmd: cmd} +} + +func TestNormalizeSheetRangeSeparators(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + input string + want string + }{ + {name: "standard", input: "sheet_123!A1:B2", want: "sheet_123!A1:B2"}, + {name: "escaped ascii", input: `sheet_123\!A1:B2`, want: "sheet_123!A1:B2"}, + {name: "fullwidth", input: "sheet_123!A1:B2", want: "sheet_123!A1:B2"}, + 
{name: "escaped fullwidth", input: `sheet_123\!A1:B2`, want: "sheet_123!A1:B2"}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + if got := normalizeSheetRangeSeparators(tt.input); got != tt.want { + t.Fatalf("normalizeSheetRangeSeparators(%q) = %q, want %q", tt.input, got, tt.want) + } + }) + } +} + +func TestValidateSheetRangeInputAcceptsEscapedSeparator(t *testing.T) { + t.Parallel() + + if err := validateSheetRangeInput("", `sheet_123\!A1:B2`); err != nil { + t.Fatalf("validateSheetRangeInput() error = %v, want nil", err) + } +} + +func TestSheetReadDryRunNormalizesEscapedSeparator(t *testing.T) { + t.Parallel() + + runtime := newSheetsTestRuntime(t, map[string]string{ + "spreadsheet-token": "sht_test", + "range": `sheet_123\!A1`, + "sheet-id": "", + }, nil) + + got := mustMarshalSheetsDryRun(t, SheetRead.DryRun(context.Background(), runtime)) + if !strings.Contains(got, `"range":"sheet_123!A1:A1"`) { + t.Fatalf("SheetRead.DryRun() = %s, want normalized escaped separator", got) + } +} + +func TestSheetWriteDryRunNormalizesEscapedSeparator(t *testing.T) { + t.Parallel() + + runtime := newSheetsTestRuntime(t, map[string]string{ + "spreadsheet-token": "sht_test", + "range": `sheet_123\!A1:B2`, + "values": `[[1,2],[3,4]]`, + }, nil) + + got := mustMarshalSheetsDryRun(t, SheetWrite.DryRun(context.Background(), runtime)) + if !strings.Contains(got, `"range":"sheet_123!A1:B2"`) { + t.Fatalf("SheetWrite.DryRun() = %s, want normalized escaped separator", got) + } +} + +func TestSheetAppendDryRunNormalizesEscapedSeparator(t *testing.T) { + t.Parallel() + + runtime := newSheetsTestRuntime(t, map[string]string{ + "spreadsheet-token": "sht_test", + "range": `sheet_123\!A1:B2`, + "values": `[["foo","bar"]]`, + }, nil) + + got := mustMarshalSheetsDryRun(t, SheetAppend.DryRun(context.Background(), runtime)) + if !strings.Contains(got, `"range":"sheet_123!A1:B2"`) { + t.Fatalf("SheetAppend.DryRun() = %s, want normalized escaped 
separator", got) + } +} + +func TestSheetFindDryRunNormalizesEscapedSeparator(t *testing.T) { + t.Parallel() + + runtime := newSheetsTestRuntime(t, map[string]string{ + "spreadsheet-token": "sht_test", + "sheet-id": "sheet_123", + "find": "target", + "range": `sheet_123\!A1:B2`, + }, map[string]bool{ + "ignore-case": false, + "match-entire-cell": false, + "search-by-regex": false, + "include-formulas": false, + }) + + got := mustMarshalSheetsDryRun(t, SheetFind.DryRun(context.Background(), runtime)) + if !strings.Contains(got, `"range":"sheet_123!A1:B2"`) { + t.Fatalf("SheetFind.DryRun() = %s, want normalized escaped separator", got) + } +} diff --git a/shortcuts/vc/artifact-Empty Artifacts-tok003/transcript.txt b/shortcuts/vc/artifact-Empty Artifacts-tok003/transcript.txt deleted file mode 100644 index 97dd8118..00000000 --- a/shortcuts/vc/artifact-Empty Artifacts-tok003/transcript.txt +++ /dev/null @@ -1 +0,0 @@ -{"code":0,"data":{},"msg":"ok"} \ No newline at end of file diff --git a/shortcuts/vc/artifact-No Note Meeting-tok002/transcript.txt b/shortcuts/vc/artifact-No Note Meeting-tok002/transcript.txt deleted file mode 100644 index 97dd8118..00000000 --- a/shortcuts/vc/artifact-No Note Meeting-tok002/transcript.txt +++ /dev/null @@ -1 +0,0 @@ -{"code":0,"data":{},"msg":"ok"} \ No newline at end of file diff --git a/shortcuts/vc/artifact-Test Minutes-tok001/transcript.txt b/shortcuts/vc/artifact-Test Minutes-tok001/transcript.txt deleted file mode 100644 index 97dd8118..00000000 --- a/shortcuts/vc/artifact-Test Minutes-tok001/transcript.txt +++ /dev/null @@ -1 +0,0 @@ -{"code":0,"data":{},"msg":"ok"} \ No newline at end of file diff --git a/skills/lark-base/references/formula-field-guide.md b/skills/lark-base/references/formula-field-guide.md index bf673676..6ffe315a 100644 --- a/skills/lark-base/references/formula-field-guide.md +++ b/skills/lark-base/references/formula-field-guide.md @@ -1,4 +1,4 @@ -# Bitable Formula Writing Guide +# Base Formula Writing 
Guide ## Mandatory Read Acknowledgement @@ -121,7 +121,7 @@ When using comparison operators (`>`, `>=`, `<`, `<=`, `=`, `!=`), **both sides ## Section 4: Operators -Bitable formulas **only allow** the following operators. `like`, `in`, `<>`, `**`, `^` etc. are prohibited. +Base formulas **only allow** the following operators. `like`, `in`, `<>`, `**`, `^` etc. are prohibited. | Category | Operators | Description | | ------------- | -------------------------- | -------------------------------------------------------------------------- | diff --git a/skills/lark-base/references/lark-base-field-create.md b/skills/lark-base/references/lark-base-field-create.md index 75e6b0f9..398b0732 100644 --- a/skills/lark-base/references/lark-base-field-create.md +++ b/skills/lark-base/references/lark-base-field-create.md @@ -24,6 +24,11 @@ lark-cli base +field-create \ --base-token app_xxx \ --table-id tbl_xxx \ --json '{"name":"状态","type":"select","multiple":false,"options":[{"name":"Todo","hue":"Blue","lightness":"Lighter"},{"name":"Done","hue":"Green","lightness":"Light"}]}' + +lark-cli base +field-create \ + --base-token app_xxx \ + --table-id tbl_xxx \ + --json '{"name":"负责人","type":"user","multiple":false,"description":"用于标记记录的直接负责人;协作约定可参考[团队字段约定](https://example.com/field-spec)"}' ``` ## 参数 @@ -33,7 +38,6 @@ lark-cli base +field-create \ | `--base-token ` | 是 | Base Token | | `--table-id ` | 是 | 表 ID 或表名 | | `--json ` | 是 | 字段属性 JSON 对象 | - ## API 入参详情 **HTTP 方法和路径:** @@ -46,6 +50,7 @@ POST /open-apis/base/v3/bases/:base_token/tables/:table_id/fields - `--json` 必须是 **JSON 对象**,顶层直接传字段定义,不要再套一层。 - 顶层最少包含:`name`、`type`。 +- 如需字段说明,直接传 `description`;支持纯文本,也支持 Markdown 链接,如 `协作约定可参考[团队字段约定](https://example.com/field-spec)`。 - `type` 不同,必填子字段不同: - `select`:用 `multiple` + `options`(`options` 里只传 `name/hue/lightness`,不要传 `id`)。 - `link`:必须有 `link_table`,可选 `bidirectional`、`bidirectional_link_field_name`。 @@ -66,6 +71,17 @@ POST 
/open-apis/base/v3/bases/:base_token/tables/:table_id/fields } ``` +**字段说明示例** + +```json +{ + "name": "负责人", + "type": "user", + "multiple": false, + "description": "用于标记记录的直接负责人;协作约定可参考[团队字段约定](https://example.com/field-spec)" +} +``` + ## 返回重点 - 返回 `field` 和 `created: true`。 @@ -78,7 +94,7 @@ POST /open-apis/base/v3/bases/:base_token/tables/:table_id/fields ## 坑点 - ⚠️ 这是写入操作,执行前必须确认。 -- ⚠️ 当 `--json.type` 是 `formula` 或 `lookup` 时,先读对应 guide,再创建。 +- ⚠️ 当 `type` 是 `formula` 或 `lookup` 时,先读对应 guide,再创建。 ## 参考 diff --git a/skills/lark-base/references/lark-base-field-update.md b/skills/lark-base/references/lark-base-field-update.md index f010738b..b4c3d0f1 100644 --- a/skills/lark-base/references/lark-base-field-update.md +++ b/skills/lark-base/references/lark-base-field-update.md @@ -12,6 +12,12 @@ lark-cli base +field-update \ --table-id tbl_xxx \ --field-id fld_xxx \ --json '{"name":"状态","type":"select","multiple":false,"options":[{"name":"Todo","hue":"Blue","lightness":"Lighter"},{"name":"Doing","hue":"Orange","lightness":"Light"},{"name":"Done","hue":"Green","lightness":"Light"}]}' + +lark-cli base +field-update \ + --base-token app_xxx \ + --table-id tbl_xxx \ + --field-id fld_xxx \ + --json '{"name":"负责人","type":"user","multiple":false,"description":"用于标记记录的直接负责人;协作约定可参考[团队字段约定](https://example.com/field-spec)"}' ``` ## 参数 @@ -22,7 +28,6 @@ lark-cli base +field-update \ | `--table-id ` | 是 | 表 ID 或表名 | | `--field-id ` | 是 | 字段 ID 或字段名 | | `--json ` | 是 | 字段属性 JSON 对象 | - ## API 入参详情 **HTTP 方法和路径:** @@ -35,6 +40,7 @@ PUT /open-apis/base/v3/bases/:base_token/tables/:table_id/fields/:field_id - `--json` 必须是 **JSON 对象**,顶层直接传字段定义。 - 更新语义是 `PUT`(全量字段配置更新),不要只传零散片段;至少显式包含 `name`、`type`,并补齐该类型所需关键配置。 +- 如需字段说明,直接传 `description`;支持纯文本,也支持 Markdown 链接,如 `协作约定可参考[团队字段约定](https://example.com/field-spec)`。 - `select` 更新时:`options` 仍按对象数组传,避免混入无效字段。 - `link` 更新限制: - 不能把非 `link` 字段改成 `link`,也不能把 `link` 改成非 `link`。 @@ -55,6 +61,17 @@ PUT 
/open-apis/base/v3/bases/:base_token/tables/:table_id/fields/:field_id } ``` +**字段说明示例** + +```json +{ + "name": "负责人", + "type": "user", + "multiple": false, + "description": "用于标记记录的直接负责人;协作约定可参考[团队字段约定](https://example.com/field-spec)" +} +``` + ## 返回重点 - 返回 `field` 和 `updated: true`。 @@ -69,7 +86,7 @@ PUT /open-apis/base/v3/bases/:base_token/tables/:table_id/fields/:field_id - ⚠️ 这是全量字段属性更新语义,不是 patch。 - ⚠️ 这是写入操作,执行前必须确认。 -- ⚠️ 当 `--json.type` 是 `formula` 或 `lookup` 时,先阅读对应指南再执行。 +- ⚠️ 当 `type` 是 `formula` 或 `lookup` 时,先阅读对应指南再执行。 ## 参考 diff --git a/skills/lark-base/references/lark-base-shortcut-field-properties.md b/skills/lark-base/references/lark-base-shortcut-field-properties.md index 8b98ab01..09fbfde5 100644 --- a/skills/lark-base/references/lark-base-shortcut-field-properties.md +++ b/skills/lark-base/references/lark-base-shortcut-field-properties.md @@ -8,15 +8,24 @@ - `--json` 必须是 JSON 对象。 - 顶层统一使用:`type` + `name` + 类型特有字段。 +- 如需字段说明,直接传 `description`;支持纯文本,也支持 Markdown 链接。 - 不要使用旧结构:`field_name`、`property`、`ui_type`、数字枚举 `type`。 - `+field-update` 是 `PUT` 语义,建议先 `+field-get` 再全量提交目标字段配置。 - `type=formula` 或 `type=lookup` 创建时,必须先读对应 guide。 +```json +{ + "type": "text", + "name": "需求背景", + "description": "记录需求背景与已知约束;填写口径可参考[说明模板](https://example.com/spec)" +} +``` + ## 2. 
各类型格式与示例 ### 2.1 text -**要求**:`name` 必填;`style.type` 可选,默认 `plain`。 +**要求**:`name` 必填;可选传 `description`;`style.type` 可选,默认 `plain`。 ```json { @@ -36,6 +45,7 @@ "properties": { "type": { "type": "string", "const": "text", "description": "Text field type" }, "name": { "type": "string", "minLength": 1, "maxLength": 1000, "description": "Field name" }, + "description": { "type": "string", "description": "Field description; supports plain text or Markdown links" }, "style": { "type": "object", "properties": { "type": { "type": "string", "enum": ["plain", "phone", "url", "email", "barcode"], "description": "Text style type" } }, @@ -101,6 +111,7 @@ "properties": { "type": { "type": "string", "const": "number", "description": "Number field type" }, "name": { "type": "string", "minLength": 1, "maxLength": 1000, "description": "Field name" }, + "description": { "type": "string", "description": "Field description; supports plain text or Markdown links" }, "style": { "anyOf": [ { @@ -197,6 +208,7 @@ "properties": { "type": { "type": "string", "const": "select", "description": "Select field type" }, "name": { "type": "string", "minLength": 1, "maxLength": 1000, "description": "Field name" }, + "description": { "type": "string", "description": "Field description; supports plain text or Markdown links" }, "multiple": { "type": "boolean", "default": false, "description": "Allow multiple" }, "options": { "type": "array", @@ -250,6 +262,7 @@ "properties": { "type": { "type": "string", "const": "datetime", "description": "Date time type" }, "name": { "type": "string", "minLength": 1, "maxLength": 1000, "description": "Field name" }, + "description": { "type": "string", "description": "Field description; supports plain text or Markdown links" }, "style": { "type": "object", "properties": { "format": { "type": "string", "enum": ["yyyy/MM/dd", "yyyy/MM/dd HH:mm", "yyyy/MM/dd HH:mm Z", "yyyy-MM-dd", "yyyy-MM-dd HH:mm", "yyyy-MM-dd HH:mm Z", "MM-dd", "MM/dd/yyyy", "dd/MM/yyyy"], 
"default": "yyyy/MM/dd", "description": "Date format" } }, @@ -273,6 +286,7 @@ "properties": { "type": { "type": "string", "const": "created_at", "description": "Created time type" }, "name": { "type": "string", "minLength": 1, "maxLength": 1000, "description": "Field name" }, + "description": { "type": "string", "description": "Field description; supports plain text or Markdown links" }, "style": { "type": "object", "properties": { "format": { "type": "string", "enum": ["yyyy/MM/dd", "yyyy/MM/dd HH:mm", "yyyy/MM/dd HH:mm Z", "yyyy-MM-dd", "yyyy-MM-dd HH:mm", "yyyy-MM-dd HH:mm Z", "MM-dd", "MM/dd/yyyy", "dd/MM/yyyy"], "default": "yyyy/MM/dd", "description": "Date format" } }, @@ -296,6 +310,7 @@ "properties": { "type": { "type": "string", "const": "updated_at", "description": "Modified time type" }, "name": { "type": "string", "minLength": 1, "maxLength": 1000, "description": "Field name" }, + "description": { "type": "string", "description": "Field description; supports plain text or Markdown links" }, "style": { "type": "object", "properties": { "format": { "type": "string", "enum": ["yyyy/MM/dd", "yyyy/MM/dd HH:mm", "yyyy/MM/dd HH:mm Z", "yyyy-MM-dd", "yyyy-MM-dd HH:mm", "yyyy-MM-dd HH:mm Z", "MM-dd", "MM/dd/yyyy", "dd/MM/yyyy"], "default": "yyyy/MM/dd", "description": "Date format" } }, @@ -330,7 +345,7 @@ ```json { "type": "object", - "properties": { "type": { "type": "string", "const": "user", "description": "User field type" }, "name": { "type": "string", "minLength": 1, "maxLength": 1000, "description": "Field name" }, "multiple": { "type": "boolean", "default": true, "description": "Allow multiple" } }, + "properties": { "type": { "type": "string", "const": "user", "description": "User field type" }, "name": { "type": "string", "minLength": 1, "maxLength": 1000, "description": "Field name" }, "description": { "type": "string", "description": "Field description; supports plain text or Markdown links" }, "multiple": { "type": "boolean", "default": true, 
"description": "Allow multiple" } }, "required": ["type", "name"], "additionalProperties": false, "description": "User field", @@ -343,7 +358,7 @@ ```json { "type": "object", - "properties": { "type": { "type": "string", "const": "created_by", "description": "Created by type" }, "name": { "type": "string", "minLength": 1, "maxLength": 1000, "description": "Field name" } }, + "properties": { "type": { "type": "string", "const": "created_by", "description": "Created by type" }, "name": { "type": "string", "minLength": 1, "maxLength": 1000, "description": "Field name" }, "description": { "type": "string", "description": "Field description; supports plain text or Markdown links" } }, "required": ["type", "name"], "additionalProperties": false, "description": "Created by field", @@ -356,7 +371,7 @@ ```json { "type": "object", - "properties": { "type": { "type": "string", "const": "updated_by", "description": "Modified by type" }, "name": { "type": "string", "minLength": 1, "maxLength": 1000, "description": "Field name" } }, + "properties": { "type": { "type": "string", "const": "updated_by", "description": "Modified by type" }, "name": { "type": "string", "minLength": 1, "maxLength": 1000, "description": "Field name" }, "description": { "type": "string", "description": "Field description; supports plain text or Markdown links" } }, "required": ["type", "name"], "additionalProperties": false, "description": "Modified by field", @@ -386,6 +401,7 @@ "properties": { "type": { "type": "string", "const": "link", "description": "Link field type" }, "name": { "type": "string", "minLength": 1, "maxLength": 1000, "description": "Field name" }, + "description": { "type": "string", "description": "Field description; supports plain text or Markdown links" }, "link_table": { "type": "string", "minLength": 1, "maxLength": 100, "description": "Linked table" }, "bidirectional": { "type": "boolean", "default": false, "description": "Bidirectional link" }, "bidirectional_link_field_name": 
{ "$ref": "#/properties/name", "description": "Bidirectional link field name" } @@ -414,7 +430,7 @@ ```json { "type": "object", - "properties": { "type": { "type": "string", "const": "formula", "description": "Formula field type" }, "name": { "type": "string", "minLength": 1, "maxLength": 1000, "description": "Field name" }, "expression": { "type": "string", "description": "Formula expression" } }, + "properties": { "type": { "type": "string", "const": "formula", "description": "Formula field type" }, "name": { "type": "string", "minLength": 1, "maxLength": 1000, "description": "Field name" }, "description": { "type": "string", "description": "Field description; supports plain text or Markdown links" }, "expression": { "type": "string", "description": "Formula expression" } }, "required": ["type", "name", "expression"], "additionalProperties": false, "description": "Formula field", @@ -451,6 +467,7 @@ "properties": { "type": { "type": "string", "const": "lookup", "description": "Lookup field type" }, "name": { "type": "string", "minLength": 1, "maxLength": 1000, "description": "Field name" }, + "description": { "type": "string", "description": "Field description; supports plain text or Markdown links" }, "from": { "type": "string", "minLength": 1, "maxLength": 100, "description": "Source data table" }, "select": { "type": "string", "minLength": 1, "maxLength": 100, "description": "Field to aggregate from source table" }, "where": { @@ -545,6 +562,7 @@ "properties": { "type": { "type": "string", "const": "auto_number", "description": "Auto number type" }, "name": { "type": "string", "minLength": 1, "maxLength": 1000, "description": "Field name" }, + "description": { "type": "string", "description": "Field description; supports plain text or Markdown links" }, "style": { "type": "object", "properties": { @@ -620,7 +638,7 @@ ```json { "type": "object", - "properties": { "type": { "type": "string", "const": "attachment", "description": "Attachment field type" }, 
"name": { "type": "string", "minLength": 1, "maxLength": 1000, "description": "Field name" } }, + "properties": { "type": { "type": "string", "const": "attachment", "description": "Attachment field type" }, "name": { "type": "string", "minLength": 1, "maxLength": 1000, "description": "Field name" }, "description": { "type": "string", "description": "Field description; supports plain text or Markdown links" } }, "required": ["type", "name"], "additionalProperties": false, "description": "Attachment field", @@ -633,7 +651,7 @@ ```json { "type": "object", - "properties": { "type": { "type": "string", "const": "location", "description": "Location field type" }, "name": { "type": "string", "minLength": 1, "maxLength": 1000, "description": "Field name" } }, + "properties": { "type": { "type": "string", "const": "location", "description": "Location field type" }, "name": { "type": "string", "minLength": 1, "maxLength": 1000, "description": "Field name" }, "description": { "type": "string", "description": "Field description; supports plain text or Markdown links" } }, "required": ["type", "name"], "additionalProperties": false, "description": "Location field", @@ -646,7 +664,7 @@ ```json { "type": "object", - "properties": { "type": { "type": "string", "const": "checkbox", "description": "Checkbox field type" }, "name": { "type": "string", "minLength": 1, "maxLength": 1000, "description": "Field name" } }, + "properties": { "type": { "type": "string", "const": "checkbox", "description": "Checkbox field type" }, "name": { "type": "string", "minLength": 1, "maxLength": 1000, "description": "Field name" }, "description": { "type": "string", "description": "Field description; supports plain text or Markdown links" } }, "required": ["type", "name"], "additionalProperties": false, "description": "Checkbox field", diff --git a/skills/lark-base/references/lookup-field-guide.md b/skills/lark-base/references/lookup-field-guide.md index d5df607b..e99e5771 100644 --- 
a/skills/lark-base/references/lookup-field-guide.md +++ b/skills/lark-base/references/lookup-field-guide.md @@ -1,4 +1,4 @@ -# Bitable Lookup Field Configuration Guide +# Base Lookup Field Configuration Guide ## Mandatory Read Acknowledgement diff --git a/skills/lark-calendar/SKILL.md b/skills/lark-calendar/SKILL.md index 46dd4eb2..056fc084 100644 --- a/skills/lark-calendar/SKILL.md +++ b/skills/lark-calendar/SKILL.md @@ -1,7 +1,7 @@ --- name: lark-calendar version: 1.0.0 -description: "飞书日历(calendar):提供日历与日程(会议)的全面管理能力。核心场景包括:查看/搜索日程、创建/更新日程、管理参会人、查询忙闲状态及推荐空闲时段。高频操作请优先使用 Shortcuts:+agenda(快速概览今日/近期行程)、+create(创建日程并按需邀请参会人)、+freebusy(查询用户主日历的忙闲信息和rsvp的状态)、+suggestion(针对时间未确定的预约日程需求,提供多个时间推荐方案)。" +description: "飞书日历(calendar):提供日历与日程(会议)的全面管理能力。核心场景包括:查看/搜索日程、创建/更新日程、管理参会人、查询忙闲状态及推荐空闲时段。高频操作请优先使用 Shortcuts:+agenda(快速概览今日/近期行程)、+create(创建日程并按需邀请参会人)、+freebusy(查询用户主日历的忙闲信息和rsvp的状态)、+rsvp(回复日程邀请)、+suggestion(针对时间未确定的预约日程需求,提供多个时间推荐方案)。" metadata: requires: bins: ["lark-cli"] @@ -81,6 +81,7 @@ Shortcut 是对常用操作的高级封装(`lark-cli calendar + [flags]` | [`+agenda`](references/lark-calendar-agenda.md) | 查看日程安排(默认今天) | | [`+create`](references/lark-calendar-create.md) | 创建日程并邀请参会人(ISO 8601 时间) | | [`+freebusy`](references/lark-calendar-freebusy.md) | 查询用户主日历的忙闲信息和rsvp的状态 | +| [`+rsvp`](references/lark-calendar-rsvp.md) | 回复日程(接受/拒绝/待定) | | [`+suggestion`](references/lark-calendar-suggestion.md) | 针对时间未确定的预约日程需求,提供多个时间推荐方案 | ## +suggestion 使用 diff --git a/skills/lark-calendar/references/lark-calendar-rsvp.md b/skills/lark-calendar/references/lark-calendar-rsvp.md new file mode 100644 index 00000000..c6328056 --- /dev/null +++ b/skills/lark-calendar/references/lark-calendar-rsvp.md @@ -0,0 +1,42 @@ +# calendar +rsvp + +> **前置条件:** 先阅读 [`../lark-shared/SKILL.md`](../../lark-shared/SKILL.md) 了解认证、全局参数和安全规则。 + +回复指定的日程,更新当前用户的 RSVP 状态(接受、拒绝或待定)。 + +需要的scopes: ["calendar:calendar.event:reply"] + +## 命令 + +```bash +# 回复日程为接受 (使用主日历) +lark-cli calendar +rsvp --event-id evt_xxx 
--rsvp-status accept + +# 回复日程为拒绝 +lark-cli calendar +rsvp --event-id evt_xxx --rsvp-status decline + +# 回复日程为待定 +lark-cli calendar +rsvp --event-id evt_xxx --rsvp-status tentative + +# 指定其他日历下的日程 +lark-cli calendar +rsvp --calendar-id cal_xxx --event-id evt_xxx --rsvp-status accept +``` + +## 参数 + +| 参数 | 必填 | 说明 | +|------|------|------| +| `--event-id ` | **是** | 日程 ID | +| `--rsvp-status ` | **是** | 回复状态,可选值:`accept` (接受), `decline` (拒绝), `tentative` (待定) | +| `--calendar-id ` | 否 | 日历 ID(省略则使用主日历) | +| `--dry-run` | 否 | 预览 API 调用,不执行 | + +## 提示 + +- 只能回复你被邀请的日程。 +- 调用前通常需要通过 `+agenda` 等命令获取到具体的 `event-id`。 + +## 参考 + +- [lark-calendar](../SKILL.md) -- 日历全部命令 +- [lark-shared](../../lark-shared/SKILL.md) -- 认证和全局参数 diff --git a/skills/lark-doc/references/lark-doc-create.md b/skills/lark-doc/references/lark-doc-create.md index d0ce0783..b26a162f 100644 --- a/skills/lark-doc/references/lark-doc-create.md +++ b/skills/lark-doc/references/lark-doc-create.md @@ -437,7 +437,7 @@ lark-cli docs +create --title "空白画板示例" --markdown ' diff --git a/skills/lark-drive/SKILL.md b/skills/lark-drive/SKILL.md index 147163bf..a8bcd58a 100644 --- a/skills/lark-drive/SKILL.md +++ b/skills/lark-drive/SKILL.md @@ -164,6 +164,11 @@ Shortcut 是对常用操作的高级封装(`lark-cli drive + [flags]`) | [`+upload`](references/lark-drive-upload.md) | Upload a local file to Drive | | [`+download`](references/lark-drive-download.md) | Download a file from Drive to local | | [`+add-comment`](references/lark-drive-add-comment.md) | Add a full-document comment, or a local comment to selected docx text (also supports wiki URL resolving to doc/docx) | +| [`+export`](references/lark-drive-export.md) | Export a doc/docx/sheet/bitable to a local file with limited polling | +| [`+export-download`](references/lark-drive-export-download.md) | Download an exported file by file_token | +| [`+import`](references/lark-drive-import.md) | Import a local file to Drive as a cloud document (docx, sheet, bitable) | +| 
[`+move`](references/lark-drive-move.md) | Move a file or folder to another location in Drive | +| [`+task_result`](references/lark-drive-task-result.md) | Poll async task result for import, export, move, or delete operations | ## API Resources @@ -177,6 +182,8 @@ lark-cli drive [flags] # 调用 API ### files - `copy` — 复制文件 + - `create_folder` — 新建文件夹 + - `list` — 获取文件夹下的清单 ### file.comments @@ -208,11 +215,21 @@ lark-cli drive [flags] # 调用 API - `subscription` — 订阅用户、应用维度事件(本次开放评论添加事件) - `subscription_status` — 查询用户、应用对指定事件的订阅状态 +### file.statistics + + - `get` — 获取文件统计信息 + +### file.view_records + + - `list` — 获取文档的访问者记录 + ## 权限表 | 方法 | 所需 scope | |------|-----------| | `files.copy` | `docs:document:copy` | +| `files.create_folder` | `space:folder:create` | +| `files.list` | `space:document:retrieve` | | `file.comments.batch_query` | `docs:document.comment:read` | | `file.comments.create_v2` | `docs:document.comment:create` | | `file.comments.list` | `docs:document.comment:read` | @@ -228,4 +245,5 @@ lark-cli drive [flags] # 调用 API | `user.remove_subscription` | `docs:event:subscribe` | | `user.subscription` | `docs:event:subscribe` | | `user.subscription_status` | `docs:event:subscribe` | - +| `file.statistics.get` | `drive:drive.metadata:readonly` | +| `file.view_records.list` | `drive:file:view_record:readonly` | diff --git a/skills/lark-drive/references/lark-drive-export-download.md b/skills/lark-drive/references/lark-drive-export-download.md new file mode 100644 index 00000000..42c4fdc8 --- /dev/null +++ b/skills/lark-drive/references/lark-drive-export-download.md @@ -0,0 +1,50 @@ + +# drive +export-download + +> **前置条件:** 先阅读 [`../lark-shared/SKILL.md`](../../lark-shared/SKILL.md) 了解认证、全局参数和安全规则。 + +根据导出任务产物的 `file_token` 下载本地文件。通常与 `drive +task_result --scenario export` 配合使用。 + +## 命令 + +```bash +# 使用服务端返回的文件名下载到当前目录 +lark-cli drive +export-download \ + --file-token "" + +# 下载到指定目录 +lark-cli drive +export-download \ + --file-token "" \ + --output-dir 
./exports + +# 指定本地文件名 +lark-cli drive +export-download \ + --file-token "" \ + --file-name "weekly-report.pdf" \ + --output-dir ./exports + +# 允许覆盖 +lark-cli drive +export-download \ + --file-token "" \ + --overwrite +``` + +## 参数 + +| 参数 | 必填 | 说明 | +|------|------|------| +| `--file-token` | 是 | 导出完成后的产物 token | +| `--file-name` | 否 | 覆盖默认文件名 | +| `--output-dir` | 否 | 本地输出目录,默认当前目录 | +| `--overwrite` | 否 | 覆盖已存在文件 | + +## 使用顺序 + +1. 用 `drive +export` 发起导出 +2. 如果返回 `ticket` / `next_command`,用 `drive +task_result --scenario export --ticket --file-token ` 继续查 +3. 查到 `file_token` 后,用 `drive +export-download` 下载 + +## 参考 + +- [lark-drive](../SKILL.md) -- 云空间全部命令 +- [lark-shared](../../lark-shared/SKILL.md) -- 认证和全局参数 diff --git a/skills/lark-drive/references/lark-drive-export.md b/skills/lark-drive/references/lark-drive-export.md new file mode 100644 index 00000000..f60917c0 --- /dev/null +++ b/skills/lark-drive/references/lark-drive-export.md @@ -0,0 +1,100 @@ + +# drive +export + +> **前置条件:** 先阅读 [`../lark-shared/SKILL.md`](../../lark-shared/SKILL.md) 了解认证、全局参数和安全规则。 + +把 `doc` / `docx` / `sheet` / `bitable` 导出到本地文件。这个 shortcut 内置有限轮询: + +- 如果导出任务在轮询窗口内完成,会直接下载到本地目录 +- 如果轮询结束仍未完成,会返回 `ticket`、`ready=false`、`timed_out=true` 和 `next_command` +- 后续继续查结果时,改用 `drive +task_result --scenario export` +- 拿到 `file_token` 后,改用 `drive +export-download` + +## 命令 + +```bash +# 导出新版文档为 pdf,默认保存到当前目录 +lark-cli drive +export \ + --token "" \ + --doc-type docx \ + --file-extension pdf + +# 导出旧版文档为 docx +lark-cli drive +export \ + --token "" \ + --doc-type doc \ + --file-extension docx + +# 导出 docx 为 markdown +# 注意:markdown 只支持 docx,底层走 /open-apis/docs/v1/content +lark-cli drive +export \ + --token "" \ + --doc-type docx \ + --file-extension markdown + +# 导出电子表格为 xlsx +lark-cli drive +export \ + --token "" \ + --doc-type sheet \ + --file-extension xlsx \ + --output-dir ./exports + +# 导出电子表格或多维表格为 csv 时,必须传 sub_id +lark-cli drive +export \ + --token "" \ + --doc-type "" \ + 
--file-extension csv \ + --sub-id "" \ + --output-dir ./exports + +# 允许覆盖已存在文件 +lark-cli drive +export \ + --token "" \ + --doc-type docx \ + --file-extension pdf \ + --overwrite +``` + +## 参数 + +| 参数 | 必填 | 说明 | +|------|------|------| +| `--token` | 是 | 源文档 token | +| `--doc-type` | 是 | 源文档类型:`doc` / `docx` / `sheet` / `bitable` | +| `--file-extension` | 是 | 导出格式:`docx` / `pdf` / `xlsx` / `csv` / `markdown` | +| `--sub-id` | 条件必填 | 当 `sheet` / `bitable` 导出为 `csv` 时必填 | +| `--output-dir` | 否 | 本地输出目录,默认当前目录 | +| `--overwrite` | 否 | 覆盖已存在文件 | + +## 关键约束 + +- `markdown` 只支持 `docx` +- `sheet` / `bitable` 导出为 `csv` 时必须带 `--sub-id` +- shortcut 内部固定有限轮询:最多 10 次,每次间隔 5 秒 +- 轮询超时不是失败;会返回 `ticket`、`timed_out=true` 和 `next_command`,供后续继续查询 + +## 推荐续跑方式 + +```bash +# 第一步:先尝试直接导出 +lark-cli drive +export \ + --token "" \ + --doc-type docx \ + --file-extension pdf + +# 如果返回 ready=false / timed_out=true,再继续查 +lark-cli drive +task_result \ + --scenario export \ + --ticket "" \ + --file-token "" + +# 查到 file_token 后下载 +lark-cli drive +export-download \ + --file-token "" \ + --output-dir ./exports +``` + +## 参考 + +- [lark-drive](../SKILL.md) -- 云空间全部命令 +- [lark-shared](../../lark-shared/SKILL.md) -- 认证和全局参数 diff --git a/skills/lark-drive/references/lark-drive-import.md b/skills/lark-drive/references/lark-drive-import.md new file mode 100644 index 00000000..58041bda --- /dev/null +++ b/skills/lark-drive/references/lark-drive-import.md @@ -0,0 +1,80 @@ +# drive +import + +> **前置条件:** 先阅读 [`../lark-shared/SKILL.md`](../../lark-shared/SKILL.md) 了解认证、全局参数和安全规则。 + +将本地文件(如 Word、TXT、Markdown、Excel 等)导入并转换为飞书在线云文档(docx、sheet、bitable)。底层统一通过 `POST /open-apis/drive/v1/import_tasks` 接口创建导入任务,并在 shortcut 内做有限次数轮询 `GET /open-apis/drive/v1/import_tasks/:ticket`。 + +## 命令 + +```bash +# 导入 Markdown 为新版文档 (docx) +lark-cli drive +import --file ./README.md --type docx + +# 导入 Excel 为电子表格 (sheet) +lark-cli drive +import --file ./data.xlsx --type sheet + +# 导入到指定文件夹,并指定导入后的文件名 +lark-cli drive +import 
--file ./data.csv --type bitable --folder-token <folder_token> --name "导入数据表" + +# 预览底层调用链(上传 -> 创建任务 -> 轮询) +lark-cli drive +import --file ./README.md --type docx --dry-run +``` + +## 参数 + +| 参数 | 必填 | 说明 | +|------|------|------| +| `--file` | 是 | 本地文件路径,根据文件后缀名自动推断 `file_extension`,最大支持 20MB | +| `--type` | 是 | 导入目标云文档格式。可选值:`docx` (新版文档)、`sheet` (电子表格)、`bitable` (多维表格) | +| `--folder-token` | 否 | 目标文件夹 token,不传则请求中的 `point.mount_key` 为空字符串,Import API 会将其解释为导入到云空间根目录 | +| `--name` | 否 | 导入后的在线云文档名称,不传默认使用本地文件名去掉扩展名后的结果 | + +## 行为说明 + +- **三步执行**:此 shortcut 内部封装了完整流程: + 1. 自动调用素材上传接口 (`/open-apis/drive/v1/medias/upload_all`) 获取源文件的 `file_token` + 2. 调用 `import_tasks` 接口发起导入任务,自动根据本地文件提取扩展名并构造挂载点(`mount_point`)参数 + 3. 自动轮询查询导入任务状态;如果在内置轮询窗口内完成,则直接返回导入结果;如果仍未完成,则返回 `ticket`、当前状态和后续查询命令 +- **默认根目录行为**:不传 `--folder-token` 时,shortcut 会保留空的 `point.mount_key`,Lark Import API 会将其视为“导入到调用者根目录”。 + +### 支持的文件类型转换 + +本地文件扩展名与目标云文档类型的对应关系如下: + +| 本地文件扩展名 | 可导入为 | 说明 | +|--------------|---------|------| +| `.docx`, `.doc` | `docx` | Microsoft Word 文档 | +| `.txt` | `docx` | 纯文本文件 | +| `.md`, `.markdown`, `.mark` | `docx` | Markdown 文档 | +| `.html` | `docx` | HTML 文档 | +| `.xlsx`, `.xls` | `sheet`, `bitable` | Microsoft Excel 表格 | +| `.csv` | `sheet`, `bitable` | CSV 数据文件 | + +> [!IMPORTANT] +> 文件扩展名与目标文档类型必须匹配,否则会返回验证错误: +> - 文档类文件(.docx, .doc, .txt, .md, .html)**只能**导入为 `docx` +> - 表格类文件(.xlsx, .xls, .csv)**只能**导入为 `sheet` 或 `bitable` +> - 例如:`.csv` 文件不能导入为 `docx`,`.md` 文件不能导入为 `sheet` + +- 若导入任务执行失败,会返回失败时的 `job_status` 及错误信息。 +- 若内置轮询超时但任务仍在处理中,shortcut 会成功返回,并带上: + - `ready=false` + - `timed_out=true` + - `next_command`:可直接复制执行的后续查询命令,例如 `lark-cli drive +task_result --scenario import --ticket <ticket>` +- 如果文件超过 20MB 上限,或者文件扩展名不被支持,执行时将抛出验证错误。 + +### 超时后的继续查询 + +当 `+import` 的内置轮询窗口结束但任务尚未完成时,使用返回结果中的 `ticket` 继续查询: + +```bash +lark-cli drive +task_result --scenario import --ticket <ticket> +``` + +> [!CAUTION] +> `drive +import` 是**写入操作** —— 执行前必须确认用户意图。 + +## 参考 + +- [lark-drive](../SKILL.md) -- 云空间全部命令 
+- [lark-shared](../../lark-shared/SKILL.md) -- 认证和全局参数 diff --git a/skills/lark-drive/references/lark-drive-move.md b/skills/lark-drive/references/lark-drive-move.md new file mode 100644 index 00000000..57d93132 --- /dev/null +++ b/skills/lark-drive/references/lark-drive-move.md @@ -0,0 +1,92 @@ + +# drive +move + +> **前置条件:** 先阅读 [`../lark-shared/SKILL.md`](../../lark-shared/SKILL.md) 了解认证、全局参数和安全规则。 + +将文件或文件夹移动到用户云空间的其他位置。 + +## 命令 + +```bash +# 移动文件到指定文件夹 +lark-cli drive +move \ + --file-token \ + --type file \ + --folder-token + +# 移动文档到指定文件夹 +lark-cli drive +move \ + --file-token \ + --type docx \ + --folder-token + +# 移动文件夹(异步操作,会自动有限轮询任务状态) +lark-cli drive +move \ + --file-token \ + --type folder \ + --folder-token + +# 移动到根文件夹(不指定 --folder-token) +lark-cli drive +move \ + --file-token \ + --type file +``` + +## 参数 + +| 参数 | 必填 | 说明 | +|------|------|------| +| `--file-token` | 是 | 需要移动的文件或文件夹 token | +| `--type` | 是 | 文件类型,可选值:`file` (普通文件)、`docx` (新版文档)、`bitable` (多维表格)、`doc` (旧版文档)、`sheet` (电子表格)、`mindnote` (思维笔记)、`folder` (文件夹)、`slides` (幻灯片) | +| `--folder-token` | 否 | 目标文件夹 token,不指定则移动到根文件夹 | + +## 文件类型说明 + +| 类型 | 说明 | +|------|------| +| `file` | 普通文件 | +| `docx` | 新版云文档 | +| `doc` | 旧版云文档 | +| `sheet` | 电子表格 | +| `bitable` | 多维表格 | +| `mindnote` | 思维笔记 | +| `slides` | 幻灯片 | +| `folder` | 文件夹(移动文件夹是异步操作) | + +## 行为说明 + +- **普通文件移动**:同步操作,立即完成 +- **文件夹移动**:异步操作,接口返回 `task_id`,shortcut 会先做有限轮询;如果在轮询窗口内完成,则直接返回成功结果 +- **轮询超时不是失败**:文件夹移动内置最多轮询 30 次、每次间隔 2 秒;如果轮询结束任务仍未完成,会返回 `task_id`、`status`、`ready=false`、`timed_out=true` 和 `next_command` +- **继续查询**:当看到 `next_command` 时,改用 `lark-cli drive +task_result --scenario task_check --task-id ` 继续查询 +- **目标文件夹**:如果不指定 `--folder-token`,文件将被移动到用户的根文件夹("我的空间") +- **权限要求**:需要被移动文件的可管理权限、被移动文件所在位置的编辑权限、目标位置的编辑权限 + +## 推荐续跑方式 + +```bash +# 第一步:先直接移动文件夹 +lark-cli drive +move \ + --file-token \ + --type folder \ + --folder-token + +# 如果返回 ready=false / timed_out=true,再继续查 +lark-cli drive +task_result \ + --scenario 
task_check \ + --task-id <task_id> +``` + +## 限制 + +- 被移动的文件不支持 wiki 文档 +- 该接口不支持并发调用 +- 调用频率上限为 5 QPS 且 10000 次/天 + +> [!CAUTION] +> 这是**写入操作** —— 执行前必须确认用户意图。 + +## 参考 + +- [lark-drive](../SKILL.md) -- 云空间全部命令 +- [lark-shared](../../lark-shared/SKILL.md) -- 认证和全局参数 diff --git a/skills/lark-drive/references/lark-drive-task-result.md b/skills/lark-drive/references/lark-drive-task-result.md new file mode 100644 index 00000000..4c42aaed --- /dev/null +++ b/skills/lark-drive/references/lark-drive-task-result.md @@ -0,0 +1,170 @@ + +# drive +task_result + +> **前置条件:** 先阅读 [`../lark-shared/SKILL.md`](../../lark-shared/SKILL.md) 了解认证、全局参数和安全规则。 + +查询异步任务结果。该 shortcut 聚合了导入、导出、移动/删除文件夹等多种异步任务的结果查询,统一接口方便调用。 + +## 命令 + +```bash +# 查询导入任务结果 +lark-cli drive +task_result \ + --scenario import \ + --ticket <ticket> + +# 查询导出任务结果 +lark-cli drive +task_result \ + --scenario export \ + --ticket <ticket> \ + --file-token <file_token> + +# 查询移动/删除文件夹任务状态 +lark-cli drive +task_result \ + --scenario task_check \ + --task-id <task_id> +``` + +## 参数 + +| 参数 | 必填 | 说明 | +|------|------|------| +| `--scenario` | 是 | 任务场景,可选值:`import` (导入任务)、`export` (导出任务)、`task_check` (移动/删除文件夹任务) | +| `--ticket` | 条件必填 | 异步任务 ticket,**import/export 场景必填** | +| `--task-id` | 条件必填 | 异步任务 ID,**task_check 场景必填** | +| `--file-token` | 条件必填 | 导出任务对应的源文档 token,**export 场景必填** | + +## 场景说明 + +| 场景 | 说明 | 所需参数 | +|------|------|----------| +| `import` | 文档导入任务(如将本地文件导入为云文档) | `--ticket` | +| `export` | 文档导出任务(如云文档导出为 PDF/Word) | `--ticket`、`--file-token` | +| `task_check` | 文件夹移动/删除任务 | `--task-id` | + +## 返回结果 + +### Import 场景返回 + +```json +{ + "scenario": "import", + "ticket": "<ticket>", + "type": "sheet", + "ready": true, + "failed": false, + "job_status": 0, + "job_status_label": "success", + "job_error_msg": "success", + "token": "<sheet_token>", + "url": "https://example.feishu.cn/sheets/<sheet_token>", + "extra": ["2000"] +} +``` + +**字段说明:** +- `ready`: 是否已经导入完成,可直接使用 `token` / `url` +- `failed`: 是否已经失败 +- `job_status`: 服务端返回的原始状态码 +- `job_status_label`: 便于阅读的状态标签,例如 `success` / 
`processing` +- `token`: 导入后的文档 token +- `url`: 导入后的文档链接 + +### Export 场景返回 + +```json +{ + "scenario": "export", + "ticket": "", + "ready": true, + "failed": false, + "file_extension": "pdf", + "type": "doc", + "file_name": "docName", + "file_token": "", + "file_size": 34356, + "job_error_msg": "success", + "job_status": 0, + "job_status_label": "success" +} +``` + +**字段说明:** +- `ready`: 是否已经完成导出,可直接使用 `file_token` +- `failed`: 是否已经失败 +- `job_status`: 服务端返回的原始状态码 +- `job_status_label`: 便于阅读的状态标签,例如 `success` / `processing` +- `file_token`: 导出文件的 token,用于下载 +- `file_extension`: 导出文件扩展名 +- `file_size`: 导出文件大小(字节) + +### Task_check 场景返回 + +```json +{ + "scenario": "task_check", + "task_id": "", + "status": "success", + "ready": true, + "failed": false +} +``` + +**字段说明:** +- `status`: 任务状态,`success`=成功,`failed`=失败,`pending`=处理中 +- `ready`: 是否已经完成 +- `failed`: 是否已经失败 + +## 使用场景 + +### 配合 +import 使用 + +```bash +# 1. 创建导入任务 +lark-cli drive +import --file ./data.xlsx --type sheet +# 若任务很快完成:直接返回 token / url +# 若内置轮询超时:返回 ready=false、ticket 和 next_command + +# 2. 轮询导入结果 +lark-cli drive +task_result --scenario import --ticket +``` + +### 配合 +move 使用 + +```bash +# 1. 移动文件夹(异步操作) +lark-cli drive +move --file-token --type folder --folder-token +# 若轮询窗口内完成:直接返回 ready=true +# 若内置轮询结束仍未完成:返回 ready=false、task_id 和 next_command + +# 2. 轮询移动结果 +lark-cli drive +task_result --scenario task_check --task-id +``` + +### 配合 +export 使用 + +```bash +# 1. 发起导出 +lark-cli drive +export --token --doc-type docx --file-extension pdf +# 若轮询窗口内完成:直接下载本地文件 +# 若内置轮询结束仍未完成:返回 ready=false、ticket 和 next_command + +# 2. 继续查询导出结果 +lark-cli drive +task_result --scenario export --ticket --file-token + +# 3. 
拿到 file_token 后下载 +lark-cli drive +export-download --file-token +``` + +## 权限要求 + +| 场景 | 所需 scope | +|------|-----------| +| import | `drive:drive.metadata:readonly` | +| export | `drive:drive.metadata:readonly` | +| task_check | `drive:drive.metadata:readonly` | + +## 参考 + +- [lark-drive](../SKILL.md) -- 云空间全部命令 +- [lark-shared](../../lark-shared/SKILL.md) -- 认证和全局参数 diff --git a/skills/lark-im/SKILL.md b/skills/lark-im/SKILL.md index e33ee515..310aca4a 100644 --- a/skills/lark-im/SKILL.md +++ b/skills/lark-im/SKILL.md @@ -61,10 +61,10 @@ Shortcut 是对常用操作的高级封装(`lark-cli im + [flags]`)。 | [`+chat-search`](references/lark-im-chat-search.md) | Search visible group chats by keyword and/or member open_ids (e.g. look up chat_id by group name); user/bot; supports member/type filters, sorting, and pagination | | [`+chat-update`](references/lark-im-chat-update.md) | Update group chat name or description; user/bot; updates a chat's name or description | | [`+messages-mget`](references/lark-im-messages-mget.md) | Batch get messages by IDs; user/bot; fetches up to 50 om_ message IDs, formats sender names, expands thread replies | -| [`+messages-reply`](references/lark-im-messages-reply.md) | Reply to a message (supports thread replies) with bot identity; bot-only; supports text/markdown/post/media replies, reply-in-thread, idempotency key | +| [`+messages-reply`](references/lark-im-messages-reply.md) | Reply to a message (supports thread replies); user/bot; supports text/markdown/post/media replies, reply-in-thread, idempotency key | | [`+messages-resources-download`](references/lark-im-messages-resources-download.md) | Download images/files from a message; user/bot; downloads image/file resources by message-id and file-key to a safe relative output path | -| [`+messages-search`](references/lark-im-messages-search.md) | Search messages across chats (supports keyword, sender, time range filters) with user identity; user-only; filters by chat/sender/attachment/time, enriches 
results via mget and chats batch_query | -| [`+messages-send`](references/lark-im-messages-send.md) | Send a message to a chat or direct message with bot identity; bot-only; sends to chat-id or user-id with text/markdown/post/media, supports idempotency key | +| [`+messages-search`](references/lark-im-messages-search.md) | Search messages across chats (supports keyword, sender, time range filters) with user identity; user-only; filters by chat/sender/attachment/time, supports auto-pagination via `--page-all` / `--page-limit`, enriches results via batched mget and chats batch_query | +| [`+messages-send`](references/lark-im-messages-send.md) | Send a message to a chat or direct message; user/bot; sends to chat-id or user-id with text/markdown/post/media, supports idempotency key | | [`+threads-messages-list`](references/lark-im-threads-messages-list.md) | List messages in a thread; user/bot; accepts om_/omt_ input, resolves message IDs to thread_id, supports sort/pagination | ## API Resources @@ -87,6 +87,7 @@ lark-cli im [flags] # 调用 API ### chat.members - `create` — 将用户或机器人拉入群聊。Identity: supports `user` and `bot`; the caller must be in the target chat; for `bot` calls, added users must be within the app's availability; for internal chats the operator must belong to the same tenant; if only owners/admins can add members, the caller must be an owner/admin, or a chat-creator bot with `im:chat:operate_as_owner`. + - `delete` — 将用户或机器人移出群聊。Identity: supports `user` and `bot`; only group owner, admin, or creator bot can remove others; max 50 users or 5 bots per request. - `get` — 获取群成员列表。Identity: supports `user` and `bot`; the caller must be in the target chat and must belong to the same tenant for internal chats. 
### messages @@ -123,6 +124,7 @@ lark-cli im [flags] # 调用 API | `chats.list` | `im:chat:read` | | `chats.update` | `im:chat:update` | | `chat.members.create` | `im:chat.members:write_only` | +| `chat.members.delete` | `im:chat.members:write_only` | | `chat.members.get` | `im:chat.members:read` | | `messages.delete` | `im:message:recall` | | `messages.forward` | `im:message` | @@ -136,4 +138,3 @@ lark-cli im [flags] # 调用 API | `pins.create` | `im:message.pins:write_only` | | `pins.delete` | `im:message.pins:write_only` | | `pins.list` | `im:message.pins:read` | - diff --git a/skills/lark-im/references/lark-im-messages-reply.md b/skills/lark-im/references/lark-im-messages-reply.md index 2e92310d..5b8cd8b9 100644 --- a/skills/lark-im/references/lark-im-messages-reply.md +++ b/skills/lark-im/references/lark-im-messages-reply.md @@ -2,7 +2,7 @@ > **Prerequisite:** Read [`../lark-shared/SKILL.md`](../../lark-shared/SKILL.md) first to understand authentication, global parameters, and safety rules. -Reply to a specific message. Only supports bot identity. Also supports thread replies. +Reply to a specific message. Supports both user identity (`--as user`) and bot identity (`--as bot`). Also supports thread replies. This skill maps to the shortcut: `lark-cli im +messages-reply` (internally calls `POST /open-apis/im/v1/messages/:message_id/reply`). @@ -12,16 +12,96 @@ Replies sent by this tool are visible to other people. Before calling it, you ** 1. Which message to reply to 2. The reply content -3. Which identity to use (bot only) +3. Which identity to use (user or bot) **Do not** send a reply without explicit user approval. When using `--as bot`, the reply is sent in the app's name, so make sure the app has already been added to the target chat. +When using `--as user`, the reply is sent as the authorized end user and requires the `im:message.send_as_user` scope. 
+ +## Choose The Right Content Flag + +| Need | Recommended flag | Why | +|------|------|------| +| Reply with plain text exactly as written | `--text` | Wrapped directly to `{"text":"..."}` | +| Reply with simple Markdown and accept conversion | `--markdown` | Automatically converted to `post` JSON | +| Precisely control the reply payload | `--content` | You provide the exact JSON | +| Reply with media | `--image` / `--file` / `--video` / `--audio` | Shortcut uploads local files automatically | + +### `--text` vs `--markdown` + +- Use `--text` when the reply should remain plain text and you want exact control over line breaks, spacing, indentation, code samples, or literal Markdown characters. +- Use `--markdown` when you want a lightweight formatted reply and you accept that the shortcut will normalize and rewrite parts of the content before sending. +- Use `--content` when you need exact `post` JSON, a card, a title, multiple locales, or any structure that `--markdown` cannot express reliably. + +## What `--markdown` Really Does + +`--markdown` does **not** send arbitrary raw Markdown to the API. + +The shortcut: + +1. Forces `msg_type=post` +2. Resolves remote Markdown images like `![x](https://...)` +3. Normalizes the Markdown for Feishu post rendering +4. Wraps the final content as: + +```json +{"zh_cn":{"content":[[{"tag":"md","text":"..."}]]}} +``` + +So `--markdown` is a convenience mode, not a full Markdown compatibility layer. + +### Current Markdown Caveats + +- It does **not** promise full CommonMark / GitHub Flavored Markdown support. +- It always becomes a `post` payload with a single `zh_cn` locale. +- It does **not** let you set a `post` title. +- Headings are rewritten: + - `# Title` becomes `#### Title` + - `##` to `######` are normalized to `#####` when the content contains H1-H3 +- Consecutive headings are separated with blank lines after heading normalization. +- Block spacing and line breaks may be normalized during conversion. 
+- Code blocks are preserved as code blocks. +- Excess blank lines are compressed. +- Only remote `http://...`, `https://...`, or already-uploaded `img_xxx` Markdown images are kept reliably. +- Local paths in Markdown image syntax like `![x](./a.png)` are **not** auto-uploaded by `--markdown`. +- If remote Markdown image handling fails, that image is removed with a warning. + +If you need exact output, use `--msg-type post --content ...` instead of `--markdown`. + +## Preserving Formatting + +If the reply contains multiple lines, code blocks, indentation, tabs, or a lot of escaping, prefer `$'...'`. + +### When formatting must be preserved + +Use `--text` plus `$'...'`: + +```bash +lark-cli im +messages-reply --message-id om_xxx --text $'Received\nI will check this today.\nOwner: alice' +``` + +```bash +lark-cli im +messages-reply --message-id om_xxx --text $'```sql\nselect * from jobs;\n```' +``` + +This keeps the reply as plain text instead of converting it to a `post`. + +### When formatting does not need exact preservation + +Use `--markdown`: + +```bash +lark-cli im +messages-reply --message-id om_xxx --markdown $'## Follow-up\n\n- I reproduced it\n- I am fixing it' +``` + +This is better for quick readable formatting, but the final payload may still differ from the source text because headings and spacing are normalized before sending. 
+ ## Commands ```bash -# Reply to a message (plain text, bot identity, --text is recommended) +# Reply to a message (plain text, --text is recommended for normal replies) lark-cli im +messages-reply --message-id om_xxx --text "Received" # Equivalent manual JSON @@ -30,13 +110,16 @@ lark-cli im +messages-reply --message-id om_xxx --content '{"text":"Received"}' # Reply as a bot lark-cli im +messages-reply --message-id om_xxx --text "bot reply" --as bot +# Reply with preserved multi-line text +lark-cli im +messages-reply --message-id om_xxx --text $'Line 1\nLine 2\n indented line' + # Reply inside the thread (message appears in the target thread) lark-cli im +messages-reply --message-id om_xxx --text "Let's discuss this" --reply-in-thread -# Bot identity + thread reply -lark-cli im +messages-reply --message-id om_xxx --text "bot reply" --as bot --reply-in-thread +# Reply with basic Markdown (will be converted to post JSON) +lark-cli im +messages-reply --message-id om_xxx --markdown $'## Reply\n\n- item 1\n- item 2' -# Reply with a rich-text message +# If you need exact post structure, send JSON directly lark-cli im +messages-reply --message-id om_xxx --msg-type post --content '{"zh_cn":{"title":"Reply","content":[[{"tag":"text","text":"Detailed content"}]]}}' # Reply with a local image (uploaded automatically before sending) @@ -52,7 +135,7 @@ lark-cli im +messages-reply --message-id om_xxx --video ./demo.mp4 --video-cover lark-cli im +messages-reply --message-id om_xxx --text "Received" --idempotency-key my-unique-id # Preview the request without executing it -lark-cli im +messages-reply --message-id om_xxx --text "Test" --dry-run +lark-cli im +messages-reply --message-id om_xxx --markdown $'## Test\n\nhello' --dry-run ``` ## Parameters @@ -60,24 +143,33 @@ lark-cli im +messages-reply --message-id om_xxx --text "Test" --dry-run | Parameter | Required | Description | |------|------|------| | `--message-id ` | Yes | ID of the message being replied to (`om_xxx`) | -| 
`--msg-type ` | No | Message type (default `text`): `text`, `post`, `image`, `file`, `audio`, `media`, `interactive`, `share_chat`, `share_user` | -| `--content ` | One of content options | Reply content as a JSON string; format depends on `msg_type` | -| `--text ` | One of content options | Plain text message (automatically wrapped as `{"text":"..."}` JSON) | -| `--markdown ` | One of content options | Markdown text (auto-wrapped as post format with style optimization; image URLs auto-resolved) | -| `--image ` | One of content options | Local image path, `image_key` (`img_xxx`)| -| `--file ` | One of content options | Local file path, `file_key` (`file_xxx`)| -| `--video ` | One of content options | Local video path, `file_key`; **must be used together with `--video-cover`** | -| `--video-cover ` | **Required with `--video`** | Video cover image path, `image_key` (`img_xxx`) | -| `--audio ` | One of content options | Local audio path, `file_key` | +| `--msg-type ` | No | Message type (default `text`). If you use `--text` / `--markdown` / media flags, the effective type is inferred automatically. Explicitly setting a conflicting `--msg-type` fails validation | +| `--content ` | One content option | Exact reply content as JSON. The JSON must match the effective `--msg-type` | +| `--text ` | One content option | Plain text reply. Best default when you need exact text and formatting preservation | +| `--markdown ` | One content option | Convenience Markdown input. 
Internally converted to `post` JSON with Feishu-specific normalization | +| `--image ` | One content option | Local image path or `image_key` (`img_xxx`) | +| `--file ` | One content option | Local file path or `file_key` (`file_xxx`) | +| `--video ` | One content option | Local video path or `file_key`; **must be used together with `--video-cover`** | +| `--video-cover ` | **Required with `--video`** | Video cover image path or `image_key` (`img_xxx`) | +| `--audio ` | One content option | Local audio path or `file_key` | | `--reply-in-thread` | No | Reply inside the thread. The reply appears in the target message's thread instead of the main chat stream | | `--idempotency-key ` | No | Idempotency key; the same key sends only one reply within 1 hour | -| `--as ` | No | Identity type: `bot` only | +| `--as ` | No | Identity type: `bot` or `user` (default `bot`) | | `--dry-run` | No | Print the request only, do not execute it | > **Mutual exclusivity rule:** `--text`, `--markdown`, `--content`, and `--image`/`--file`/`--video`/`--audio` cannot be used together. Media flags are also mutually exclusive with each other. > > **Video cover rule:** `--video` **must** be accompanied by `--video-cover`. Omitting `--video-cover` when using `--video` will fail validation. `--video-cover` cannot be used without `--video`. +## Common Mistakes + +- Choosing `--markdown` when you actually need exact plain text. If exact line breaks and spacing matter, use `--text`, usually with `$'...'`. +- Assuming `--markdown` supports all Markdown features. It does not; it is converted into a Feishu `post` payload and rewritten first. +- Putting local image paths inside Markdown like `![x](./a.png)`. `--markdown` does not auto-upload those paths. +- Using `--content` without making the JSON match the effective `--msg-type`. +- Explicitly setting `--msg-type` to something that conflicts with `--text`, `--markdown`, or media flags. 
+- Mixing `--text`, `--markdown`, or `--content` with media flags in one command. + ## Return Value ```json @@ -108,16 +200,23 @@ The reply appears in the target message's thread and does not show up in the mai ## @Mention Format (text / post) -- @specific user: `name` +- Recommended format: `name` - @all: `` +- The shortcut normalizes common variants like `` and `` into `user_id`, but `user_id` remains the recommended documented form ## Notes - `--message-id` must be a valid message ID in `om_xxx` format -- `--content` must be a valid JSON string -- `--reply-in-thread` is only meaningful in group chats -- `--image`/`--file`/`--video`/`--audio`/`--video-cover` support local file paths; use relative paths within the current working directory. The shortcut automatically uploads the file first and then sends the reply -- If the provided value starts with `img_` or `file_`, it is treated as an existing key and used directly -- When using `--video`, `--video-cover` is **required** as the video cover. Omitting `--video-cover` with `--video` will produce a validation error. 
`--video-cover` cannot be used without `--video` +- `--content` must be valid JSON +- When using `--content`, you are responsible for making the JSON structure match the effective `msg_type` +- `--reply-in-thread` adds `reply_in_thread=true` to the API request +- `--reply-in-thread` is mainly meaningful in chats that support thread replies +- `--image`/`--file`/`--video`/`--audio`/`--video-cover` support local file paths; the shortcut uploads first and then sends the reply; file/image upload is bot-only, so when using `--as user`, the upload step is automatically performed with bot identity, and only the final send uses user identity +- If the provided media value starts with `img_` or `file_`, it is treated as an existing key and used directly +- `--markdown` always sends `msg_type=post` +- If you explicitly set `--msg-type` and it conflicts with the chosen content flag, validation fails +- When using `--video`, `--video-cover` is required as the video cover +- `--dry-run` uses placeholder image keys for remote Markdown images and placeholder media keys for local uploads - Failures return error codes and messages +- `--as user` uses a user access token (UAT) and requires the `im:message.send_as_user` scope; the reply is sent as the authorized end user - `--as bot` uses a tenant access token (TAT), and requires the `im:message:send_as_bot` scope diff --git a/skills/lark-im/references/lark-im-messages-search.md b/skills/lark-im/references/lark-im-messages-search.md index 4cf3f295..68858395 100644 --- a/skills/lark-im/references/lark-im-messages-search.md +++ b/skills/lark-im/references/lark-im-messages-search.md @@ -6,7 +6,7 @@ Search Feishu messages across conversations. This shortcut automatically perform > **User identity only** (`--as user`). Bot identity is not supported. 
-This skill maps to the shortcut: `lark-cli im +messages-search` (internally calls `POST /open-apis/im/v1/messages/search` + `GET /open-apis/im/v1/messages/mget`, then batch-fetches chat context). +This skill maps to the shortcut: `lark-cli im +messages-search` (internally calls `POST /open-apis/im/v1/messages/search` + batched `GET /open-apis/im/v1/messages/mget`, then batch-fetches chat context). ## Commands @@ -49,6 +49,12 @@ lark-cli im +messages-search --query "test" --format csv # Pagination lark-cli im +messages-search --query "test" --page-token +# Auto-pagination across multiple pages +lark-cli im +messages-search --query "test" --page-all --format json + +# Auto-pagination with an explicit page cap +lark-cli im +messages-search --query "test" --page-limit 5 --format json + # Preview the request without executing it lark-cli im +messages-search --query "test" --dry-run ``` @@ -69,6 +75,8 @@ lark-cli im +messages-search --query "test" --dry-run | `--end