diff --git a/.do/app.yaml b/.do/app.yaml index 0911ec7..38dddf4 100644 --- a/.do/app.yaml +++ b/.do/app.yaml @@ -22,23 +22,35 @@ name: openpartner region: nyc -# Managed Postgres. For production, bump to a paid plan and upgrade the -# `production` flag. The connection string is injected into services -# that reference ${openpartner-db.DATABASE_URL}. -databases: - - name: openpartner-db - engine: PG - version: '16' - production: false - cluster_name: openpartner-db - db_name: openpartner - db_user: openpartner +# Database is provisioned externally (e.g., a separate Coherence cluster +# with a dedicated `openpartner` database inside it). DATABASE_URL is set +# as an encrypted secret on each service below. Add this app to the +# cluster's Trusted Sources after the first deploy so it can reach pg. +# +# If you'd rather have App Platform provision a dedicated cluster, paste +# this block back in: +# +# databases: +# - name: openpartner-db +# engine: PG +# version: '16' +# production: true +# cluster_name: openpartner-db +# db_name: openpartner +# db_user: openpartner +# +# and change the DATABASE_URL env vars below to: +# value: ${openpartner-db.DATABASE_URL} services: # Core API. Runs migrations on first boot via docker-entrypoint.sh. - name: api dockerfile_path: apps/api/Dockerfile source_dir: / + github: + repo: getcoherence/openpartner + branch: main + deploy_on_push: true http_port: 4601 instance_count: 1 instance_size_slug: basic-xs @@ -54,17 +66,39 @@ services: - key: API_PORT value: '4601' - key: DATABASE_URL - value: ${openpartner-db.DATABASE_URL} + type: SECRET + scope: RUN_TIME + # App-pool connection — connects as openpartner_app (a NOLOGIN-by- + # default role provisioned by migration 20260507020000_app_role.ts). + # Required in multi-tenant deploys for RLS to actually engage at + # runtime; in single-tenant self-host you can leave it unset and + # let everything run as the migration role. 
+ - key: DATABASE_URL_APP + type: SECRET + scope: RUN_TIME + # Password used to provision openpartner_app on migrate. Pair with + # DATABASE_URL_APP. Rotation: change here, redeploy to re-run the + # idempotent role migration which alters the password in place. + - key: OPENPARTNER_APP_DB_PASSWORD + type: SECRET + scope: RUN_TIME - key: OPENPARTNER_MODE value: flat - - key: MAIL_TRANSPORT - value: postmark + # single | multi. Hosted deploys default to multi; flip to single + # if running a single-tenant fork on App Platform. + - key: OPENPARTNER_TENANCY + value: multi - key: POSTMARK_MESSAGE_STREAM value: outbound + # Enables the in-process scheduler (usage reporting + payouts). + # DO App Platform has no native cron, so the api process runs them + # itself. Set to '0' on replica nodes if you ever scale to instance_count > 1. + - key: OPENPARTNER_ENABLE_SCHEDULER + value: '1' - key: ADMIN_API_KEY type: SECRET scope: RUN_TIME - - key: NETWORK_ENCRYPTION_KEY + - key: SECRETS_ENCRYPTION_KEY type: SECRET scope: RUN_TIME - key: POSTMARK_SERVER_TOKEN @@ -76,6 +110,8 @@ services: - key: PORTAL_URL type: SECRET scope: RUN_TIME + # Stripe — set as live values when ready to launch. STRIPE_WEBHOOK_SECRET + # accepts a comma-separated list (one per Event destination). - key: STRIPE_SECRET_KEY type: SECRET scope: RUN_TIME @@ -85,6 +121,24 @@ services: - key: STRIPE_FLAT_PRICE_ID type: SECRET scope: RUN_TIME + # Metered usage prices — created by `setup-stripe.mjs` against the + # live key. Optional in self-host but required for the percentage + # portion of Flex/Revshare/Network billing to actually fire. + - key: STRIPE_FLAT_USAGE_PRICE_ID + type: SECRET + scope: RUN_TIME + - key: STRIPE_REVSHARE_USAGE_PRICE_ID + type: SECRET + scope: RUN_TIME + - key: STRIPE_NETWORK_PRICE_ID + type: SECRET + scope: RUN_TIME + - key: STRIPE_NETWORK_USAGE_PRICE_ID + type: SECRET + scope: RUN_TIME + - key: METRICS_TOKEN + type: SECRET + scope: RUN_TIME # Click router. 
Separate component so we can point a marketing apex # (go.yourdomain.com) at it without routing through the portal's @@ -92,6 +146,10 @@ services: - name: router dockerfile_path: apps/router/Dockerfile source_dir: / + github: + repo: getcoherence/openpartner + branch: main + deploy_on_push: true http_port: 4701 instance_count: 1 instance_size_slug: basic-xxs @@ -107,7 +165,8 @@ services: - key: ROUTER_PORT value: '4701' - key: DATABASE_URL - value: ${openpartner-db.DATABASE_URL} + type: SECRET + scope: RUN_TIME - key: COOKIE_DOMAIN type: SECRET scope: RUN_TIME @@ -122,21 +181,43 @@ services: static_sites: - name: portal source_dir: / + github: + repo: getcoherence/openpartner + branch: main + deploy_on_push: true + # Force devDependencies (tsc, vite) during build — App Platform sets + # NODE_ENV=production by default which would have pnpm skip them. + # NODE_ENV stays unset on this static site at runtime; nothing executes. build_command: | corepack enable corepack prepare pnpm@9.11.0 --activate - pnpm install --frozen-lockfile + NPM_CONFIG_PRODUCTION=false pnpm install --frozen-lockfile pnpm --filter @openpartner/db build pnpm --filter @openpartner/portal build output_dir: apps/portal/dist catchall_document: index.html envs: - - key: NODE_ENV - value: production + # PostHog product analytics. VITE_-prefixed so Vite inlines the + # value into the static bundle at build time. BUILD_TIME scope is + # required: the static site has no run-time process to read env + # vars from. Without this declaration the values you set in the + # DO UI may not flow into the build (depending on App Platform + # version). Same project key as studio-website for unified + # dashboards. + - key: VITE_POSTHOG_KEY + type: SECRET + scope: BUILD_TIME + - key: VITE_POSTHOG_HOST + scope: BUILD_TIME + value: https://us.i.posthog.com -# App-level routing. /api/* goes to the api service; everything else -# falls through to the static portal. 
The router is addressed by its -# own subdomain — see `domains` below. +# App-level routing. +# /api/* → api (path stripped before forward) +# /r/* → router (path preserved — router serves /r/:linkKey directly) +# / → portal (App Platform's static-site default) +# +# Once you wire `r.openpartner.dev` to the router component via the +# `domains` block, you can remove the /r rule — the subdomain handles it. ingress: rules: - match: @@ -147,9 +228,9 @@ ingress: preserve_path_prefix: false - match: path: - prefix: / + prefix: /r component: - name: portal + name: router # Wire custom domains once the app is up. Set these in the App Platform # UI, or uncomment + fill below and `doctl apps update`. diff --git a/.env.example b/.env.example index b57bb21..7ad8b3a 100644 --- a/.env.example +++ b/.env.example @@ -5,9 +5,30 @@ # Values: selfhost | flat | revshare OPENPARTNER_MODE=selfhost -# Database +# Tenancy mode — single tenant (self-host bootstrap) vs multi-tenant +# (hosted, /t//... URL routing). Values: single | multi +# Self-host customers leave this at 'single'. Hosted operators set +# 'multi' and use POST /signup to provision tenants. +OPENPARTNER_TENANCY=single + +# Database — privileged connection used by migrations, signup, the +# Stripe webhook handler, and the in-process scheduler. Bypasses RLS. DATABASE_URL=postgres://openpartner:openpartner@localhost:5433/openpartner +# Optional: app-role connection string used by every tenant-scoped +# request. Run as a non-superuser without BYPASSRLS so RLS engages as +# defense-in-depth alongside the per-request app.tenant_id filter. +# When unset, tenant-scoped requests fall back to DATABASE_URL and RLS +# is bypassed (app-level filtering still applies). Self-hosters opt in +# by setting both this and OPENPARTNER_APP_DB_PASSWORD. +DATABASE_URL_APP= + +# Password used to provision the openpartner_app role on migrate. 
The +# 20260507020000_app_role.ts migration creates the role with this +# password (rotates it if the role already exists). Skipped with a +# notice if unset. Pair with DATABASE_URL_APP pointing at this role. +OPENPARTNER_APP_DB_PASSWORD= + # Service ports ROUTER_PORT=4701 API_PORT=4601 @@ -17,10 +38,9 @@ PORTAL_PORT=5673 # In production: set to your marketing apex (e.g. .yourdomain.com) COOKIE_DOMAIN=localhost -# Session secret — generate a random 32+ char string for prod -SESSION_SECRET=dev-only-replace-in-prod - -# Admin API key for bootstrap (curl/CI). Rotate via POST /api-keys once live. +# Bootstrap / headless admin bearer. Human admins sign in via the /install +# wizard (first run) or by receiving an emailed magic link. Keep this set +# for CI, scripts, and emergency access; rotate once human admins exist. # Generate: node -e "console.log('op_' + require('crypto').randomBytes(24).toString('hex'))" ADMIN_API_KEY=op_devonly_replace_me_with_64_hex_chars_before_any_real_use_xxxx @@ -35,30 +55,55 @@ STRIPE_FLAT_PRICE_ID= VELOCITY_MAX=20 VELOCITY_WINDOW_MS=60000 -# Network federation — AES-256-GCM master key (32 raw bytes, either hex -# (64 chars) or base64 (44 chars)). REQUIRED in production; dev uses a -# weak fallback and logs a warning. +# Public portal URL. Required in production so CORS has an explicit +# origin allowlist (we refuse to boot with it empty). Defaults to +# localhost:5673 in dev. +PORTAL_URL=http://localhost:5673 + +# Optional: extra CORS origins beyond PORTAL_URL, comma-separated. +CORS_EXTRA_ORIGINS= + +# -- Secret encryption at rest -- +# 32-byte master key (hex or base64) used to encrypt SMTP passwords / +# Postmark tokens / future Config secrets. REQUIRED in production; dev +# uses a fixed fallback with a warning so restarts keep decrypting. # Generate: node -e "console.log(require('crypto').randomBytes(32).toString('hex'))" -NETWORK_ENCRYPTION_KEY= +SECRETS_ENCRYPTION_KEY= + +# Mail for admin + partner invite / signin links. 
+# +# Preferred: configure from the admin Settings → Email delivery page +# (stored encrypted at rest in Config). The env vars below are +# fallbacks used only when UI config is empty — useful for hosted / +# managed tiers where the operator wants to force the transport. +# +# SMTP_HOST + MAIL_FROM → nodemailer SMTP (Gmail, SES, Mailgun, +# SendGrid, Postmark SMTP, Postfix, …) +# POSTMARK_SERVER_TOKEN + MAIL_FROM → Postmark HTTP API +# neither → stdout (dev only) +# +# MAIL_FROM must be quoted if it contains a display name, e.g. +# MAIL_FROM="Acme Partners " +MAIL_FROM= -# Router URL override used when federating Network partnerships. If unset, -# we infer from the vendor's instance URL by swapping API port 4601 → 4701. -NETWORK_ROUTER_URL= +# -- SMTP fallback -- +SMTP_HOST= +SMTP_PORT=587 +SMTP_SECURE= +SMTP_USER= +SMTP_PASSWORD= -# Mail delivery for magic-link auth. -# dev → writes to the DevMessage table; admins read at /admin/dev-mailbox. -# postmark → POSTs to api.postmarkapp.com/email (requires the vars below). -MAIL_TRANSPORT=dev -MAIL_FROM=OpenPartner +# -- Postmark (alternative) -- POSTMARK_SERVER_TOKEN= -# Optional — defaults to the "outbound" transactional stream. POSTMARK_MESSAGE_STREAM=outbound -# Public portal URL that magic links point at. Defaults to localhost:5673 -# for dev. Set this in prod so emails link to your real hostname. -PORTAL_URL=http://localhost:5673 - # Optional: bearer token required to scrape /metrics. Leave blank and # /metrics is open (fine on internal networks). Set it when /metrics is # reachable from the public internet. METRICS_TOKEN= + +# Optional: URL of the OpenPartner Network coordinator. When set, hosted +# multi-tenant signups auto-register with the Network (admin still +# confirms via the magic link); self-host installs see a "Connect to +# Network" button under Settings → Network. Leave empty to opt out. 
+NETWORK_URL= diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS new file mode 100644 index 0000000..00ed01b --- /dev/null +++ b/.github/CODEOWNERS @@ -0,0 +1,41 @@ +# GitHub CODEOWNERS file. +# +# Lines map a path glob to a GitHub user / team. The last matching line +# wins. When a PR touches a path, GitHub auto-requests review from the +# matching owner(s); when the protected-branch rule "Require review from +# Code Owners" is on, the PR can't merge without their approval. +# +# Keep this list short and meaningful — every entry is a hard +# review-block. Default reviewer covers everything; specific paths +# below add no extra owners (yet) but reserve the slots for when a +# team forms around the repo. + +# Default reviewer for everything not matched below. +* @keithfawcett + +# Build, deploy, and CI configuration. Changes here affect the release +# pipeline and supply chain — surface them clearly. +/.github/ @keithfawcett +/.do/ @keithfawcett +/Dockerfile @keithfawcett +/docker-compose*.yml @keithfawcett +/apps/api/Dockerfile @keithfawcett +/apps/portal/Dockerfile @keithfawcett +/apps/router/Dockerfile @keithfawcett + +# Cryptography + auth + storage of secrets. Anything under here gets +# extra eyes because a regression is hard to catch in review. +/apps/api/src/crypto.ts @keithfawcett +/apps/api/src/auth.ts @keithfawcett +/apps/api/src/db.ts @keithfawcett +/apps/api/src/tenancy.ts @keithfawcett +/packages/db/src/ssl.ts @keithfawcett + +# Database migrations. Bad migrations are the most expensive class of +# mistake — explicitly route to the maintainer even when they're small. +/packages/db/migrations/ @keithfawcett + +# Network-protocol contract. Any wire-shape change must go through the +# maintainer because it's the federation API openpartner-network builds +# against. 
+/docs/network-protocol.md @keithfawcett diff --git a/.github/FUNDING.yml b/.github/FUNDING.yml new file mode 100644 index 0000000..2bb7b8a --- /dev/null +++ b/.github/FUNDING.yml @@ -0,0 +1,3 @@ +# These are supported funding model platforms + +buy_me_a_coffee: keithf diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 1daaa6c..33681a3 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -10,6 +10,11 @@ concurrency: group: ci-${{ github.ref }} cancel-in-progress: true +# Default to read-only. Jobs that need to write (the docker job pushes to +# GHCR) opt in explicitly via their own permissions block. +permissions: + contents: read + jobs: test: name: typecheck + test @@ -32,10 +37,9 @@ jobs: env: DATABASE_URL: postgres://openpartner:openpartner@localhost:5432/openpartner - # A deterministic 32-byte key so federation code paths that require - # NETWORK_ENCRYPTION_KEY don't throw during boot. - NETWORK_ENCRYPTION_KEY: 0000000000000000000000000000000000000000000000000000000000000000 - ADMIN_API_KEY: op_ci_0000000000000000000000000000000000000000000000000000000000 + # Quoted so YAML doesn't parse these as integers or booleans. + ADMIN_API_KEY: "op_ci_0000000000000000000000000000000000000000000000000000000000" + SECRETS_ENCRYPTION_KEY: "0000000000000000000000000000000000000000000000000000000000000000" steps: - uses: actions/checkout@v4 @@ -51,6 +55,12 @@ jobs: - run: pnpm install --frozen-lockfile + # @openpartner/db is consumed via its "types": "dist/index.d.ts" + # field, so downstream typechecks need its build output to exist. + # Also produces the migration bundle the Migrate step runs. 
+ - name: Build @openpartner/db + run: pnpm --filter @openpartner/db build + - name: Migrate run: pnpm migrate diff --git a/.github/workflows/npm-publish.yml b/.github/workflows/npm-publish.yml new file mode 100644 index 0000000..a534811 --- /dev/null +++ b/.github/workflows/npm-publish.yml @@ -0,0 +1,70 @@ +name: npm publish (sdk) + +on: + push: + # Same tag trigger as CI's docker push — a tagged release pushes + # docker images AND attempts an SDK publish. The publish step is + # idempotent: if packages/sdk's version is already on npm, it + # skips. So a tag bump that doesn't touch the SDK is a no-op here. + tags: ['v*'] + workflow_dispatch: + # Manual trigger for republish-after-failure or first-publish runs + # before any tag exists. + +jobs: + publish: + name: build + publish @openpartner/sdk + runs-on: ubuntu-latest + + permissions: + contents: read + # Required for npm Trusted Publishing AND provenance attestations. + # The OIDC token GitHub mints here is what npm exchanges for a + # short-lived publish credential — no NPM_TOKEN secret needed — + # AND what Sigstore signs the tarball attestation with so users + # can verify the build. + id-token: write + + steps: + - uses: actions/checkout@v4 + + - uses: pnpm/action-setup@v4 + with: + version: 9.11.0 + + - uses: actions/setup-node@v4 + with: + node-version: '20' + cache: pnpm + # registry-url is what enables `pnpm publish` to pick up + # NODE_AUTH_TOKEN below. Without it, publish 401s. 
+ registry-url: 'https://registry.npmjs.org' + + - run: pnpm install --frozen-lockfile + + - name: Build SDK + run: pnpm --filter @openpartner/sdk build + + - name: Skip if version already published + id: check + run: | + local_version=$(node -p "require('./packages/sdk/package.json').version") + published=$(npm view @openpartner/sdk@$local_version version 2>/dev/null || true) + if [ -n "$published" ]; then + echo "@openpartner/sdk@$local_version already on npm — skipping" + echo "skip=true" >> "$GITHUB_OUTPUT" + else + echo "@openpartner/sdk@$local_version not on npm yet — will publish" + echo "skip=false" >> "$GITHUB_OUTPUT" + fi + + - name: Publish + if: steps.check.outputs.skip != 'true' + # Trusted Publishing: no NPM_TOKEN secret. The npm CLI mints a + # short-lived publish credential from this workflow's OIDC token + # (enabled by `id-token: write` above), validated against the + # trusted-publisher config on npmjs.com. Provenance attestation + # is automatic. --no-git-checks: pnpm refuses to publish from a + # detached HEAD by default; tag pushes always run detached so + # we opt out. + run: pnpm --filter @openpartner/sdk publish --access public --provenance --no-git-checks diff --git a/.gitignore b/.gitignore index 50315bc..97bc285 100644 --- a/.gitignore +++ b/.gitignore @@ -16,3 +16,4 @@ coverage .nyc_output .claude/settings.local.json *.tsbuildinfo +.claude/ diff --git a/ARCHITECTURE.md b/ARCHITECTURE.md index e4bed0a..6e90cde 100644 --- a/ARCHITECTURE.md +++ b/ARCHITECTURE.md @@ -86,13 +86,35 @@ One codebase, three behaviors, flipped by `OPENPARTNER_MODE`: The core product — attribution, events, commissions — is identical across modes. Only the billing + payout layer changes. -## The Network +## Install + auth -OpenPartner has a built-in two-sided marketplace (`NetworkVendor`, `NetworkCreator`, `NetworkOffering`, `NetworkRequest`). Vendors publish offerings (commission terms, assets, description). Creators apply. 
On acceptance, the vendor instance provisions a `Partner` row via federation — the creator gets a share link like `go.vendor.com/r/`. +First-run is a three-step wizard at `/install` — admin account (name + email), program name + support email, and mail transport. On submit the first admin gets an emailed magic link; clicking it activates them with a session cookie. `/install` 409s once any admin is activated so a second party can't take over. -Federation credentials are scoped API keys (vendor's key for vendor → hosted network, hosted network's key for network → vendor on provisioning). Keys at rest are AES-256-GCM encrypted with `NETWORK_ENCRYPTION_KEY`. +**Personas** are first-class — neither admins nor partners authenticate with a shared token: -Self-hosted instances opt in by publishing their instance URL + scoped key. Skipping the Network entirely is supported — the vendor-direct flow (manually create Partners through the admin portal) is the original path. +- **Admin** — invited by another admin (or created during install), verifies a magic link, gets a `Session` cookie keyed `(principalKind='admin', principalId)`. Can invite / revoke / reinstate other admins, edit program + mail settings. `ADMIN_API_KEY` env stays valid as a bootstrap / headless / CI bearer; think `doctl`, migrations, or emergency access. +- **Partner** — invited by an admin, same magic-link flow, separate `Partner` table. Creates their own Links + API keys from their dashboard; admin never sees partner credentials. Revocation kills sessions in-transaction and stamps clicks `fraudFlag='revoked'` so future attribution skips them without breaking the end-user click experience. + +Magic-link tokens and sessions share one generalized schema (`MagicLinkToken`, `Session`) keyed by `(principalKind, principalId)`. The verify endpoint branches on `principalKind` — invites for partners stamp `Partner.activatedAt`, invites for admins stamp `Admin.activatedAt`. 
+ +## Settings + secret encryption + +Anything user-facing and runtime-adjustable lives in the `Config` table, editable from the admin Settings UI — not env. This covers: + +- **Program settings** — program name + support email (plaintext, identifiers not secrets) +- **Mail settings** — SMTP or Postmark selection + credentials + +Credentials inside `Config` (SMTP password, Postmark server token) are AES-256-GCM encrypted at rest using `SECRETS_ENCRYPTION_KEY` — the one env-level secret required in production. Public readers of the Settings API get a sanitized view (`hasPassword: true/false`) so the UI can show "saved ✓" without ever exposing plaintext. + +Env vars like `SMTP_HOST` / `POSTMARK_SERVER_TOKEN` / `MAIL_FROM` remain as **fallbacks** — the mailer resolves UI config first, env second, console stdout last (dev only). Hosted deployments can force a transport via env; self-hosters who want to rotate credentials without a redeploy do so from the Settings page. + +## Federating with an external creator network + +OpenPartner OSS is vendor-direct — the admin creates Partner rows, issues share links, and manages their own partner program. A separate hosted service (outside this repo) implements a two-sided creator network: vendors publish offerings, creators apply, and approved partnerships federate back into each vendor's OpenPartner instance as Partner + Link rows. + +The federation contract is thin: the vendor mints a scoped API key (`partners:write`, `links:write`, `partners:read`, `commissions:read`) and hands it to the network. The network calls the vendor's public API as an authenticated client — no shared schema, no inbound connections into the vendor. Data stays on the vendor's instance. Revoke the key and federation stops. + +Not participating in any network is the default. Everything in this repo works standalone. 
## Data portability diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md index 8077f0f..db08160 100644 --- a/CODE_OF_CONDUCT.md +++ b/CODE_OF_CONDUCT.md @@ -4,4 +4,4 @@ OpenPartner adopts the [Contributor Covenant v2.1](https://www.contributor-coven In short: be respectful, assume good intent, critique ideas not people, and don't be the reason someone stops contributing. -Violations can be reported to `conduct@getcoherence.io`. Reports are kept confidential and we aim to respond within two business days. +Violations can be reported to `support@getcoherence.io`. Reports are kept confidential and we aim to respond within two business days. diff --git a/README.md b/README.md index c59e8b6..5e460ad 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ # OpenPartner -**Open-source partner attribution and payouts.** Full attribution from click to revenue, three-tier pricing, your data stays yours. +**Open-source partner attribution and payouts.** Run your own partner program: track every click through to revenue, pay partners out via Stripe Connect, export everything whenever you want. > The open alternative to Dub Partners, Rewardful, and Impact. @@ -8,7 +8,7 @@ Existing partner platforms have two problems: -1. **They stop at the click.** Most tools track who clicked a link. Few reliably track which creator drove which dollar of revenue, 60 days later, across devices, through Safari's cookie blocks. +1. **They stop at the click.** Most tools track who clicked a link. Few reliably track which partner drove which dollar of revenue, 60 days later, across devices, through Safari's cookie blocks. 2. **They lock your data in.** Once two years of attribution history are baked into Impact or Partnerize, switching means starting over. OpenPartner fixes both. @@ -21,15 +21,19 @@ OpenPartner fixes both. 
- Works across devices by stitching to logged-in user identity - Answers the questions that matter: *which creator drove this revenue?* *which content actually converted?* -### Three-tier pricing +### Pricing -Pick the model that fits your business — at signup, not at year-three renewal: +Pick the tier that fits your business — at signup, not at year-three renewal. You can move between hosted tiers anytime. | Tier | Price | Best for | | --- | --- | --- | -| **Self-hosted** | Free forever | Teams who want to own their infra | -| **Hosted — flat** | $99–$499/mo | Teams who want predictability | -| **Hosted — revenue share** | 3% of GMV, no monthly | Teams who want to start cheaply and scale | +| **Self-hosted** | Free | Teams who want to own their infra. Run the core on your own box. | +| **Self-hosted + Network** | $29/mo + 3% on Network-originated payouts | Self-hosters who want creator-marketplace discovery on top of the core. | +| **Hosted — Flex** | $49/mo + 1.5% of attributed GMV | Teams who want predictable pricing + the Network bundled in. | +| **Hosted — Revshare** | 3% of attributed GMV, no monthly | Teams who want to start cheap and only pay when partners drive revenue. | +| **Enterprise** | Custom | Larger programs that need dedicated infra, SLAs, or procurement support. | + +Self-hosted is the GitHub repo you're reading. Hosted tiers run the same code on OpenPartner infra. Full pricing details: [openpartner.dev/pricing](https://openpartner.dev/pricing). ### Your data stays yours @@ -51,7 +55,9 @@ pnpm dev:router # terminal 2 — click router pnpm dev:portal # terminal 3 — partner dashboard ``` -See [docs/quickstart.md](./docs/quickstart.md) for full local setup, or [docs/deploy.md](./docs/deploy.md) for DigitalOcean App Platform and single-host Docker deployments. +Open `http://localhost:5673/install` — a three-step wizard collects your admin account, program name + support email, and mail transport (SMTP or Postmark). 
Accept the magic-link email and you're signed in as the first admin. + +See [docs/quickstart.md](./docs/quickstart.md) for the end-to-end walkthrough, or [docs/deploy.md](./docs/deploy.md) for DigitalOcean App Platform and single-host Docker deployments. ## Architecture @@ -84,19 +90,43 @@ v1. End-to-end attribution, payouts, and export are working; API surface is stab ### What's implemented +**Attribution + payouts** + - Edge click router with first-party cookie and SHA-256-hashed IP - Identity stitching (`POST /attribution/identify`) with late-binding backlog attribution -- Event ingest (`POST /attribution/events`) and Stripe webhook mapping -- Four attribution models — `last_click`, `first_click`, `linear`, `position` — with per-campaign selection, re-derivable from raw tables +- Event ingest (`POST /attribution/events`) and Stripe webhook mapping (idempotent on retries) +- Four attribution models — `last_click`, `first_click`, `linear`, `position` — per-campaign, re-derivable from raw tables - Commission accrual + review queue (approve / reverse) + Stripe Connect Standard payouts with idempotent transfers -- Three deployment modes gated by `OPENPARTNER_MODE`: `selfhost`, `flat` (Stripe subscription), `revshare` (3% platform fee) - Click velocity limits with an admin fraud-review queue that replays skipped attributions on unflag -- Scoped API keys (admin and partner tokens with granular `partners:write`, `links:write`, etc.) 
+ magic-link auth for the portal -- Two-sided OpenPartner Network — vendors publish offerings, creators apply, federation provisions partner records on vendor instances with AES-256-GCM encrypted keys -- Creator-chosen share-link slugs (`go.yourdomain.com/r/`) - Outbound webhooks with HMAC-SHA256 signing and per-event redelivery -- Portable JSON + CSV export per table; full bundle export round-trippable into self-hosted via `POST /import` -- Partner dashboard + admin overview + fraud review + partner funnel analytics in the portal + +**Auth + personas** + +- WordPress-style first-run `/install` wizard — admin account, program name, mail transport in one flow +- Admin accounts as first-class personas (not a shared token) — magic-link signin, invite/revoke/reinstate, last-active-admin guard +- Partner accounts via admin-invited magic-link — no admin-visible credentials, partners create their own API keys +- `ADMIN_API_KEY` env stays as bootstrap / headless / CI path +- Revoke flow for both admins and partners: sessions killed, reason stored, optional email notification + +**Configuration** + +- Program name + support email editable from the admin Settings UI (not env) +- Mail transport (SMTP / Postmark / console) editable from the UI, SMTP passwords + Postmark tokens encrypted at rest with `SECRETS_ENCRYPTION_KEY` +- Env vars (`SMTP_*`, `POSTMARK_*`, `MAIL_FROM`) remain as deploy-time fallbacks +- Three deployment modes gated by `OPENPARTNER_MODE`: `selfhost`, `flat` (Hosted Flex — $49/mo + 1.5% metered), `revshare` (Hosted Revshare — 3% metered, no monthly) + +**Integration surface** + +- Scoped API keys (`partners:write`, `links:write`, `commissions:read`, …) — the federation contract that lets an external creator-network service provision Partner + Link rows on this instance over REST - `@openpartner/sdk` on npm with browser and server entries -- Transactional email via Postmark (magic links, vendor approval, creator signups) +- Portable JSON + CSV export per 
table; full bundle round-trippable via `POST /import` + +**Operations** + +- Partner dashboard + admin overview + fraud review + partner funnel analytics in the portal +- Prometheus `/metrics` + X-Request-Id correlation - Deployment: DigitalOcean App Platform spec + single-host `docker-compose.prod.yml` behind Caddy + +### Not in this repo + +A creator-discovery / two-sided network layer (vendors publish offerings, creators apply, federation provisions partnerships into vendor instances) lives in a separate private repo. OpenPartner OSS instances integrate with it via the scoped-API-key federation contract described above. diff --git a/SECURITY.md b/SECURITY.md new file mode 100644 index 0000000..5d10caf --- /dev/null +++ b/SECURITY.md @@ -0,0 +1,49 @@ +# Security policy + +## Reporting a vulnerability + +**Please do not open a public GitHub issue for security vulnerabilities.** + +Use one of these private channels instead: + +- **GitHub private vulnerability reporting** (preferred): +- **Email**: security@openpartner.dev — replies within 48 hours. + +Include as much of the following as you can: + +- A description of the issue and its impact. +- Steps to reproduce, ideally with a minimal proof-of-concept. +- The commit SHA or release version you tested against. +- Whether you intend to publish a write-up, and on what timeline. + +## What to expect + +- Acknowledgement within **48 hours**. +- An initial assessment (severity, scope, owner) within **5 business days**. +- For critical issues, a fix or mitigation shipped within **7 days** of acknowledgement. +- Coordinated disclosure: we'll work with you on a timeline before any public details are shared. +- Credit in the release notes for the fix, if you want it. + +## Scope + +In scope: + +- The OpenPartner application code in this repository (`apps/`, `packages/`). +- The official Docker images published from this repository. +- The hosted production deployment at `app.openpartner.dev` and `network.openpartner.dev`. 
+- The `@openpartner/sdk` npm package published from this repository. + +Out of scope: + +- Self-hosted deployments running modified code. +- Vulnerabilities in third-party dependencies — please report those upstream and let us know which dependency. +- Findings that require physical access, social engineering of OpenPartner staff, or DoS / volumetric attacks. +- Reports generated solely by automated scanners with no demonstrated impact. + +## Supported versions + +The `main` branch is the only supported version. Security fixes ship to the latest release tag. + +## Hall of fame + +We list reporters who responsibly disclose verified vulnerabilities in `SECURITY-HALL-OF-FAME.md` (created on first entry) — opt in via your report. diff --git a/apps/api/Dockerfile b/apps/api/Dockerfile index cf8e571..9889221 100644 --- a/apps/api/Dockerfile +++ b/apps/api/Dockerfile @@ -62,21 +62,25 @@ ENV NODE_ENV=production \ MAIL_TRANSPORT=postmark WORKDIR /app -# API dist + its workspace deps. -COPY --from=builder /out/package.json ./package.json -COPY --from=builder /out/node_modules ./node_modules -COPY --from=builder /out/dist ./dist +# API dist + its workspace deps. Owned by the unprivileged `node` user +# (UID 1000) that ships with the official node:alpine image so the +# runtime container doesn't run as root. +COPY --from=builder --chown=node:node /out/package.json ./package.json +COPY --from=builder --chown=node:node /out/node_modules ./node_modules +COPY --from=builder --chown=node:node /out/dist ./dist # Migrations need the @openpartner/db package + knex bin. We ship it # alongside so the entrypoint can run them before API boot. -COPY --from=builder /out/packages-db ./packages-db +COPY --from=builder --chown=node:node /out/packages-db ./packages-db # Entrypoint runs migrations then execs the API. `exec` matters — we # want the node process to be PID 1 so signals (SIGTERM from Fly / # Docker) land on the app directly. 
-COPY apps/api/docker-entrypoint.sh /usr/local/bin/openpartner-api +COPY --chown=node:node apps/api/docker-entrypoint.sh /usr/local/bin/openpartner-api RUN chmod +x /usr/local/bin/openpartner-api +USER node + EXPOSE 4601 HEALTHCHECK --interval=30s --timeout=5s --start-period=20s --retries=3 \ CMD node -e "fetch('http://127.0.0.1:' + (process.env.API_PORT||4601) + '/health').then(r=>process.exit(r.ok?0:1)).catch(()=>process.exit(1))" diff --git a/apps/api/docker-entrypoint.sh b/apps/api/docker-entrypoint.sh index d46c74b..e35693d 100644 --- a/apps/api/docker-entrypoint.sh +++ b/apps/api/docker-entrypoint.sh @@ -15,6 +15,19 @@ if [ "${OPENPARTNER_SKIP_MIGRATIONS:-0}" != "1" ]; then echo "[entrypoint] migration failed — refusing to start" >&2 exit 1 } + + # Reconcile the openpartner_app role's password + grants on every boot. + # The original role-creation migration is one-shot; without this step, + # rotating OPENPARTNER_APP_DB_PASSWORD leaves Postgres out of sync and + # the app pool fails authentication. Noop when the env var is unset. + echo "[entrypoint] ensuring app-role state..." 
+ ( + cd /app/packages-db/dist-migrate \ + && NODE_ENV=production node scripts/ensure-app-role.js + ) || { + echo "[entrypoint] app-role reconciliation failed — refusing to start" >&2 + exit 1 + } fi echo "[entrypoint] starting API on :${API_PORT:-4601}" diff --git a/apps/api/package.json b/apps/api/package.json index 51e2fdb..d2e769b 100644 --- a/apps/api/package.json +++ b/apps/api/package.json @@ -13,15 +13,19 @@ "test": "vitest run --passWithNoTests" }, "dependencies": { + "@aws-sdk/client-s3": "^3.665.0", "@openpartner/db": "workspace:*", "@types/cookie-parser": "^1.4.10", "cookie-parser": "^1.4.7", "cors": "^2.8.5", + "croner": "^10.0.1", "dotenv": "^16.4.5", "express": "^4.19.2", "express-async-errors": "^3.1.1", + "express-rate-limit": "^7.4.1", "helmet": "^7.0.0", "knex": "^3.1.0", + "nodemailer": "^8.0.6", "pg": "^8.11.5", "pino": "^9.4.0", "pino-http": "^9.0.0", @@ -32,11 +36,12 @@ "devDependencies": { "@types/cors": "^2.8.17", "@types/express": "^4.17.21", + "@types/nodemailer": "^8.0.0", "@types/pg": "^8.11.10", "@types/supertest": "^7.2.0", "supertest": "^7.0.0", "tsx": "^4.19.0", "typescript": "5.4.5", - "vitest": "^1.6.0" + "vitest": "^3.2.4" } } diff --git a/apps/api/scripts/setup-stripe.mjs b/apps/api/scripts/setup-stripe.mjs new file mode 100644 index 0000000..0c72446 --- /dev/null +++ b/apps/api/scripts/setup-stripe.mjs @@ -0,0 +1,212 @@ +#!/usr/bin/env node +/** + * Provision the Stripe products + prices + meters OpenPartner needs. + * + * STRIPE_SECRET_KEY=sk_test_... node apps/api/scripts/setup-stripe.mjs + * + * Idempotent: each product is keyed by a metadata tag; meters by event_name; + * metered prices by their meter + product. Re-running after a partial failure + * won't create duplicates. Outputs the env var values you need to add to your + * .env at the end. + * + * Use a test-mode key first (sk_test_...) and verify, then re-run with the + * live key when you're ready. 
+ */ +import Stripe from 'stripe'; + +const key = process.env.STRIPE_SECRET_KEY; +if (!key) { + console.error('Set STRIPE_SECRET_KEY before running.'); + process.exit(1); +} +const isLive = key.startsWith('sk_live_'); +const isTest = key.startsWith('sk_test_'); +if (!isLive && !isTest) { + console.error('STRIPE_SECRET_KEY should start with sk_test_ or sk_live_.'); + process.exit(1); +} + +const stripe = new Stripe(key); + +console.log(`\nProvisioning OpenPartner products in ${isLive ? 'LIVE' : 'TEST'} mode...\n`); + +const PRODUCTS = [ + { + key: 'flex', + name: 'OpenPartner Flex', + description: '$49/mo + 1.5% of attributed GMV. Hosted, fully managed.', + monthlyPrice: 4900, // $49.00 in cents + // 1.5% of GMV = $0.015 per dollar = 1.5 cents per dollar. + // Stripe Prices on metered usage are in cents per unit; we report + // usage in dollars (Event.value), so unit_amount = 1.5 cents. + meteredCentsPerUnit: 1.5, + statementDescriptor: 'OPENPARTNER FLEX', + }, + { + key: 'network_access', + name: 'OpenPartner Network access', + description: '$29/mo for self-hosted customers tapping into the OpenPartner Network. 90-day free trial. 3% on Network-originated payouts.', + monthlyPrice: 2900, // $29.00 in cents + meteredCentsPerUnit: 3, // 3% on Network-originated payouts + statementDescriptor: 'OPENPARTNER NET', + }, + { + key: 'revshare', + name: 'OpenPartner Revshare', + description: '3% of attributed GMV, no monthly fee. Hosted, fully managed.', + monthlyPrice: null, + meteredCentsPerUnit: 3, // 3% of GMV + statementDescriptor: 'OPENPARTNER REV', + }, +]; + +// Meters: usage signals OpenPartner reports per merchant. Reporting goes via +// stripe.billing.meterEvents.create with the matching event_name. 
+const METERS = [ + { + eventName: 'openpartner_attributed_gmv', + displayName: 'OpenPartner attributed GMV', + purpose: 'Attributed GMV in dollars (used for Flex 1.5% and Revshare 3%)', + }, + { + eventName: 'openpartner_network_payouts', + displayName: 'OpenPartner Network-originated payouts', + purpose: 'Network-originated payouts in dollars (used for Network access 3%)', + }, +]; + +// Each product binds to one meter for its metered price. +const PRODUCT_METERS = { + flex: 'openpartner_attributed_gmv', + revshare: 'openpartner_attributed_gmv', + network_access: 'openpartner_network_payouts', +}; + +// ────────────────────────── Meters ────────────────────────── +console.log('Meters:'); +const metersByEventName = new Map(); +for (const m of METERS) { + const existingMeters = await stripe.billing.meters.list({ status: 'active', limit: 100 }); + const found = existingMeters.data.find((x) => x.event_name === m.eventName); + if (found) { + console.log(` ✓ ${m.eventName} already exists: ${found.id}`); + metersByEventName.set(m.eventName, found); + } else { + const created = await stripe.billing.meters.create({ + display_name: m.displayName, + event_name: m.eventName, + default_aggregation: { formula: 'sum' }, + value_settings: { event_payload_key: 'value' }, + customer_mapping: { event_payload_key: 'stripe_customer_id', type: 'by_id' }, + }); + console.log(` + Created meter ${m.eventName}: ${created.id}`); + metersByEventName.set(m.eventName, created); + } +} + +// ────────────────────────── Products + Prices ────────────────────────── +console.log('\nProducts + prices:'); +const results = []; + +for (const p of PRODUCTS) { + // Product: keyed by metadata. 
+ const existing = await stripe.products.search({ + query: `metadata['openpartner_product']:'${p.key}' AND active:'true'`, + }); + + let product; + if (existing.data.length > 0) { + product = existing.data[0]; + console.log(` ✓ ${p.name} already exists: ${product.id}`); + } else { + product = await stripe.products.create({ + name: p.name, + description: p.description, + statement_descriptor: p.statementDescriptor, + metadata: { openpartner_product: p.key }, + tax_code: 'txcd_10000000', + }); + console.log(` + Created ${p.name}: ${product.id}`); + } + + const existingPrices = await stripe.prices.list({ product: product.id, active: true, limit: 100 }); + + // Monthly recurring price (skip if metered-only product). + let monthlyPriceId = null; + if (p.monthlyPrice != null) { + const found = existingPrices.data.find( + (x) => + x.metadata?.openpartner_price_kind === 'monthly' && + x.unit_amount === p.monthlyPrice && + x.currency === 'usd' && + x.recurring?.interval === 'month' && + x.recurring?.usage_type !== 'metered', + ); + if (found) { + monthlyPriceId = found.id; + console.log(` ✓ Monthly price already exists: ${monthlyPriceId}`); + } else { + const created = await stripe.prices.create({ + product: product.id, + unit_amount: p.monthlyPrice, + currency: 'usd', + recurring: { interval: 'month' }, + tax_behavior: 'exclusive', + metadata: { openpartner_price_kind: 'monthly' }, + }); + monthlyPriceId = created.id; + console.log(` + Created monthly price: ${monthlyPriceId} ($${(p.monthlyPrice / 100).toFixed(2)}/mo)`); + } + } + + // Metered price linked to the product's meter. 
+ let meteredPriceId = null; + if (p.meteredCentsPerUnit != null) { + const meterEventName = PRODUCT_METERS[p.key]; + const meter = metersByEventName.get(meterEventName); + if (!meter) throw new Error(`Missing meter for ${p.key} (event_name=${meterEventName})`); + + const meteredCents = p.meteredCentsPerUnit; + const found = existingPrices.data.find( + (x) => + x.metadata?.openpartner_price_kind === 'metered' && + x.recurring?.meter === meter.id && + x.currency === 'usd', + ); + if (found) { + meteredPriceId = found.id; + console.log(` ✓ Metered price already exists: ${meteredPriceId}`); + } else { + // Use unit_amount_decimal for fractional cents (1.5 cents per unit, etc.). + const created = await stripe.prices.create({ + product: product.id, + currency: 'usd', + unit_amount_decimal: meteredCents.toString(), + recurring: { interval: 'month', usage_type: 'metered', meter: meter.id }, + tax_behavior: 'exclusive', + metadata: { openpartner_price_kind: 'metered' }, + }); + meteredPriceId = created.id; + console.log(` + Created metered price: ${meteredPriceId} (${meteredCents}¢ per unit, meter=${meter.id})`); + } + } + + results.push({ key: p.key, productId: product.id, monthlyPriceId, meteredPriceId }); +} + +// ────────────────────────── Summary ────────────────────────── +console.log('\n────────────────────────────────────────────────────────────'); +console.log('Done. 
Add these to your OpenPartner .env:\n'); + +const flex = results.find((r) => r.key === 'flex'); +const revshare = results.find((r) => r.key === 'revshare'); +const network = results.find((r) => r.key === 'network_access'); + +if (flex?.monthlyPriceId) console.log(`STRIPE_FLAT_PRICE_ID=${flex.monthlyPriceId}`); +if (flex?.meteredPriceId) console.log(`STRIPE_FLAT_USAGE_PRICE_ID=${flex.meteredPriceId}`); +if (revshare?.meteredPriceId) console.log(`STRIPE_REVSHARE_USAGE_PRICE_ID=${revshare.meteredPriceId}`); +if (network?.monthlyPriceId) console.log(`STRIPE_NETWORK_PRICE_ID=${network.monthlyPriceId}`); +if (network?.meteredPriceId) console.log(`STRIPE_NETWORK_USAGE_PRICE_ID=${network.meteredPriceId}`); + +console.log('\nMode:', isLive ? 'LIVE' : 'TEST'); +console.log('Next: copy the env line(s) into your .env, restart the api.\n'); diff --git a/apps/api/src/__tests__/admin-invite.test.ts b/apps/api/src/__tests__/admin-invite.test.ts new file mode 100644 index 0000000..6fb9ab1 --- /dev/null +++ b/apps/api/src/__tests__/admin-invite.test.ts @@ -0,0 +1,180 @@ +/** + * Admin persona flow: first-run install, admin-to-admin invite, revoke + * guardrails. Uses a capturing mailer so we can read the magic-link + * URLs without Postmark. 
*/ + +import { afterAll, afterEach, beforeAll, beforeEach, describe, expect, it } from 'vitest'; +import request from 'supertest'; +import { TABLES } from '@openpartner/db'; +import { db } from '../db.js'; +import { createApp } from '../app.js'; +import { __setMailerForTests, type Mailer, type Message } from '../mailer.js'; + +const ADMIN_KEY = 'op_test_admin_persona_0123456789abcdef0123'; +process.env.ADMIN_API_KEY = ADMIN_KEY; +process.env.PORTAL_URL = 'http://localhost:5673'; +process.env.OPENPARTNER_TENANCY = 'single'; + +const skipIntegration = !process.env.DATABASE_URL || process.env.INTEGRATION === 'skip'; + +const TABLES_TO_CLEAN = [ + TABLES.Session, + TABLES.MagicLinkToken, + TABLES.ApiKey, + TABLES.Partner, + TABLES.Admin, + TABLES.Config, +]; + +const app = createApp({ enableLogger: false }); + +class CapturingMailer implements Mailer { + readonly sent: Message[] = []; + async send(_ctx: unknown, msg: Message): Promise<void> { + this.sent.push(msg); + } + findFor(to: string, purpose?: string): Message | undefined { + return this.sent.find( + (m) => + m.to === to && + (purpose == null || m.metadata?.purpose === purpose || m.tag === purpose), + ); + } +} + +let mailer: CapturingMailer; + +beforeAll(async () => { + if (skipIntegration) return; + await db.raw('select 1'); +}); + +beforeEach(async () => { + mailer = new CapturingMailer(); + __setMailerForTests(mailer); + if (skipIntegration) return; + for (const t of TABLES_TO_CLEAN) { + await db(t).del(); + } +}); + +afterEach(() => { + __setMailerForTests(null); +}); + +afterAll(async () => { + await db.destroy(); +}); + +function extractToken(body: string): string { + const match = /token=([^\s&"]+)/.exec(body); + if (!match) throw new Error(`no token in body:\n${body}`); + return decodeURIComponent(match[1]!); +} + +describe.skipIf(skipIntegration)('admin personas', () => { + it('install → first admin accepts → program settings persisted', async () => { + // Status probe reports needsSetup=true with zero
admins. + const status = await request(app).get('/install/status'); + expect(status.status).toBe(200); + expect(status.body.needsSetup).toBe(true); + + const setup = await request(app).post('/install').send({ + adminName: 'Ada Admin', + adminEmail: 'ada@example.com', + programName: 'Acme Partners', + supportEmail: 'support@acme.com', + }); + expect(setup.status).toBe(200); + + // Invite email landed. + const invite = mailer.findFor('ada@example.com', 'admin_invite'); + expect(invite).toBeDefined(); + expect(invite!.subject).toContain('Acme Partners'); + + // Activate. + const token = extractToken(invite!.text); + const verify = await request(app).post('/auth/magic/verify').send({ token }); + expect(verify.status).toBe(200); + expect(verify.body.role).toBe('admin'); + expect(verify.body.admin.email).toBe('ada@example.com'); + const cookie = (verify.headers['set-cookie'] as unknown as string[])[0]!.split(';')[0]!; + + // Status now reports installed. + const statusAfter = await request(app).get('/install/status'); + expect(statusAfter.body.needsSetup).toBe(false); + + // /install 409s now — second installer can't take over. + const second = await request(app).post('/install').send({ + adminName: 'Rogue', + adminEmail: 'rogue@example.com', + programName: 'Takeover', + }); + expect(second.status).toBe(409); + + // Program settings were saved. + const settings = await request(app).get('/config/program').set('Cookie', cookie); + expect(settings.body.programName).toBe('Acme Partners'); + expect(settings.body.supportEmail).toBe('support@acme.com'); + + // whoami via cookie resolves to admin with name. + const who = await request(app).get('/auth/whoami').set('Cookie', cookie); + expect(who.status).toBe(200); + expect(who.body.role).toBe('admin'); + expect(who.body.admin.name).toBe('Ada Admin'); + }); + + it('admin-to-admin invite + signin', async () => { + // Bootstrap via env admin key (no install needed for this test). 
+ const create = await request(app) + .post('/admins') + .set('Authorization', `Bearer ${ADMIN_KEY}`) + .send({ email: 'two@example.com', name: 'Number Two' }); + expect(create.status).toBe(201); + + const invite = mailer.findFor('two@example.com', 'admin_invite'); + expect(invite).toBeDefined(); + + const token = extractToken(invite!.text); + const verify = await request(app).post('/auth/magic/verify').send({ token }); + expect(verify.status).toBe(200); + expect(verify.body.admin.name).toBe('Number Two'); + + // Returning signin sends an admin_signin email (not partner_signin). + mailer.sent.length = 0; + const signin = await request(app).post('/auth/signin').send({ email: 'two@example.com' }); + expect(signin.status).toBe(200); + const signinMsg = mailer.findFor('two@example.com', 'admin_signin'); + expect(signinMsg).toBeDefined(); + }); + + it('cannot revoke the last active admin', async () => { + const create = await request(app) + .post('/admins') + .set('Authorization', `Bearer ${ADMIN_KEY}`) + .send({ email: 'lone@example.com', name: 'Lone' }); + const token = extractToken(mailer.findFor('lone@example.com')!.text); + await request(app).post('/auth/magic/verify').send({ token }); + + const res = await request(app) + .post(`/admins/${create.body.id}/revoke`) + .set('Authorization', `Bearer ${ADMIN_KEY}`); + expect(res.status).toBe(409); + expect(res.body.error).toBe('cannot_revoke_last_active_admin'); + }); + + it('duplicate email → 409 email_taken', async () => { + await request(app) + .post('/admins') + .set('Authorization', `Bearer ${ADMIN_KEY}`) + .send({ email: 'dup@example.com', name: 'First' }); + + const dupe = await request(app) + .post('/admins') + .set('Authorization', `Bearer ${ADMIN_KEY}`) + .send({ email: 'dup@example.com', name: 'Second' }); + expect(dupe.status).toBe(409); + expect(dupe.body.error).toBe('email_taken'); + }); +}); diff --git a/apps/api/src/__tests__/integration.test.ts b/apps/api/src/__tests__/integration.test.ts index 
46d1e2c..f31db7f 100644 --- a/apps/api/src/__tests__/integration.test.ts +++ b/apps/api/src/__tests__/integration.test.ts @@ -11,13 +11,18 @@ import { afterAll, beforeAll, beforeEach, describe, expect, it } from 'vitest'; import request from 'supertest'; import { ulid } from 'ulid'; -import { TABLES } from '@openpartner/db'; +import { DEFAULT_TENANT_ID, TABLES } from '@openpartner/db'; import { db } from '../db.js'; import { createApp } from '../app.js'; const ADMIN_KEY = 'op_test_admin_key_0123456789abcdef0123'; process.env.ADMIN_API_KEY = ADMIN_KEY; -process.env.OPENPARTNER_MODE = process.env.OPENPARTNER_MODE ?? 'selfhost'; +// Force selfhost — vitest auto-loads .env, so the ?? form would let a +// developer's local .env mode bleed into the suite. +process.env.OPENPARTNER_MODE = 'selfhost'; +// Force single tenancy — every direct insert below gets stamped with the +// default tenant; routes go through tenantMiddleware which sets the same. +process.env.OPENPARTNER_TENANCY = 'single'; const TABLES_TO_CLEAN = [ TABLES.Commission, @@ -28,9 +33,6 @@ const TABLES_TO_CLEAN = [ TABLES.Link, TABLES.Campaign, TABLES.Payout, - TABLES.Session, - TABLES.MagicLinkToken, - TABLES.DevMessage, TABLES.ApiKey, TABLES.Partner, TABLES.Config, @@ -87,6 +89,7 @@ describe.skipIf(skipIntegration)('api integration', () => { const clickId = ulid(); await db(TABLES.Click).insert({ id: clickId, + tenantId: DEFAULT_TENANT_ID, linkId: linkRes.body.id, partnerId, campaignId, @@ -176,6 +179,7 @@ describe.skipIf(skipIntegration)('api integration', () => { const clickId = ulid(); await db(TABLES.Click).insert({ id: clickId, + tenantId: DEFAULT_TENANT_ID, linkId: linkRes.body.id, partnerId, campaignId, @@ -271,12 +275,12 @@ describe.skipIf(skipIntegration)('api integration', () => { const click1 = ulid(); const click2 = ulid(); await db(TABLES.Click).insert({ - id: click1, linkId: link1.id, partnerId: p1, campaignId: linearCampaign, + id: click1, tenantId: DEFAULT_TENANT_ID, linkId: link1.id, 
partnerId: p1, campaignId: linearCampaign, landingUrl: 'x', ipHash: 'x', userAgent: 'x', referer: null, fraudFlag: null, ts: new Date(Date.now() - 10_000), }); await db(TABLES.Click).insert({ - id: click2, linkId: link2.id, partnerId: p2, campaignId: linearCampaign, + id: click2, tenantId: DEFAULT_TENANT_ID, linkId: link2.id, partnerId: p2, campaignId: linearCampaign, landingUrl: 'x', ipHash: 'x', userAgent: 'x', referer: null, fraudFlag: null, ts: new Date(Date.now() - 5_000), }); @@ -370,9 +374,9 @@ describe.skipIf(skipIntegration)('api integration', () => { const click2 = ulid(); const click3 = ulid(); await db(TABLES.Click).insert([ - { id: click1, linkId: link.id, partnerId: partner.id, campaignId: campaign.id, landingUrl: 'x', ipHash: 'a', userAgent: 'x', referer: null, fraudFlag: null }, - { id: click2, linkId: link.id, partnerId: partner.id, campaignId: campaign.id, landingUrl: 'x', ipHash: 'b', userAgent: 'x', referer: null, fraudFlag: null }, - { id: click3, linkId: link.id, partnerId: partner.id, campaignId: campaign.id, landingUrl: 'x', ipHash: 'c', userAgent: 'x', referer: null, fraudFlag: 'velocity' }, + { id: click1, tenantId: DEFAULT_TENANT_ID, linkId: link.id, partnerId: partner.id, campaignId: campaign.id, landingUrl: 'x', ipHash: 'a', userAgent: 'x', referer: null, fraudFlag: null }, + { id: click2, tenantId: DEFAULT_TENANT_ID, linkId: link.id, partnerId: partner.id, campaignId: campaign.id, landingUrl: 'x', ipHash: 'b', userAgent: 'x', referer: null, fraudFlag: null }, + { id: click3, tenantId: DEFAULT_TENANT_ID, linkId: link.id, partnerId: partner.id, campaignId: campaign.id, landingUrl: 'x', ipHash: 'c', userAgent: 'x', referer: null, fraudFlag: 'velocity' }, ]); // User 1 stitches, signs up, pays. 
@@ -434,6 +438,7 @@ describe.skipIf(skipIntegration)('api integration', () => { const clickId = ulid(); await db(TABLES.Click).insert({ id: clickId, + tenantId: DEFAULT_TENANT_ID, linkId: link.id, partnerId: partner.id, campaignId: campaign.id, diff --git a/apps/api/src/__tests__/magic-link.test.ts b/apps/api/src/__tests__/magic-link.test.ts deleted file mode 100644 index 128cada..0000000 --- a/apps/api/src/__tests__/magic-link.test.ts +++ /dev/null @@ -1,314 +0,0 @@ -/** - * Magic-link signup + signin over the live Express + Postgres stack. - * - * We don't send real email — the DevMailer persists to DevMessage, and - * the tests read the stored body to extract the link and consume the - * embedded token. - */ - -import { afterAll, beforeAll, beforeEach, describe, expect, it } from 'vitest'; -import request from 'supertest'; -import { TABLES } from '@openpartner/db'; -import { db } from '../db.js'; -import { createApp } from '../app.js'; - -const ADMIN_KEY = 'op_test_magic_admin_0123456789abcdef0123'; -process.env.ADMIN_API_KEY = ADMIN_KEY; -process.env.MAIL_TRANSPORT = 'dev'; -process.env.PORTAL_URL = 'http://localhost:5673'; - -const skipIntegration = !process.env.DATABASE_URL || process.env.INTEGRATION === 'skip'; -const TABLES_TO_CLEAN = [ - TABLES.Session, - TABLES.MagicLinkToken, - TABLES.DevMessage, - TABLES.ApiKey, - TABLES.NetworkCreator, - TABLES.Config, -]; - -const app = createApp({ enableLogger: false }); - -beforeAll(async () => { - if (skipIntegration) return; - await db.raw('select 1'); -}); - -afterAll(async () => { - await db.destroy(); -}); - -beforeEach(async () => { - if (skipIntegration) return; - for (const t of TABLES_TO_CLEAN) await db(t).del(); -}); - -function extractToken(body: string): string { - const match = body.match(/token=([A-Za-z0-9_-]+(?:%3D)*)/); - if (!match) throw new Error(`no token in body: ${body.slice(0, 200)}`); - return decodeURIComponent(match[1]!); -} - -describe.skipIf(skipIntegration)('magic-link auth', () => { - 
it('full signup → verify → session-backed whoami', async () => { - const signup = await request(app) - .post('/auth/creator/signup') - .send({ email: 'grace@example.com', handle: 'gracie', name: 'Grace Hopper' }); - expect(signup.status).toBe(200); - - // The DevMailer persists; we read the message via the admin endpoint. - const mailbox = await request(app).get('/dev/mailbox').set('Authorization', `Bearer ${ADMIN_KEY}`); - expect(mailbox.body.messages).toHaveLength(1); - expect(mailbox.body.messages[0].to).toBe('grace@example.com'); - const token = extractToken(mailbox.body.messages[0].body); - - const verify = await request(app).post('/auth/magic/verify').send({ token }); - expect(verify.status).toBe(200); - expect(verify.body.role).toBe('network_creator'); - expect(verify.body.creator.handle).toBe('gracie'); - expect(verify.body.creator.status).toBe('active'); - - // Cookie was set. - const setCookie = verify.headers['set-cookie'] as unknown as string[] | undefined; - expect(setCookie?.[0]).toMatch(/^op_session=/); - const cookie = setCookie![0]!.split(';')[0]!; - - // Session-backed whoami returns the same creator. - const me = await request(app).get('/auth/whoami').set('Cookie', cookie); - expect(me.status).toBe(200); - expect(me.body.role).toBe('network_creator'); - expect(me.body.creator.handle).toBe('gracie'); - }); - - it('signin for an active creator issues a new session', async () => { - // First sign up + verify. - await request(app) - .post('/auth/creator/signup') - .send({ email: 'ada@example.com', handle: 'ada', name: 'Ada' }); - let msgs = (await request(app).get('/dev/mailbox').set('Authorization', `Bearer ${ADMIN_KEY}`)).body.messages; - await request(app).post('/auth/magic/verify').send({ token: extractToken(msgs[0].body) }); - - // Now request a signin link as the returning creator. 
- const signin = await request(app).post('/auth/creator/signin').send({ email: 'ada@example.com' }); - expect(signin.status).toBe(200); - msgs = (await request(app).get('/dev/mailbox').set('Authorization', `Bearer ${ADMIN_KEY}`)).body.messages; - expect(msgs[0].subject).toBe('Your OpenPartner sign-in link'); - const verify = await request(app).post('/auth/magic/verify').send({ token: extractToken(msgs[0].body) }); - expect(verify.status).toBe(200); - expect(verify.body.role).toBe('network_creator'); - }); - - it('rejects reused, expired, and unknown tokens', async () => { - await request(app) - .post('/auth/creator/signup') - .send({ email: 'x@example.com', handle: 'xxx', name: 'Xavier' }); - const msgs = (await request(app).get('/dev/mailbox').set('Authorization', `Bearer ${ADMIN_KEY}`)).body.messages; - const token = extractToken(msgs[0].body); - - // First consume: ok - const first = await request(app).post('/auth/magic/verify').send({ token }); - expect(first.status).toBe(200); - - // Second consume: already_consumed - const second = await request(app).post('/auth/magic/verify').send({ token }); - expect(second.status).toBe(400); - expect(second.body.error).toBe('already_consumed'); - - // Unknown token: not_found - const unknown = await request(app).post('/auth/magic/verify').send({ token: 'mlt_unknowntoken12345' }); - expect(unknown.status).toBe(400); - expect(unknown.body.error).toBe('not_found'); - }); - - it('signup enforces unique email and handle', async () => { - await request(app) - .post('/auth/creator/signup') - .send({ email: 'dup@example.com', handle: 'dup', name: 'First' }); - - // Same email, different handle. - const dupEmail = await request(app) - .post('/auth/creator/signup') - .send({ email: 'dup@example.com', handle: 'other', name: 'Second' }); - // Only conflicts once the first token is consumed (creator exists). - // Before that, both are pending tokens — signup endpoint only checks - // against existing CREATOR rows, not pending tokens. 
So consume first: - const msgs = (await request(app).get('/dev/mailbox').set('Authorization', `Bearer ${ADMIN_KEY}`)).body.messages; - await request(app).post('/auth/magic/verify').send({ token: extractToken(msgs[msgs.length - 1].body) }); - - // Now both email and handle collide with the new creator row. - const conflict = await request(app) - .post('/auth/creator/signup') - .send({ email: 'dup@example.com', handle: 'fresh', name: 'Second' }); - expect(conflict.status).toBe(409); - - // Consume of the earlier "other" token should also fail now. - void dupEmail; - }); - - it('vendor signup: rejects bad keys, creates pending vendor on verify, activates + signin', async () => { - // Bad instance URL → instance_unreachable. - const badUrl = await request(app) - .post('/auth/vendor/signup') - .send({ - email: 'bad@vendor.com', - name: 'Bad', - slug: `bad-${Date.now()}`, - instanceUrl: 'http://127.0.0.1:1', - instanceKey: 'op_nothing', - }); - expect(badUrl.status).toBe(400); - - // Stand up a real vendor instance (same server). Mint a scoped key - // with only the federation scopes. 
- const appListen = app.listen(0); - const port = (appListen.address() as import('node:net').AddressInfo).port; - const vendorInstanceUrl = `http://127.0.0.1:${port}`; - const scopedMint = await request(app) - .post('/api-keys/scoped') - .set('Authorization', `Bearer ${ADMIN_KEY}`) - .send({ scopes: ['partners:write', 'partners:read', 'links:write', 'commissions:read'] }); - const scopedKey = scopedMint.body.plaintext as string; - - const slug = `good-${Date.now()}`; - const signup = await request(app) - .post('/auth/vendor/signup') - .send({ - email: 'good@vendor.com', - name: 'GoodVendor', - slug, - instanceUrl: vendorInstanceUrl, - instanceKey: scopedKey, - }); - expect(signup.status).toBe(200); - - const mailbox = await request(app).get('/dev/mailbox').set('Authorization', `Bearer ${ADMIN_KEY}`); - const signupMsg = mailbox.body.messages.find((m: { to: string }) => m.to === 'good@vendor.com'); - expect(signupMsg).toBeDefined(); - const token = extractToken(signupMsg.body); - - const verify = await request(app).post('/auth/magic/verify').send({ token }); - expect(verify.status).toBe(200); - expect(verify.body.role).toBe('network_vendor'); - expect(verify.body.status).toBe('pending'); - // No session cookie yet — vendor is pending admin approval. - expect(verify.headers['set-cookie']).toBeUndefined(); - - // Signin right now should no-op (vendor not active) → no magic link. - await db(TABLES.DevMessage).del(); // clear so we can tell if anything arrives - await request(app).post('/auth/signin').send({ email: 'good@vendor.com' }); - const mailbox2 = await request(app).get('/dev/mailbox').set('Authorization', `Bearer ${ADMIN_KEY}`); - expect(mailbox2.body.messages.filter((m: { to: string }) => m.to === 'good@vendor.com')).toHaveLength(0); - - // Admin activates. - const vendorId = verify.body.vendor.id; - await request(app).post(`/network/vendors/${vendorId}/activate`).set('Authorization', `Bearer ${ADMIN_KEY}`); - - // Signin now DOES issue a link. 
- await request(app).post('/auth/signin').send({ email: 'good@vendor.com' }); - const mailbox3 = await request(app).get('/dev/mailbox').set('Authorization', `Bearer ${ADMIN_KEY}`); - const signinMsg = mailbox3.body.messages.find( - (m: { to: string; subject: string }) => m.to === 'good@vendor.com' && m.subject.includes('sign-in'), - ); - expect(signinMsg).toBeDefined(); - const signinToken = extractToken(signinMsg.body); - const signinVerify = await request(app).post('/auth/magic/verify').send({ token: signinToken }); - expect(signinVerify.status).toBe(200); - expect(signinVerify.body.role).toBe('network_vendor'); - const cookie = (signinVerify.headers['set-cookie'] as unknown as string[])[0]!.split(';')[0]!; - - // Session-backed whoami. - const me = await request(app).get('/auth/whoami').set('Cookie', cookie); - expect(me.status).toBe(200); - expect(me.body.role).toBe('network_vendor'); - expect(me.body.vendor.slug).toBe(slug); - - await new Promise((resolve) => appListen.close(() => resolve())); - }); - - it('unified /auth/signin tries creator then vendor', async () => { - // Creator email → creator signin link. - await request(app) - .post('/auth/creator/signup') - .send({ email: 'c@example.com', handle: 'cuser', name: 'Cuser Name' }); - let msgs = (await request(app).get('/dev/mailbox').set('Authorization', `Bearer ${ADMIN_KEY}`)).body.messages; - await request(app).post('/auth/magic/verify').send({ token: extractToken(msgs[0].body) }); - - await db(TABLES.DevMessage).del(); - await request(app).post('/auth/signin').send({ email: 'c@example.com' }); - msgs = (await request(app).get('/dev/mailbox').set('Authorization', `Bearer ${ADMIN_KEY}`)).body.messages; - expect(msgs).toHaveLength(1); - expect(msgs[0].metadata?.purpose).toBe('creator_signin'); - - // Unknown email → silent (no message). 
- await db(TABLES.DevMessage).del(); - await request(app).post('/auth/signin').send({ email: 'nobody@example.com' }); - const after = await request(app).get('/dev/mailbox').set('Authorization', `Bearer ${ADMIN_KEY}`); - expect(after.body.messages).toHaveLength(0); - }); - - it('vendor signin with an email tied to multiple vendors prefers the active one', async () => { - // Regression for ultrareview #20: before the email column existed, - // findVendorByEmail walked magic-link history and returned the most - // recent vendor_signup's slug — wrong when two vendors shared an - // email. Now it reads NetworkVendor.email directly, preferring - // active status over pending. - const { ulid } = await import('ulid'); - const sharedEmail = 'dup@vendor.test'; - // Seed two vendors directly — save the cost of the full signup flow. - const older = ulid(); - const newer = ulid(); - await db(TABLES.NetworkVendor).insert([ - { - id: older, - name: 'Older', - slug: `dup-older-${older}`, - email: sharedEmail, - instanceUrl: 'https://older.example', - instanceKeyCiphertext: 'ct', - instanceKeyPrefix: 'prefix__', - status: 'pending', - createdAt: new Date(Date.now() - 10_000), - }, - { - id: newer, - name: 'Newer', - slug: `dup-newer-${newer}`, - email: sharedEmail, - instanceUrl: 'https://newer.example', - instanceKeyCiphertext: 'ct', - instanceKeyPrefix: 'prefix__', - status: 'active', - createdAt: new Date(), - }, - ]); - - await db(TABLES.DevMessage).del(); - await request(app).post('/auth/signin').send({ email: sharedEmail }); - const msgs = (await request(app).get('/dev/mailbox').set('Authorization', `Bearer ${ADMIN_KEY}`)).body - .messages as Array<{ metadata?: Record; body: string }>; - const signinMsg = msgs.find((m) => m.metadata?.purpose === 'vendor_signin'); - expect(signinMsg).toBeDefined(); - - const verify = await request(app).post('/auth/magic/verify').send({ token: extractToken(signinMsg!.body) }); - expect(verify.status).toBe(200); - // The ACTIVE vendor should 
win, not the older pending one. - expect(verify.body.vendor.id).toBe(newer); - }); - - it('signout clears the session cookie', async () => { - await request(app) - .post('/auth/creator/signup') - .send({ email: 'out@example.com', handle: 'out', name: 'Out' }); - const msgs = (await request(app).get('/dev/mailbox').set('Authorization', `Bearer ${ADMIN_KEY}`)).body.messages; - const verify = await request(app).post('/auth/magic/verify').send({ token: extractToken(msgs[0].body) }); - const cookie = (verify.headers['set-cookie'] as unknown as string[])[0]!.split(';')[0]!; - - const before = await request(app).get('/auth/whoami').set('Cookie', cookie); - expect(before.body.role).toBe('network_creator'); - - await request(app).post('/auth/signout').set('Cookie', cookie); - - const after = await request(app).get('/auth/whoami').set('Cookie', cookie); - expect(after.status).toBe(401); - }); -}); diff --git a/apps/api/src/__tests__/mailer.test.ts b/apps/api/src/__tests__/mailer.test.ts deleted file mode 100644 index 1620585..0000000 --- a/apps/api/src/__tests__/mailer.test.ts +++ /dev/null @@ -1,125 +0,0 @@ -/** - * PostmarkMailer unit tests — mock fetch, assert payload shape + error - * handling. DevMailer path is already exercised by the magic-link - * integration suite; we don't duplicate it here. 
- */ - -import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'; - -describe('PostmarkMailer', () => { - beforeEach(() => { - vi.resetModules(); - }); - afterEach(() => { - vi.unstubAllGlobals(); - delete process.env.MAIL_TRANSPORT; - delete process.env.POSTMARK_SERVER_TOKEN; - delete process.env.MAIL_FROM; - delete process.env.POSTMARK_MESSAGE_STREAM; - }); - - it('POSTs a well-formed payload to the Postmark Email API', async () => { - process.env.MAIL_TRANSPORT = 'postmark'; - process.env.POSTMARK_SERVER_TOKEN = 'test-token'; - process.env.MAIL_FROM = 'OpenPartner '; - process.env.POSTMARK_MESSAGE_STREAM = 'transactional-1'; - - const fetchMock = vi.fn(async (_input: string | URL | Request, _init?: RequestInit) => - new Response(JSON.stringify({ ErrorCode: 0, Message: 'OK' }), { status: 200 }), - ); - vi.stubGlobal('fetch', fetchMock); - - const { getMailer, __resetMailerForTests } = await import('../mailer.js'); - __resetMailerForTests(); - - await getMailer().send({ - to: 'grace@example.com', - subject: 'Hi', - text: 'plain', - html: 'rich', - tag: 'creator_signup', - metadata: { handle: 'gracie' }, - }); - - expect(fetchMock).toHaveBeenCalledOnce(); - const call = fetchMock.mock.calls[0]!; - expect(call[0]).toBe('https://api.postmarkapp.com/email'); - - const init = call[1]!; - const headers = init.headers as Record; - expect(headers['x-postmark-server-token']).toBe('test-token'); - expect(headers['content-type']).toBe('application/json'); - - const body = JSON.parse(String(init.body)); - expect(body.From).toBe('OpenPartner '); - expect(body.To).toBe('grace@example.com'); - expect(body.Subject).toBe('Hi'); - expect(body.TextBody).toBe('plain'); - expect(body.HtmlBody).toBe('rich'); - expect(body.Tag).toBe('creator_signup'); - expect(body.MessageStream).toBe('transactional-1'); - expect(body.Metadata).toEqual({ handle: 'gracie' }); - }); - - it('throws on non-2xx HTTP response', async () => { - process.env.MAIL_TRANSPORT = 'postmark'; - 
process.env.POSTMARK_SERVER_TOKEN = 'bad-token'; - process.env.MAIL_FROM = 'no-reply@example.com'; - - vi.stubGlobal( - 'fetch', - vi.fn(async () => new Response('unauthorized', { status: 401 })), - ); - - const { getMailer, __resetMailerForTests } = await import('../mailer.js'); - __resetMailerForTests(); - - await expect( - getMailer().send({ to: 'x@example.com', subject: 's', text: 't' }), - ).rejects.toThrow(/postmark send failed: 401/); - }); - - it('throws on Postmark ErrorCode != 0', async () => { - process.env.MAIL_TRANSPORT = 'postmark'; - process.env.POSTMARK_SERVER_TOKEN = 'ok'; - process.env.MAIL_FROM = 'no-reply@example.com'; - - vi.stubGlobal( - 'fetch', - vi.fn(async () => - new Response(JSON.stringify({ ErrorCode: 406, Message: 'recipient suppressed' }), { - status: 200, - }), - ), - ); - - const { getMailer, __resetMailerForTests } = await import('../mailer.js'); - __resetMailerForTests(); - - await expect( - getMailer().send({ to: 'sup@example.com', subject: 's', text: 't' }), - ).rejects.toThrow(/postmark rejected message: 406/); - }); - - it('refuses to start without a token when MAIL_TRANSPORT=postmark', async () => { - process.env.MAIL_TRANSPORT = 'postmark'; - // no POSTMARK_SERVER_TOKEN - process.env.MAIL_FROM = 'no-reply@example.com'; - - const { getMailer, __resetMailerForTests } = await import('../mailer.js'); - __resetMailerForTests(); - - expect(() => getMailer()).toThrow(/POSTMARK_SERVER_TOKEN/); - }); - - it('refuses to start without MAIL_FROM', async () => { - process.env.MAIL_TRANSPORT = 'postmark'; - process.env.POSTMARK_SERVER_TOKEN = 'ok'; - // no MAIL_FROM - - const { getMailer, __resetMailerForTests } = await import('../mailer.js'); - __resetMailerForTests(); - - expect(() => getMailer()).toThrow(/MAIL_FROM/); - }); -}); diff --git a/apps/api/src/__tests__/multi-tenant.test.ts b/apps/api/src/__tests__/multi-tenant.test.ts new file mode 100644 index 0000000..8d21b5b --- /dev/null +++ b/apps/api/src/__tests__/multi-tenant.test.ts 
@@ -0,0 +1,227 @@ +/** + * Multi-tenant isolation tests. + * + * The route + helper refactor scopes every query to req.db (a transaction + * with `app.tenant_id` pinned). RLS is the second line of defense: even if + * an app-level filter is forgotten, the policy on every tenant-scoped + * table drops the row. These tests exercise the policy layer directly, + * connecting as the openpartner_app role (which lacks BYPASSRLS so the + * policies actually engage) and verifying: + * + * 1. Tenant A's GUC sees only A's rows + * 2. Tenant B's GUC sees only B's rows + * 3. With no GUC set, FORCE RLS hides everything (default deny) + * 4. The platform_admin GUC override sees both + * 5. WITH CHECK rejects writes that mismatch the GUC + * 6. Sessions stamped to tenant A are invisible under tenant B + * + * Skipped if DATABASE_URL isn't set (same gating as the rest of the + * integration suite). Also skipped if the openpartner_app role doesn't + * exist (e.g. a fresh dev install where OPENPARTNER_APP_DB_PASSWORD was + * never set so the migration short-circuited). + */ + +import { afterAll, beforeAll, beforeEach, describe, expect, it } from 'vitest'; +import { ulid } from 'ulid'; +import { TABLES, type TenantRow } from '@openpartner/db'; + +process.env.OPENPARTNER_MODE = 'selfhost'; +process.env.OPENPARTNER_TENANCY = 'multi'; + +const { db } = await import('../db.js'); + +const skipIntegration = !process.env.DATABASE_URL || process.env.INTEGRATION === 'skip'; + +const TENANT_A = `01TESTAA${ulid().slice(8)}`; +const TENANT_B = `01TESTBB${ulid().slice(8)}`; + +// Tables we'll seed + assert on. Limited to the ones that materially +// matter for an isolation guarantee — Partner is the canonical example +// (carries email + payout target, the things a leak would expose). 
+const SEED_TABLES_TO_RESET = [TABLES.Partner, TABLES.Session]; + +let appRoleAvailable = false; + +beforeAll(async () => { + if (skipIntegration) return; + await db.raw('select 1'); + + const role = (await db.raw( + `select 1 from pg_roles where rolname = 'openpartner_app' and rolcanlogin and not rolbypassrls`, + )) as { rows: unknown[] }; + appRoleAvailable = role.rows.length > 0; + + // Wipe + re-seed two tenants. We use the privileged db (bypassRls=true + // on this pool) so cleanup isn't policy-gated. + await db('Tenant').whereIn('id', [TENANT_A, TENANT_B]).del(); + await db(TABLES.Tenant).insert([ + { id: TENANT_A, slug: 'acme-test', displayName: 'Acme', status: 'active' }, + { id: TENANT_B, slug: 'globex-test', displayName: 'Globex', status: 'active' }, + ]); +}); + +afterAll(async () => { + if (!skipIntegration) { + for (const t of SEED_TABLES_TO_RESET) { + await db(t).whereIn('tenantId', [TENANT_A, TENANT_B]).del(); + } + await db('Tenant').whereIn('id', [TENANT_A, TENANT_B]).del(); + } + await db.destroy(); +}); + +beforeEach(async () => { + if (skipIntegration) return; + for (const t of SEED_TABLES_TO_RESET) { + await db(t).whereIn('tenantId', [TENANT_A, TENANT_B]).del(); + } + // Two Partners per tenant so we can verify counts, not just visibility. + await db(TABLES.Partner).insert([ + { id: ulid(), tenantId: TENANT_A, email: `a1-${ulid()}@acme.test`, name: 'Acme P1' }, + { id: ulid(), tenantId: TENANT_A, email: `a2-${ulid()}@acme.test`, name: 'Acme P2' }, + { id: ulid(), tenantId: TENANT_B, email: `b1-${ulid()}@globex.test`, name: 'Globex P1' }, + ]); +}); + +/** + * Run `fn` inside a transaction acting as openpartner_app with the given + * GUCs. SET ROLE switches the effective role for the duration of the + * transaction so RLS engages (the privileged role bypasses via + * `row_security = off` set on connection, but the SET ROLE switch + * removes that — RLS sees the new role's attributes). 
+ */ +async function asAppRole( + gucs: { tenantId?: string; platformAdmin?: boolean }, + fn: (trx: import('knex').Knex.Transaction) => Promise, +): Promise { + return db.transaction(async (trx) => { + // SET LOCAL row_security = on to undo the pool-level disable; SET + // ROLE openpartner_app to drop the bypass-RLS attributes the + // privileged owner role has. + await trx.raw(`set local row_security = on`); + await trx.raw(`set local role openpartner_app`); + if (gucs.tenantId) { + await trx.raw(`set local app.tenant_id = '${gucs.tenantId.replace(/'/g, "''")}'`); + } + if (gucs.platformAdmin) { + await trx.raw(`set local app.platform_admin = 'on'`); + } + return fn(trx); + }); +} + +describe.skipIf(skipIntegration)('multi-tenant RLS isolation', () => { + it('skips the suite cleanly when the openpartner_app role isn\'t provisioned', () => { + // Surface the gate so a dev sees in test output why isolation tests + // didn't actually run, rather than silently passing zero assertions. + if (!appRoleAvailable) { + console.warn( + '[multi-tenant.test] openpartner_app role missing/superuser/bypassrls — RLS isolation tests skipped', + ); + } + expect(true).toBe(true); + }); + + it('Acme GUC sees only Acme rows', async () => { + if (!appRoleAvailable) return; + const rows = await asAppRole({ tenantId: TENANT_A }, (trx) => + trx(TABLES.Partner).select('tenantId', 'email'), + ); + expect(rows).toHaveLength(2); + expect(rows.every((r) => r.tenantId === TENANT_A)).toBe(true); + }); + + it('Globex GUC sees only Globex rows', async () => { + if (!appRoleAvailable) return; + const rows = await asAppRole({ tenantId: TENANT_B }, (trx) => + trx(TABLES.Partner).select('tenantId'), + ); + expect(rows).toHaveLength(1); + expect(rows[0]!.tenantId).toBe(TENANT_B); + }); + + it('with no GUC set, FORCE RLS hides every row (default deny)', async () => { + if (!appRoleAvailable) return; + const rows = await asAppRole({}, (trx) => trx(TABLES.Partner).select('id')); + 
expect(rows).toHaveLength(0); + }); + + it('platform_admin override sees both tenants', async () => { + if (!appRoleAvailable) return; + const rows = await asAppRole({ platformAdmin: true }, (trx) => + trx(TABLES.Partner) + .whereIn('tenantId', [TENANT_A, TENANT_B]) + .select('tenantId'), + ); + const counts = rows.reduce>((acc, r) => { + acc[r.tenantId] = (acc[r.tenantId] ?? 0) + 1; + return acc; + }, {}); + expect(counts[TENANT_A]).toBe(2); + expect(counts[TENANT_B]).toBe(1); + }); + + it('WITH CHECK rejects an INSERT whose tenantId does not match the GUC', async () => { + if (!appRoleAvailable) return; + await expect( + asAppRole({ tenantId: TENANT_A }, (trx) => + trx(TABLES.Partner).insert({ + id: ulid(), + tenantId: TENANT_B, // mismatch on purpose + email: `cross-${ulid()}@x.test`, + name: 'Cross-tenant', + }), + ), + ).rejects.toThrow(/row-level security|new row violates/i); + }); + + it('FORCE RLS subjects the table owner to policies too', async () => { + if (!appRoleAvailable) return; + // Same SET ROLE dance; we're proving that even though the privileged + // owner has full grants, FORCE RLS still gates it once row_security + // is on AND the GUC is set to a tenant. + const acme = await asAppRole({ tenantId: TENANT_A }, (trx) => + trx(TABLES.Partner).count<{ count: string }[]>('* as count').first(), + ); + const globex = await asAppRole({ tenantId: TENANT_B }, (trx) => + trx(TABLES.Partner).count<{ count: string }[]>('* as count').first(), + ); + expect(Number(acme!.count)).toBe(2); + expect(Number(globex!.count)).toBe(1); + }); + + it('a Session stitched to Acme is invisible under Globex GUC', async () => { + if (!appRoleAvailable) return; + const sessionId = ulid(); + // Seed via privileged db (bypasses RLS) so we can stamp tenantId + // directly. 
+ await db(TABLES.Session).insert({ + id: sessionId, + tenantId: TENANT_A, + prefix: 'op_test_', + tokenHash: 'h_'.padEnd(64, 'a'), + principalKind: 'admin', + principalId: ulid(), + expiresAt: new Date(Date.now() + 60_000), + }); + + const seenInB = await asAppRole({ tenantId: TENANT_B }, (trx) => + trx(TABLES.Session).where({ id: sessionId }).first(), + ); + expect(seenInB).toBeUndefined(); + + const seenInA = await asAppRole({ tenantId: TENANT_A }, (trx) => + trx(TABLES.Session).where({ id: sessionId }).first(), + ); + expect(seenInA).toBeDefined(); + }); + + it('Tenant table self-policy: A sees its own row, not B\'s', async () => { + if (!appRoleAvailable) return; + const aRows = await asAppRole({ tenantId: TENANT_A }, (trx) => + trx(TABLES.Tenant).whereIn('id', [TENANT_A, TENANT_B]).select('id'), + ); + expect(aRows).toHaveLength(1); + expect(aRows[0]!.id).toBe(TENANT_A); + }); +}); diff --git a/apps/api/src/__tests__/network-and-signup.test.ts b/apps/api/src/__tests__/network-and-signup.test.ts new file mode 100644 index 0000000..e5b6545 --- /dev/null +++ b/apps/api/src/__tests__/network-and-signup.test.ts @@ -0,0 +1,344 @@ +/** + * End-to-end tests for the creator self-signup flow + Network push. + * + * Spins up a local HTTP receiver in-process to act as the Network. The + * vendor side calls the receiver via the URL we save into + * `network_membership` Config; assertions cover: + * + * 1. Signup without Network: Partner row + magic link, no Network call. + * 2. Signup with Network ON + autoEnroll: pushes /partners/upsert, + * stamps Partner.metadata.network.{creatorId,preExisting}. + * 3. Signup with Network down: Partner row still created, NetworkOutbox + * row enqueued for retry. + * 4. Backfill reconciliation: a vendor that flips Network on after + * having existing partners pushes them all; pre-existing creators + * (matching email already on Network) get preExisting=true stamp. + * 5. 
Outbox drain: the scheduler-callable drainOutbox retries pending + * rows and removes them on success. + * 6. Admin POST /partners and /partners/:id/revoke also push when + * Network is enabled. + * 7. require_review policy: signup creates Partner with activatedAt=null + * and still pushes (status='pending'). + * + * Skipped if DATABASE_URL isn't set, like the rest of the integration + * suite. + */ + +import { createServer, type Server } from 'node:http'; +import type { AddressInfo } from 'node:net'; +import { afterAll, afterEach, beforeAll, beforeEach, describe, expect, it } from 'vitest'; +import request from 'supertest'; +import { ulid } from 'ulid'; +import { DEFAULT_TENANT_ID, TABLES } from '@openpartner/db'; + +const ADMIN_KEY = 'op_test_network_admin_0123456789abcdef0123'; +process.env.ADMIN_API_KEY = ADMIN_KEY; +process.env.OPENPARTNER_MODE = 'selfhost'; +process.env.OPENPARTNER_TENANCY = 'single'; +process.env.PORTAL_URL = 'http://localhost:5673'; + +const { db } = await import('../db.js'); +const { createApp } = await import('../app.js'); +const { drainOutbox } = await import('../network-client.js'); +const { encryptSecret } = await import('../crypto.js'); + +const skipIntegration = !process.env.DATABASE_URL || process.env.INTEGRATION === 'skip'; +const app = createApp({ enableLogger: false }); + +// In-process Network receiver. Each test configures `mockResponse` to +// either succeed or fail; `received` captures every call. +interface ReceivedCall { + path: string; + method: string; + authorization: string | undefined; + body: unknown; +} +let receiver: Server; +let receiverUrl: string; +let received: ReceivedCall[] = []; +let mockResponse: { status: number; body: unknown } = { + status: 200, + body: { networkCreatorId: 'crt_default', alreadyExisted: false, affiliations: [] }, +}; +// Email-keyed store so we can simulate Network-side dedup for the +// late-join backfill test. 
When a request comes in with an email we've +// seen, return the same networkCreatorId + alreadyExisted=true. +const networkCreators = new Map(); + +beforeAll(async () => { + if (skipIntegration) return; + await db.raw('select 1'); + await new Promise((resolve) => { + receiver = createServer((req, res) => { + const chunks: Buffer[] = []; + req.on('data', (c) => chunks.push(c)); + req.on('end', () => { + const body = chunks.length > 0 ? JSON.parse(Buffer.concat(chunks).toString('utf8')) : null; + received.push({ + path: req.url ?? '', + method: req.method ?? '', + authorization: req.headers.authorization, + body, + }); + // Email-dedup simulation for backfill test + const email = (body && typeof body === 'object' && 'email' in body) ? (body as { email: string }).email : null; + if (email && networkCreators.has(email)) { + res.writeHead(200, { 'content-type': 'application/json' }); + res.end(JSON.stringify({ + networkCreatorId: networkCreators.get(email), + alreadyExisted: true, + affiliations: [{ vendorId: 'vnd_other', vendorPartnerId: 'pp_other', status: 'active', displayName: 'Other Vendor' }], + })); + return; + } + res.writeHead(mockResponse.status, { 'content-type': 'application/json' }); + res.end(JSON.stringify(mockResponse.body)); + }); + }).listen(0, '127.0.0.1', () => { + const addr = receiver.address() as AddressInfo; + receiverUrl = `http://127.0.0.1:${addr.port}`; + resolve(); + }); + }); +}); + +afterAll(async () => { + if (receiver) await new Promise((r) => receiver.close(() => r())); + await db.destroy(); +}); + +beforeEach(async () => { + if (skipIntegration) return; + received = []; + networkCreators.clear(); + mockResponse = { + status: 200, + body: { networkCreatorId: `crt_${ulid()}`, alreadyExisted: false, affiliations: [] }, + }; + for (const t of [ + TABLES.NetworkOutbox, + TABLES.MagicLinkToken, + TABLES.Partner, + TABLES.Config, + ]) { + await db(t).whereIn('tenantId', [DEFAULT_TENANT_ID]).del(); + } +}); + +afterEach(async () => { + if 
(skipIntegration) return; + for (const t of [ + TABLES.NetworkOutbox, + TABLES.MagicLinkToken, + TABLES.Partner, + TABLES.Config, + ]) { + await db(t).whereIn('tenantId', [DEFAULT_TENANT_ID]).del(); + } +}); + +async function configureNetwork(opts: { enabled: boolean; autoEnroll?: boolean; url?: string }): Promise { + const value = { + enabled: opts.enabled, + networkUrl: opts.url ?? receiverUrl, + vendorTokenCiphertext: encryptSecret('vntok_test'), + scopedKeyId: null, + autoEnroll: opts.autoEnroll ?? true, + }; + await db(TABLES.Config) + .insert({ tenantId: DEFAULT_TENANT_ID, key: 'network_membership', value: value as unknown as never, updatedAt: new Date() }) + .onConflict(['tenantId', 'key']) + .merge({ value: value as unknown as never, updatedAt: new Date() }); +} + +describe.skipIf(skipIntegration)('partner-signup + network push', () => { + it('signup without Network: creates Partner + magic link, no Network call', async () => { + const res = await request(app) + .post('/partner-signup') + .send({ email: 'creator1@example.test', name: 'Creator One' }); + expect(res.status).toBe(201); + expect(res.body.status).toBe('active'); + + const partner = await db(TABLES.Partner).where({ email: 'creator1@example.test' }).first(); + expect(partner).toBeDefined(); + expect(partner!.activatedAt).not.toBeNull(); + expect(received).toHaveLength(0); + }); + + it('signup with Network on: pushes /partners/upsert and stamps networkCreatorId', async () => { + await configureNetwork({ enabled: true }); + const expectedCreatorId = `crt_${ulid()}`; + mockResponse = { + status: 200, + body: { + networkCreatorId: expectedCreatorId, + alreadyExisted: false, + affiliations: [], + }, + }; + + const res = await request(app) + .post('/partner-signup') + .send({ email: 'creator2@example.test', name: 'Creator Two' }); + expect(res.status).toBe(201); + + expect(received).toHaveLength(1); + expect(received[0]!.path).toBe('/partners/upsert'); + expect(received[0]!.authorization).toBe('Bearer 
vntok_test'); + const sent = received[0]!.body as { email: string; metadata?: { source: string } }; + expect(sent.email).toBe('creator2@example.test'); + expect(sent.metadata?.source).toBe('self_signup'); + + const partner = await db(TABLES.Partner).where({ email: 'creator2@example.test' }).first(); + const meta = partner!.metadata as { network?: { creatorId: string; preExisting: boolean } }; + expect(meta.network?.creatorId).toBe(expectedCreatorId); + expect(meta.network?.preExisting).toBe(false); + }); + + it('signup with Network down: Partner row created, NetworkOutbox enqueued', async () => { + await configureNetwork({ enabled: true }); + mockResponse = { status: 503, body: { error: 'unavailable' } }; + + const res = await request(app) + .post('/partner-signup') + .send({ email: 'creator3@example.test', name: 'Creator Three' }); + expect(res.status).toBe(201); + + const partner = await db(TABLES.Partner).where({ email: 'creator3@example.test' }).first(); + expect(partner).toBeDefined(); + + const outbox = await db(TABLES.NetworkOutbox).where({ tenantId: DEFAULT_TENANT_ID }); + expect(outbox).toHaveLength(1); + expect(outbox[0]!.op).toBe('partner_upsert'); + expect(outbox[0]!.attempts).toBe(1); + expect(outbox[0]!.status).toBe('pending'); + }); + + it('drainOutbox retries enqueued rows and deletes them on success', async () => { + await configureNetwork({ enabled: true }); + // First push fails → enqueued. + mockResponse = { status: 503, body: {} }; + await request(app) + .post('/partner-signup') + .send({ email: 'creator4@example.test', name: 'Creator Four' }); + expect((await db(TABLES.NetworkOutbox)).length).toBe(1); + + // Make outbox row eligible NOW (otherwise nextAttemptAt is in the future). + await db(TABLES.NetworkOutbox).update({ nextAttemptAt: new Date(Date.now() - 1000) }); + + // Recover the Network and drain. 
+ mockResponse = { + status: 200, + body: { networkCreatorId: 'crt_recovered', alreadyExisted: false, affiliations: [] }, + }; + const result = await drainOutbox(db, DEFAULT_TENANT_ID); + expect(result.drained).toBe(1); + expect(result.succeeded).toBe(1); + expect(result.dead).toBe(0); + + const remaining = await db(TABLES.NetworkOutbox); + expect(remaining).toHaveLength(0); + }); + + it('require_review policy: Partner created with activatedAt=null and still pushes (status=pending)', async () => { + await configureNetwork({ enabled: true }); + await db(TABLES.Config) + .insert({ + tenantId: DEFAULT_TENANT_ID, + key: 'partner_signup', + value: { policy: 'require_review' } as unknown as never, + updatedAt: new Date(), + }) + .onConflict(['tenantId', 'key']) + .merge({ value: { policy: 'require_review' } as unknown as never, updatedAt: new Date() }); + + const res = await request(app) + .post('/partner-signup') + .send({ email: 'pending@example.test', name: 'Pending Creator' }); + expect(res.status).toBe(201); + expect(res.body.status).toBe('pending_review'); + + const partner = await db(TABLES.Partner).where({ email: 'pending@example.test' }).first(); + expect(partner!.activatedAt).toBeNull(); + + expect(received).toHaveLength(1); + expect((received[0]!.body as { status: string }).status).toBe('pending'); + }); + + it('admin POST /partners pushes to Network when enabled', async () => { + await configureNetwork({ enabled: true }); + const res = await request(app) + .post('/partners') + .set('Authorization', `Bearer ${ADMIN_KEY}`) + .send({ email: 'invited@example.test', name: 'Invited' }); + expect(res.status).toBe(201); + + expect(received).toHaveLength(1); + expect((received[0]!.body as { metadata: { source: string } }).metadata.source).toBe('admin_invite'); + }); + + it('admin POST /partners/:id/revoke pushes a revoke regardless of autoEnroll', async () => { + // autoEnroll OFF — but revoke should still mirror so Network stops matching. 
+ await configureNetwork({ enabled: true, autoEnroll: false }); + + const create = await request(app) + .post('/partners') + .set('Authorization', `Bearer ${ADMIN_KEY}`) + .send({ email: 'revoke-target@example.test', name: 'Revoke Target' }); + expect(create.status).toBe(201); + expect(received).toHaveLength(0); // autoEnroll off → no upsert + + const revoke = await request(app) + .post(`/partners/${create.body.id}/revoke`) + .set('Authorization', `Bearer ${ADMIN_KEY}`) + .send({ reason: 'test', notify: false }); + expect(revoke.status).toBe(200); + + expect(received).toHaveLength(1); + expect(received[0]!.path).toBe('/partners/upsert'); + expect((received[0]!.body as { status: string; vendorPartnerId: string }).status).toBe('revoked'); + }); + + it('backfill reconciliation: pre-existing Network creators come back with alreadyExisted=true', async () => { + // Two partners sit in the vendor's DB BEFORE Network is enabled. + const p1 = ulid(); + const p2 = ulid(); + await db(TABLES.Partner).insert([ + { id: p1, tenantId: DEFAULT_TENANT_ID, email: 'preexisting@example.test', name: 'PreExisting', activatedAt: new Date() }, + { id: p2, tenantId: DEFAULT_TENANT_ID, email: 'fresh@example.test', name: 'Fresh', activatedAt: new Date() }, + ]); + + // Simulate that 'preexisting@example.test' already has a Network identity + // from another vendor — receiver returns the same id with alreadyExisted=true. 
+ networkCreators.set('preexisting@example.test', 'crt_preexisting_already'); + + await configureNetwork({ enabled: true }); + + const res = await request(app) + .post('/config/network/backfill') + .set('Authorization', `Bearer ${ADMIN_KEY}`) + .send({}); + expect(res.status).toBe(200); + expect(res.body).toEqual({ total: 2, pushed: 2, queued: 0 }); + + const refreshed1 = await db(TABLES.Partner).where({ id: p1 }).first(); + const refreshed2 = await db(TABLES.Partner).where({ id: p2 }).first(); + const meta1 = refreshed1!.metadata as { network?: { creatorId: string; preExisting: boolean } }; + const meta2 = refreshed2!.metadata as { network?: { creatorId: string; preExisting: boolean } }; + expect(meta1.network?.creatorId).toBe('crt_preexisting_already'); + expect(meta1.network?.preExisting).toBe(true); + expect(meta2.network?.preExisting).toBe(false); + }); + + it('GET /config/network never leaks the vendor token', async () => { + await configureNetwork({ enabled: true }); + const res = await request(app) + .get('/config/network') + .set('Authorization', `Bearer ${ADMIN_KEY}`); + expect(res.status).toBe(200); + expect(res.body).not.toHaveProperty('vendorToken'); + expect(res.body).not.toHaveProperty('vendorTokenCiphertext'); + expect(res.body.hasVendorToken).toBe(true); + expect(res.body.networkUrl).toBe(receiverUrl); + }); +}); diff --git a/apps/api/src/__tests__/network-billing-proxy.test.ts b/apps/api/src/__tests__/network-billing-proxy.test.ts new file mode 100644 index 0000000..f3a70c8 --- /dev/null +++ b/apps/api/src/__tests__/network-billing-proxy.test.ts @@ -0,0 +1,228 @@ +/** + * Vendor-side admin proxy routes for Network billing. + * + * Spins up a local HTTP server acting as the Network's billing + * endpoints (GET /vendors/me/billing, POST /vendors/me/billing/checkout, + * POST /vendors/me/billing/portal). 
Verifies the openpartner main side + * forwards correctly with the right bearer + body, surfaces Network + * errors as the right HTTP status, and gates on admin auth. + * + * Skipped if DATABASE_URL isn't set. + */ + +import { createServer, type Server } from 'node:http'; +import type { AddressInfo } from 'node:net'; +import { afterAll, beforeAll, beforeEach, describe, expect, it } from 'vitest'; +import request from 'supertest'; +import { DEFAULT_TENANT_ID, TABLES } from '@openpartner/db'; +import { db } from '../db.js'; +import { createApp } from '../app.js'; +import { encryptSecret } from '../crypto.js'; + +const ADMIN_KEY = 'op_test_billing_proxy_0123456789abcdef0123'; +process.env.ADMIN_API_KEY = ADMIN_KEY; +process.env.OPENPARTNER_MODE = process.env.OPENPARTNER_MODE ?? 'selfhost'; +process.env.OPENPARTNER_TENANCY = process.env.OPENPARTNER_TENANCY ?? 'single'; +process.env.PORTAL_URL = process.env.PORTAL_URL ?? 'http://localhost:5673'; + +const skipIntegration = !process.env.DATABASE_URL || process.env.INTEGRATION === 'skip'; +const app = createApp({ enableLogger: false }); + +interface NetCall { + method: string; + path: string; + body: unknown; + authorization: string | undefined; +} + +let receiver: Server; +let receiverUrl: string; +const calls: NetCall[] = []; +let nextResponse: { status: number; body: unknown } = { + status: 200, + body: { + billingRequired: true, + bundledWithMainPlan: false, + subscriptionStatus: null, + stripeCustomerId: null, + stripeSubscriptionId: null, + currentPeriodEnd: null, + networkPriceConfigured: true, + networkUsagePriceConfigured: true, + billingEnabled: true, + }, +}; + +beforeAll(async () => { + if (skipIntegration) return; + await db.raw('select 1'); + await new Promise((resolve) => { + receiver = createServer((req, res) => { + const chunks: Buffer[] = []; + req.on('data', (c) => chunks.push(c)); + req.on('end', () => { + const body = chunks.length ? 
JSON.parse(Buffer.concat(chunks).toString('utf8')) : null; + calls.push({ + method: req.method ?? '', + path: req.url ?? '', + body, + authorization: req.headers.authorization, + }); + res.writeHead(nextResponse.status, { 'content-type': 'application/json' }); + res.end(JSON.stringify(nextResponse.body)); + }); + }).listen(0, '127.0.0.1', () => { + const addr = receiver.address() as AddressInfo; + receiverUrl = `http://127.0.0.1:${addr.port}`; + resolve(); + }); + }); +}); + +afterAll(async () => { + if (receiver) await new Promise((r) => receiver.close(() => r())); + await db.destroy(); +}); + +beforeEach(async () => { + if (skipIntegration) return; + await db(TABLES.Config).whereIn('tenantId', [DEFAULT_TENANT_ID]).del(); + calls.length = 0; +}); + +async function configureNetwork(opts: { enabled: boolean }): Promise { + const value = { + enabled: opts.enabled, + networkUrl: receiverUrl, + vendorTokenCiphertext: encryptSecret('vntok_proxytest'), + scopedKeyId: null, + autoEnroll: true, + }; + await db(TABLES.Config) + .insert({ + tenantId: DEFAULT_TENANT_ID, + key: 'network_membership', + value: value as unknown as never, + updatedAt: new Date(), + }) + .onConflict(['tenantId', 'key']) + .merge({ value: value as unknown as never, updatedAt: new Date() }); +} + +describe.skipIf(skipIntegration)('GET /admin/network/billing', () => { + it('proxies with vendor bearer + returns Network payload', async () => { + await configureNetwork({ enabled: true }); + const res = await request(app) + .get('/admin/network/billing') + .set('Authorization', `Bearer ${ADMIN_KEY}`); + expect(res.status).toBe(200); + expect(res.body.billingRequired).toBe(true); + expect(res.body.networkPriceConfigured).toBe(true); + + const call = calls[0]!; + expect(call.method).toBe('GET'); + expect(call.path).toBe('/vendors/me/billing'); + expect(call.authorization).toBe('Bearer vntok_proxytest'); + }); + + it('without Network configured: 503 network_call_failed', async () => { + const res = await 
request(app) + .get('/admin/network/billing') + .set('Authorization', `Bearer ${ADMIN_KEY}`); + expect(res.status).toBe(503); + expect(res.body.error).toBe('network_call_failed'); + expect(calls).toHaveLength(0); + }); + + it('without admin auth: 401', async () => { + await configureNetwork({ enabled: true }); + const res = await request(app).get('/admin/network/billing'); + expect(res.status).toBe(401); + expect(calls).toHaveLength(0); + }); + + it('Network 5xx surfaces as 5xx network_call_failed', async () => { + await configureNetwork({ enabled: true }); + nextResponse = { status: 503, body: { error: 'billing_not_configured' } }; + const res = await request(app) + .get('/admin/network/billing') + .set('Authorization', `Bearer ${ADMIN_KEY}`); + expect(res.status).toBe(503); + expect(res.body.error).toBe('network_call_failed'); + }); +}); + +describe.skipIf(skipIntegration)('POST /admin/network/billing/checkout', () => { + it('forwards body + returns Stripe URL from Network', async () => { + await configureNetwork({ enabled: true }); + nextResponse = { status: 200, body: { url: 'https://checkout.stripe.test/cs_xxx' } }; + const res = await request(app) + .post('/admin/network/billing/checkout') + .set('Authorization', `Bearer ${ADMIN_KEY}`) + .send({ + successUrl: 'https://acme.example.test/admin/network/billing?subscribed=1', + cancelUrl: 'https://acme.example.test/admin/network/billing', + }); + expect(res.status).toBe(200); + expect(res.body.url).toBe('https://checkout.stripe.test/cs_xxx'); + + const call = calls[0]!; + expect(call.method).toBe('POST'); + expect(call.path).toBe('/vendors/me/billing/checkout'); + expect((call.body as { successUrl: string }).successUrl).toContain('subscribed=1'); + }); + + it('rejects malformed body', async () => { + await configureNetwork({ enabled: true }); + const res = await request(app) + .post('/admin/network/billing/checkout') + .set('Authorization', `Bearer ${ADMIN_KEY}`) + .send({ successUrl: 'not-a-url', cancelUrl: 
'also-not' }); + expect(res.status).toBe(400); + expect(res.body.error).toBe('invalid_body'); + expect(calls).toHaveLength(0); + }); + + it('Network 400 (e.g. bundled_with_main_plan) surfaces as 400', async () => { + await configureNetwork({ enabled: true }); + nextResponse = { status: 400, body: { error: 'billing_bundled_with_main_plan' } }; + const res = await request(app) + .post('/admin/network/billing/checkout') + .set('Authorization', `Bearer ${ADMIN_KEY}`) + .send({ + successUrl: 'https://acme.example.test/ok', + cancelUrl: 'https://acme.example.test/x', + }); + expect(res.status).toBe(400); + expect(res.body.error).toBe('network_call_failed'); + expect(res.body.detail).toContain('billing_bundled_with_main_plan'); + }); +}); + +describe.skipIf(skipIntegration)('POST /admin/network/billing/portal', () => { + it('forwards returnUrl + returns portal URL', async () => { + await configureNetwork({ enabled: true }); + nextResponse = { status: 200, body: { url: 'https://billing-portal.stripe.test/bp_xxx' } }; + const res = await request(app) + .post('/admin/network/billing/portal') + .set('Authorization', `Bearer ${ADMIN_KEY}`) + .send({ returnUrl: 'https://acme.example.test/admin/network/billing' }); + expect(res.status).toBe(200); + expect(res.body.url).toBe('https://billing-portal.stripe.test/bp_xxx'); + + const call = calls[0]!; + expect(call.method).toBe('POST'); + expect(call.path).toBe('/vendors/me/billing/portal'); + expect((call.body as { returnUrl: string }).returnUrl).toBe('https://acme.example.test/admin/network/billing'); + }); + + it('rejects malformed returnUrl', async () => { + await configureNetwork({ enabled: true }); + const res = await request(app) + .post('/admin/network/billing/portal') + .set('Authorization', `Bearer ${ADMIN_KEY}`) + .send({ returnUrl: 'not-a-url' }); + expect(res.status).toBe(400); + expect(res.body.error).toBe('invalid_body'); + }); +}); diff --git a/apps/api/src/__tests__/network-payouts-report.test.ts 
b/apps/api/src/__tests__/network-payouts-report.test.ts new file mode 100644 index 0000000..18a0737 --- /dev/null +++ b/apps/api/src/__tests__/network-payouts-report.test.ts @@ -0,0 +1,236 @@ +/** + * Vendor-side Network payout aggregation + report-to-Network flow. + * + * Spins up a local HTTP receiver acting as the Network's + * /vendors/me/report-payouts endpoint, configures network_membership, + * seeds Payout + Partner rows (some Network-originated, some not), + * and verifies aggregateNetworkOriginatedPayouts + the + * reportNetworkPayoutsToNetwork glue. + * + * Skipped if DATABASE_URL isn't set. + */ + +import { createServer, type Server } from 'node:http'; +import type { AddressInfo } from 'node:net'; +import { afterAll, beforeAll, beforeEach, describe, expect, it } from 'vitest'; +import { ulid } from 'ulid'; +import { DEFAULT_TENANT_ID, TABLES } from '@openpartner/db'; +import { db } from '../db.js'; +import { aggregateNetworkOriginatedPayouts } from '../usage-billing.js'; +import { reportNetworkPayoutsToNetwork } from '../network-client.js'; +import { encryptSecret } from '../crypto.js'; +import { CONFIG_KEYS, getConfig } from '../config.js'; + +process.env.OPENPARTNER_MODE = process.env.OPENPARTNER_MODE ?? 'selfhost'; +process.env.OPENPARTNER_TENANCY = process.env.OPENPARTNER_TENANCY ?? 'single'; +process.env.PORTAL_URL = process.env.PORTAL_URL ?? 
'http://localhost:5673'; + +const skipIntegration = !process.env.DATABASE_URL || process.env.INTEGRATION === 'skip'; + +interface ReceivedPayout { + amountUsd: number; + sinceIso: string | null; + untilIso: string; + authorization: string | undefined; +} + +let receiver: Server; +let receiverUrl: string; +const received: ReceivedPayout[] = []; +let mockResponse: { status: number; body: unknown } = { status: 202, body: { accepted: true } }; + +beforeAll(async () => { + if (skipIntegration) return; + await db.raw('select 1'); + await new Promise((resolve) => { + receiver = createServer((req, res) => { + const chunks: Buffer[] = []; + req.on('data', (c) => chunks.push(c)); + req.on('end', () => { + const body = chunks.length ? JSON.parse(Buffer.concat(chunks).toString('utf8')) : null; + if (req.url === '/vendors/me/report-payouts' && req.method === 'POST') { + received.push({ + amountUsd: (body as { amountUsd: number }).amountUsd, + sinceIso: (body as { sinceIso: string | null }).sinceIso, + untilIso: (body as { untilIso: string }).untilIso, + authorization: req.headers.authorization, + }); + } + res.writeHead(mockResponse.status, { 'content-type': 'application/json' }); + res.end(JSON.stringify(mockResponse.body)); + }); + }).listen(0, '127.0.0.1', () => { + const addr = receiver.address() as AddressInfo; + receiverUrl = `http://127.0.0.1:${addr.port}`; + resolve(); + }); + }); +}); + +afterAll(async () => { + if (receiver) await new Promise((r) => receiver.close(() => r())); + await db.destroy(); +}); + +const TABLES_TO_CLEAN = [ + TABLES.Payout, + TABLES.Partner, + TABLES.Config, +]; + +beforeEach(async () => { + if (skipIntegration) return; + for (const t of TABLES_TO_CLEAN) { + await db(t).whereIn('tenantId', [DEFAULT_TENANT_ID]).del(); + } + received.length = 0; + mockResponse = { status: 202, body: { accepted: true } }; +}); + +async function configureNetwork(opts: { enabled: boolean; url?: string }): Promise { + const value = { + enabled: opts.enabled, + 
networkUrl: opts.url ?? receiverUrl, + vendorTokenCiphertext: encryptSecret('vntok_test'), + scopedKeyId: null, + autoEnroll: true, + }; + await db(TABLES.Config) + .insert({ tenantId: DEFAULT_TENANT_ID, key: 'network_membership', value: value as unknown as never, updatedAt: new Date() }) + .onConflict(['tenantId', 'key']) + .merge({ value: value as unknown as never, updatedAt: new Date() }); +} + +async function seedPartner(opts: { id?: string; networkCreatorId: string | null }): Promise { + const id = opts.id ?? ulid(); + const metadata = opts.networkCreatorId + ? { network: { creatorId: opts.networkCreatorId, preExisting: false, syncedAt: new Date().toISOString() } } + : {}; + await db(TABLES.Partner).insert({ + id, + tenantId: DEFAULT_TENANT_ID, + email: `${id}@example.test`, + name: 'P', + metadata: metadata as unknown as never, + activatedAt: new Date(), + }); + return id; +} + +async function seedPayout(opts: { + partnerId: string; + amount: number; + status: 'paid' | 'pending' | 'failed'; + completedAt?: Date | null; +}): Promise { + await db(TABLES.Payout).insert({ + id: ulid(), + tenantId: DEFAULT_TENANT_ID, + partnerId: opts.partnerId, + amount: opts.amount.toFixed(2), + currency: 'USD', + method: 'manual', + status: opts.status, + metadata: {} as unknown as never, + completedAt: opts.completedAt ?? (opts.status === 'paid' ? new Date() : null), + }); +} + +describe.skipIf(skipIntegration)('aggregateNetworkOriginatedPayouts', () => { + it('sums only paid payouts whose Partner has metadata.network.creatorId', async () => { + const networkPartner = await seedPartner({ networkCreatorId: 'crt_alice' }); + const directPartner = await seedPartner({ networkCreatorId: null }); + await seedPayout({ partnerId: networkPartner, amount: 100, status: 'paid' }); + await seedPayout({ partnerId: networkPartner, amount: 250, status: 'paid' }); + // Pending: doesn't count. 
+ await seedPayout({ partnerId: networkPartner, amount: 999, status: 'pending', completedAt: null }); + // Direct partner: doesn't count even if paid. + await seedPayout({ partnerId: directPartner, amount: 500, status: 'paid' }); + + const total = await aggregateNetworkOriginatedPayouts(db, null, new Date()); + expect(total).toBe(350); + }); + + it('respects since (exclusive) + until (inclusive) bounds', async () => { + const p = await seedPartner({ networkCreatorId: 'crt_x' }); + const earlier = new Date(Date.now() - 60_000); + const later = new Date(); + await seedPayout({ partnerId: p, amount: 50, status: 'paid', completedAt: earlier }); + await seedPayout({ partnerId: p, amount: 75, status: 'paid', completedAt: later }); + + // Window: (earlier, later] — first payout is excluded by since. + const total = await aggregateNetworkOriginatedPayouts(db, earlier, later); + expect(total).toBe(75); + }); + + it('returns 0 when no payouts in range', async () => { + const total = await aggregateNetworkOriginatedPayouts(db, null, new Date()); + expect(total).toBe(0); + }); +}); + +describe.skipIf(skipIntegration)('reportNetworkPayoutsToNetwork', () => { + it('skips when network membership not enabled', async () => { + // No config row → reason='network_not_configured'. + const r = await reportNetworkPayoutsToNetwork(db, DEFAULT_TENANT_ID); + expect(r.reported).toBe(false); + expect(r.reason).toBe('network_not_configured'); + expect(received).toHaveLength(0); + }); + + it('skips + advances high-water mark when amount is zero', async () => { + await configureNetwork({ enabled: true }); + const r = await reportNetworkPayoutsToNetwork(db, DEFAULT_TENANT_ID); + expect(r.reported).toBe(false); + expect(r.reason).toBe('no_network_payouts_in_range'); + expect(r.amountUsd).toBe(0); + // High-water mark advanced. 
+ const mark = await getConfig(db, DEFAULT_TENANT_ID, CONFIG_KEYS.LastNetworkPayoutsReportedAt); + expect(mark).toBeTruthy(); + }); + + it('reports the right total + sets bearer + advances high-water mark on success', async () => { + await configureNetwork({ enabled: true }); + const p = await seedPartner({ networkCreatorId: 'crt_z' }); + await seedPayout({ partnerId: p, amount: 1000, status: 'paid' }); + await seedPayout({ partnerId: p, amount: 234.56, status: 'paid' }); + + const r = await reportNetworkPayoutsToNetwork(db, DEFAULT_TENANT_ID); + expect(r.reported).toBe(true); + expect(r.amountUsd).toBe(1234.56); + expect(received).toHaveLength(1); + expect(received[0]!.amountUsd).toBe(1234.56); + expect(received[0]!.authorization).toBe('Bearer vntok_test'); + + // Mark advanced. + const mark = await getConfig(db, DEFAULT_TENANT_ID, CONFIG_KEYS.LastNetworkPayoutsReportedAt); + expect(mark).toBeTruthy(); + }); + + it('on Network 5xx: does NOT advance high-water mark (so next tick re-reports)', async () => { + await configureNetwork({ enabled: true }); + const p = await seedPartner({ networkCreatorId: 'crt_q' }); + await seedPayout({ partnerId: p, amount: 100, status: 'paid' }); + + mockResponse = { status: 503, body: { error: 'unavailable' } }; + const r = await reportNetworkPayoutsToNetwork(db, DEFAULT_TENANT_ID); + expect(r.reported).toBe(false); + expect(r.reason).toContain('network 503'); + const mark = await getConfig(db, DEFAULT_TENANT_ID, CONFIG_KEYS.LastNetworkPayoutsReportedAt); + expect(mark).toBeNull(); + }); + + it('second consecutive successful run only includes payouts after the first run', async () => { + await configureNetwork({ enabled: true }); + const p = await seedPartner({ networkCreatorId: 'crt_r' }); + await seedPayout({ partnerId: p, amount: 50, status: 'paid' }); + + const r1 = await reportNetworkPayoutsToNetwork(db, DEFAULT_TENANT_ID); + expect(r1.amountUsd).toBe(50); + + // Add a new payout post-mark, then re-run. 
+ await seedPayout({ partnerId: p, amount: 25, status: 'paid' }); + const r2 = await reportNetworkPayoutsToNetwork(db, DEFAULT_TENANT_ID); + expect(r2.amountUsd).toBe(25); + }); +}); diff --git a/apps/api/src/__tests__/network.test.ts b/apps/api/src/__tests__/network.test.ts deleted file mode 100644 index 41c1013..0000000 --- a/apps/api/src/__tests__/network.test.ts +++ /dev/null @@ -1,767 +0,0 @@ -/** - * End-to-end Network flow, with a real running Express server standing in - * as the "vendor's OpenPartner instance" that the Network federates to. - * - * Walkthrough: - * 1. Admin registers a NetworkVendor with the local instance URL + admin key. - * 2. Vendor uses the issued vendor API key to publish an Offering tied to - * a real Campaign on their instance. - * 3. Admin creates a NetworkCreator and activates it. - * 4. Creator applies to the Offering. - * 5. Vendor approves → federation POSTs to the same server to create a - * Partner + Link. Partnership row is written with the public share URL. - * 6. We assert the Partner + Link actually exist on the vendor's side. - */ - -import { afterAll, beforeAll, beforeEach, describe, expect, it } from 'vitest'; -import type { AddressInfo } from 'node:net'; -import request from 'supertest'; -import { ulid } from 'ulid'; -import { TABLES } from '@openpartner/db'; -import { db } from '../db.js'; -import { createApp } from '../app.js'; - -const ADMIN_KEY = 'op_test_network_admin_0123456789abcdef0123'; -process.env.ADMIN_API_KEY = ADMIN_KEY; -process.env.OPENPARTNER_MODE = 'selfhost'; -process.env.NETWORK_ENCRYPTION_KEY = 'a'.repeat(64); // 32 hex bytes - -const skipIntegration = !process.env.DATABASE_URL || process.env.INTEGRATION === 'skip'; -// ApiKey has FKs to NetworkVendor, NetworkCreator, and Partner, so it must -// be cleared BEFORE those parent tables. Similarly Partnership/Request have -// FKs to Offering/NetworkVendor/NetworkCreator. 
-const TABLES_TO_CLEAN = [ - TABLES.Partnership, - TABLES.PartnershipRequest, - TABLES.Offering, - TABLES.Session, - TABLES.MagicLinkToken, - TABLES.DevMessage, - TABLES.ApiKey, - TABLES.Commission, - TABLES.Attribution, - TABLES.Event, - TABLES.Identity, - TABLES.Click, - TABLES.Link, - TABLES.Campaign, - TABLES.Payout, - TABLES.NetworkVendor, - TABLES.NetworkCreator, - TABLES.Partner, - TABLES.Config, -]; - -const app = createApp({ enableLogger: false }); -let server: ReturnType; -let instanceUrl: string; - -beforeAll(async () => { - if (skipIntegration) return; - await db.raw('select 1'); - await new Promise((resolve) => { - server = app.listen(0, () => { - const port = (server.address() as AddressInfo).port; - instanceUrl = `http://127.0.0.1:${port}`; - // Pin the router URL so federation doesn't try to swap ports. - process.env.NETWORK_ROUTER_URL = instanceUrl; - resolve(); - }); - }); -}); - -afterAll(async () => { - if (server) await new Promise((resolve) => server.close(() => resolve())); - await db.destroy(); -}); - -beforeEach(async () => { - if (skipIntegration) return; - for (const t of TABLES_TO_CLEAN) { - await db(t).del(); - } -}); - -describe.skipIf(skipIntegration)('openpartner network', () => { - it('vendor → offering → creator → request → approve federates a partner + link', async () => { - // Create a campaign on the "vendor's instance" (same server in tests). - const campaignRes = await request(app) - .post('/campaigns') - .set('Authorization', `Bearer ${ADMIN_KEY}`) - .send({ name: 'Referral', commissionRule: { type: 'percent', value: 30, recurring: true } }); - expect(campaignRes.status).toBe(201); - const vendorCampaignId = campaignRes.body.id; - - // Register vendor on the Network (admin-gated). 
- const vendorRegRes = await request(app) - .post('/network/vendors') - .set('Authorization', `Bearer ${ADMIN_KEY}`) - .send({ - name: 'Acme', - slug: 'acme', - websiteUrl: 'https://acme.example', - instanceUrl, - instanceKey: ADMIN_KEY, - }); - expect(vendorRegRes.status).toBe(201); - const vendorId = vendorRegRes.body.vendor.id; - const vendorApiKey = vendorRegRes.body.apiKey; - - // Admin activates the vendor. - await request(app) - .post(`/network/vendors/${vendorId}/activate`) - .set('Authorization', `Bearer ${ADMIN_KEY}`); - - // Vendor publishes an offering. - const offeringRes = await request(app) - .post('/network/offerings') - .set('Authorization', `Bearer ${vendorApiKey}`) - .send({ - title: 'Acme Pro — 30% for 6 months', - productUrl: 'https://acme.example/pro', - description: 'Sell our flagship to your audience.', - vendorCampaignId, - terms: { - payout: { type: 'recurring_percent', percent: 30, durationMonths: 6 }, - bonuses: [{ description: '$500 at $10k MRR', triggerRevenueUsd: 10000, bonusUsd: 500 }], - cookieWindowDays: 60, - }, - published: true, - }); - expect(offeringRes.status).toBe(201); - const offeringId = offeringRes.body.offering.id; - - // Admin onboards a creator. - const creatorRes = await request(app) - .post('/network/creators') - .set('Authorization', `Bearer ${ADMIN_KEY}`) - .send({ - name: 'Grace Hopper', - handle: 'gracie', - email: 'grace@example.com', - platforms: [{ platform: 'youtube', url: 'https://youtube.com/@gracie', followers: 120000 }], - }); - expect(creatorRes.status).toBe(201); - const creatorId = creatorRes.body.creator.id; - const creatorKey = creatorRes.body.apiKey; - await request(app).post(`/network/creators/${creatorId}/activate`).set('Authorization', `Bearer ${ADMIN_KEY}`); - - // Creator sees the directory. 
- const dirRes = await request(app).get('/network/directory/offerings'); - expect(dirRes.status).toBe(200); - expect(dirRes.body.offerings).toHaveLength(1); - expect(dirRes.body.offerings[0].title).toContain('Acme'); - - // Creator applies. - const applyRes = await request(app) - .post('/network/requests') - .set('Authorization', `Bearer ${creatorKey}`) - .send({ offeringId, message: 'I have 120k subs interested in this.' }); - expect(applyRes.status).toBe(201); - const requestId = applyRes.body.request.id; - - // Vendor approves (this federates). - const approveRes = await request(app) - .post(`/network/requests/${requestId}/approve`) - .set('Authorization', `Bearer ${vendorApiKey}`) - .send({}); - expect(approveRes.status).toBe(200); - expect(approveRes.body.partnership.status).toBe('active'); - expect(approveRes.body.federated.partnerId).toBeTruthy(); - expect(approveRes.body.federated.linkKey).toBe('gracie'); - expect(approveRes.body.federated.publicShareUrl).toBe(`${instanceUrl}/r/gracie`); - - // The vendor's instance actually has the partner + link now. 
- const partnerOnVendor = await db(TABLES.Partner).where({ id: approveRes.body.federated.partnerId }).first(); - expect(partnerOnVendor).toBeDefined(); - expect(partnerOnVendor!.email).toBe('grace@example.com'); - expect(partnerOnVendor!.metadata).toMatchObject({ source: 'openpartner_network' }); - - const linkOnVendor = await db(TABLES.Link).where({ linkKey: 'gracie' }).first(); - expect(linkOnVendor).toBeDefined(); - expect(linkOnVendor!.campaignId).toBe(vendorCampaignId); - }); - - it('creator cannot double-apply to the same offering', async () => { - const campaignRes = await request(app) - .post('/campaigns') - .set('Authorization', `Bearer ${ADMIN_KEY}`) - .send({ name: 'C', commissionRule: { type: 'percent', value: 10 } }); - const vendorCampaignId = campaignRes.body.id; - - const vendorRes = await request(app) - .post('/network/vendors') - .set('Authorization', `Bearer ${ADMIN_KEY}`) - .send({ name: 'Vendo', slug: `vendo-${Date.now()}`, instanceUrl, instanceKey: ADMIN_KEY }); - const vendorApiKey = vendorRes.body.apiKey; - await request(app).post(`/network/vendors/${vendorRes.body.vendor.id}/activate`).set('Authorization', `Bearer ${ADMIN_KEY}`); - - const offeringRes = await request(app) - .post('/network/offerings') - .set('Authorization', `Bearer ${vendorApiKey}`) - .send({ - title: 'Offer', - productUrl: 'https://v.example', - vendorCampaignId, - terms: { payout: { type: 'one_time_fee', amount: 50 }, cookieWindowDays: 30 }, - published: true, - }); - const offeringId = offeringRes.body.offering.id; - - const creatorRes = await request(app) - .post('/network/creators') - .set('Authorization', `Bearer ${ADMIN_KEY}`) - .send({ name: 'Eva', handle: `eva_${Date.now()}`, email: `eva${Date.now()}@e.com` }); - const creatorKey = creatorRes.body.apiKey; - await request(app).post(`/network/creators/${creatorRes.body.creator.id}/activate`).set('Authorization', `Bearer ${ADMIN_KEY}`); - - const first = await request(app) - .post('/network/requests') - 
.set('Authorization', `Bearer ${creatorKey}`) - .send({ offeringId }); - expect(first.status).toBe(201); - - const second = await request(app) - .post('/network/requests') - .set('Authorization', `Bearer ${creatorKey}`) - .send({ offeringId }); - expect(second.status).toBe(409); - }); - - it('concurrent /approve calls do not both federate — one wins, the other 409s', async () => { - // Regression for the ultrareview race. Both calls see status='pending' - // on the SELECT; the conditional UPDATE pending→approving admits one. - const campaignRes = await request(app) - .post('/campaigns') - .set('Authorization', `Bearer ${ADMIN_KEY}`) - .send({ name: 'Race', commissionRule: { type: 'percent', value: 15 } }); - const vendorCampaignId = campaignRes.body.id; - - const vendorRes = await request(app) - .post('/network/vendors') - .set('Authorization', `Bearer ${ADMIN_KEY}`) - .send({ name: 'RaceVendor', slug: `race-${Date.now()}`, instanceUrl, instanceKey: ADMIN_KEY }); - const vendorApiKey = vendorRes.body.apiKey; - await request(app).post(`/network/vendors/${vendorRes.body.vendor.id}/activate`).set('Authorization', `Bearer ${ADMIN_KEY}`); - - const offeringRes = await request(app) - .post('/network/offerings') - .set('Authorization', `Bearer ${vendorApiKey}`) - .send({ - title: 'RaceOffer', - productUrl: 'https://race.example', - vendorCampaignId, - terms: { payout: { type: 'one_time_fee', amount: 25 }, cookieWindowDays: 30 }, - published: true, - }); - const offeringId = offeringRes.body.offering.id; - - const creatorRes = await request(app) - .post('/network/creators') - .set('Authorization', `Bearer ${ADMIN_KEY}`) - .send({ name: 'Rico', handle: `rico_${Date.now()}`, email: `rico${Date.now()}@e.com` }); - const creatorKey = creatorRes.body.apiKey; - await request(app).post(`/network/creators/${creatorRes.body.creator.id}/activate`).set('Authorization', `Bearer ${ADMIN_KEY}`); - - const reqRes = await request(app) - .post('/network/requests') - .set('Authorization', 
`Bearer ${creatorKey}`) - .send({ offeringId }); - const reqId = reqRes.body.request.id; - - // Fire two approvals simultaneously. - const [a, b] = await Promise.all([ - request(app).post(`/network/requests/${reqId}/approve`).set('Authorization', `Bearer ${vendorApiKey}`), - request(app).post(`/network/requests/${reqId}/approve`).set('Authorization', `Bearer ${vendorApiKey}`), - ]); - const statuses = [a.status, b.status].sort(); - // One succeeds (200), the other loses the CAS race (409). - expect(statuses).toEqual([200, 409]); - - // And we only federated ONE Partnership for this request. - const partnerships = await db(TABLES.Partnership).where({ requestId: reqId }); - expect(partnerships).toHaveLength(1); - }); - - it('creator-chosen promo code becomes the share-link slug', async () => { - const campaign = (await request(app) - .post('/campaigns') - .set('Authorization', `Bearer ${ADMIN_KEY}`) - .send({ name: 'Promo test', commissionRule: { type: 'percent', value: 20 } })).body; - - const vendorRouter = `${instanceUrl}`; // point router at same server for the test - const vendorRes = await request(app) - .post('/network/vendors') - .set('Authorization', `Bearer ${ADMIN_KEY}`) - .send({ - name: 'Coherence', - slug: `coherence-${Date.now()}`, - instanceUrl, - instanceKey: ADMIN_KEY, - routerUrl: vendorRouter, - }); - const vendorKey = vendorRes.body.apiKey; - await request(app).post(`/network/vendors/${vendorRes.body.vendor.id}/activate`).set('Authorization', `Bearer ${ADMIN_KEY}`); - - const offeringRes = await request(app) - .post('/network/offerings') - .set('Authorization', `Bearer ${vendorKey}`) - .send({ - title: 'Coherence Pro', - productUrl: 'https://getcoherence.io/pro', - vendorCampaignId: campaign.id, - terms: { payout: { type: 'recurring_percent', percent: 20, durationMonths: null }, cookieWindowDays: 60 }, - published: true, - }); - const offeringId = offeringRes.body.offering.id; - - const creatorRes = await request(app) - .post('/network/creators') 
- .set('Authorization', `Bearer ${ADMIN_KEY}`) - .send({ name: 'Grace', handle: `g_${Date.now()}`, email: `g${Date.now()}@e.com` }); - const creatorKey = creatorRes.body.apiKey; - await request(app).post(`/network/creators/${creatorRes.body.creator.id}/activate`).set('Authorization', `Bearer ${ADMIN_KEY}`); - - const applyRes = await request(app) - .post('/network/requests') - .set('Authorization', `Bearer ${creatorKey}`) - .send({ offeringId, promoCode: 'graciefindsdeals' }); - expect(applyRes.status).toBe(201); - expect(applyRes.body.request.promoCode).toBe('graciefindsdeals'); - - const approveRes = await request(app) - .post(`/network/requests/${applyRes.body.request.id}/approve`) - .set('Authorization', `Bearer ${vendorKey}`) - .send({}); - expect(approveRes.status).toBe(200); - expect(approveRes.body.federated.linkKey).toBe('graciefindsdeals'); - expect(approveRes.body.federated.publicShareUrl).toBe(`${vendorRouter}/r/graciefindsdeals`); - - const linkOnVendor = await db(TABLES.Link).where({ linkKey: 'graciefindsdeals' }).first(); - expect(linkOnVendor).toBeDefined(); - }); - - it('defaults to creator default promo code, falls back to handle', async () => { - const campaign = (await request(app) - .post('/campaigns') - .set('Authorization', `Bearer ${ADMIN_KEY}`) - .send({ name: 'Defaults', commissionRule: { type: 'percent', value: 10 } })).body; - - const vendorRes = await request(app) - .post('/network/vendors') - .set('Authorization', `Bearer ${ADMIN_KEY}`) - .send({ name: 'DefaultCo', slug: `default-${Date.now()}`, instanceUrl, instanceKey: ADMIN_KEY }); - const vendorKey = vendorRes.body.apiKey; - await request(app).post(`/network/vendors/${vendorRes.body.vendor.id}/activate`).set('Authorization', `Bearer ${ADMIN_KEY}`); - - const offering = (await request(app) - .post('/network/offerings') - .set('Authorization', `Bearer ${vendorKey}`) - .send({ - title: 'Offering One', - productUrl: 'https://example.com', - vendorCampaignId: campaign.id, - terms: { 
payout: { type: 'one_time_fee', amount: 1 }, cookieWindowDays: 30 }, - published: true, - })).body.offering; - - // Creator WITH a default - const handle = `ada_${Date.now()}`; - const creatorRes = await request(app) - .post('/network/creators') - .set('Authorization', `Bearer ${ADMIN_KEY}`) - .send({ name: 'Ada', handle, email: `ada${Date.now()}@e.com`, defaultPromoCode: 'ada-picks' }); - const creatorKey = creatorRes.body.apiKey; - await request(app).post(`/network/creators/${creatorRes.body.creator.id}/activate`).set('Authorization', `Bearer ${ADMIN_KEY}`); - - // No promoCode on the request — should use the creator's default. - const req1 = await request(app) - .post('/network/requests') - .set('Authorization', `Bearer ${creatorKey}`) - .send({ offeringId: offering.id }); - expect(req1.body.request.promoCode).toBe('ada-picks'); - - // Creator WITHOUT a default → handle - const handle2 = `rose_${Date.now()}`; - const c2 = await request(app) - .post('/network/creators') - .set('Authorization', `Bearer ${ADMIN_KEY}`) - .send({ name: 'Rose', handle: handle2, email: `rose${Date.now()}@e.com` }); - const c2key = c2.body.apiKey; - await request(app).post(`/network/creators/${c2.body.creator.id}/activate`).set('Authorization', `Bearer ${ADMIN_KEY}`); - - const o2 = (await request(app) - .post('/network/offerings') - .set('Authorization', `Bearer ${vendorKey}`) - .send({ - title: 'Offering Two', - productUrl: 'https://example.com', - vendorCampaignId: campaign.id, - terms: { payout: { type: 'one_time_fee', amount: 2 }, cookieWindowDays: 30 }, - published: true, - })).body.offering; - - const req2 = await request(app) - .post('/network/requests') - .set('Authorization', `Bearer ${c2key}`) - .send({ offeringId: o2.id }); - expect(req2.body.request.promoCode).toBe(handle2); - }); - - it('earnings endpoint federates a read and surfaces per-partnership stats', async () => { - // Campaign with a 20% recurring rule on the vendor's instance - const campaign = (await 
request(app) - .post('/campaigns') - .set('Authorization', `Bearer ${ADMIN_KEY}`) - .send({ name: 'Earn test', commissionRule: { type: 'percent', value: 20 } })).body; - - const vendorRes = await request(app) - .post('/network/vendors') - .set('Authorization', `Bearer ${ADMIN_KEY}`) - .send({ name: 'EarnVendor', slug: `earn-${Date.now()}`, instanceUrl, instanceKey: ADMIN_KEY, routerUrl: instanceUrl }); - const vendorKey = vendorRes.body.apiKey; - await request(app).post(`/network/vendors/${vendorRes.body.vendor.id}/activate`).set('Authorization', `Bearer ${ADMIN_KEY}`); - - const offeringId = (await request(app) - .post('/network/offerings') - .set('Authorization', `Bearer ${vendorKey}`) - .send({ - title: 'Earning offering', - productUrl: 'https://example.com/pro', - vendorCampaignId: campaign.id, - terms: { payout: { type: 'recurring_percent', percent: 20, durationMonths: 6 }, cookieWindowDays: 60 }, - published: true, - })).body.offering.id; - - const creatorRes = await request(app) - .post('/network/creators') - .set('Authorization', `Bearer ${ADMIN_KEY}`) - .send({ name: 'Earner', handle: `earner_${Date.now()}`, email: `earner${Date.now()}@e.com` }); - const creatorKey = creatorRes.body.apiKey; - await request(app).post(`/network/creators/${creatorRes.body.creator.id}/activate`).set('Authorization', `Bearer ${ADMIN_KEY}`); - - const reqRes = await request(app) - .post('/network/requests') - .set('Authorization', `Bearer ${creatorKey}`) - .send({ offeringId, promoCode: 'earntest' }); - await request(app) - .post(`/network/requests/${reqRes.body.request.id}/approve`) - .set('Authorization', `Bearer ${vendorKey}`) - .send({}); - - // Simulate traffic on the vendor's side: click → identify → event. - // (We write Click directly because the router is a separate server in - // prod; the dashboard endpoint doesn't care how Clicks got there.) 
- const link = await db(TABLES.Link).where({ linkKey: 'earntest' }).first(); - expect(link).toBeDefined(); - const clickId = ulid(); - await db(TABLES.Click).insert({ - id: clickId, - linkId: link!.id, - partnerId: link!.partnerId, - campaignId: link!.campaignId, - landingUrl: 'https://example.com/pro', - ipHash: 'x', - userAgent: 'x', - referer: null, - fraudFlag: null, - }); - - const userId = `viewer_${Date.now()}`; - await request(app).post('/attribution/identify').send({ cref: clickId, userId }); - await request(app) - .post('/attribution/events') - .set('Authorization', `Bearer ${ADMIN_KEY}`) - .send({ userId, type: 'invoice_paid', value: 250 }); - - // Creator pulls their earnings via the Network's federated read. - const earnings = await request(app) - .get('/network/partnerships/earnings') - .set('Authorization', `Bearer ${creatorKey}`); - expect(earnings.status).toBe(200); - expect(earnings.body.partnerships).toHaveLength(1); - const row = earnings.body.partnerships[0]; - expect(row.status).toBe('ok'); - expect(row.stats.clicks).toBe(1); - expect(row.stats.attributedEvents).toBe(1); - expect(row.stats.attributedRevenue).toBe(250); - expect(row.stats.commissionByStatus.accrued).toBe(50); // 20% of 250 - - expect(earnings.body.totals.clicks).toBe(1); - expect(earnings.body.totals.attributedRevenue).toBe(250); - expect(earnings.body.totals.commission.accrued).toBe(50); - expect(earnings.body.totals.unreachable).toBe(0); - expect(earnings.body.totals.healthy).toBe(1); - }); - - it('earnings endpoint surfaces unreachable vendors without blacking out', async () => { - const campaign = (await request(app) - .post('/campaigns') - .set('Authorization', `Bearer ${ADMIN_KEY}`) - .send({ name: 'Unreach', commissionRule: { type: 'percent', value: 10 } })).body; - - // Register the vendor against a dead URL so federation will fail. 
- const vendorRes = await request(app) - .post('/network/vendors') - .set('Authorization', `Bearer ${ADMIN_KEY}`) - .send({ - name: 'DeadVendor', - slug: `dead-${Date.now()}`, - instanceUrl: 'http://127.0.0.1:1', // port 1 — nothing listens here - instanceKey: ADMIN_KEY, - }); - const vendorKey = vendorRes.body.apiKey; - await request(app).post(`/network/vendors/${vendorRes.body.vendor.id}/activate`).set('Authorization', `Bearer ${ADMIN_KEY}`); - - // Insert a Partnership directly so we don't have to federate-create - // one against the dead instance. - const creatorRes = await request(app) - .post('/network/creators') - .set('Authorization', `Bearer ${ADMIN_KEY}`) - .send({ name: 'Drift', handle: `drift_${Date.now()}`, email: `drift${Date.now()}@e.com` }); - const creatorKey = creatorRes.body.apiKey; - await request(app).post(`/network/creators/${creatorRes.body.creator.id}/activate`).set('Authorization', `Bearer ${ADMIN_KEY}`); - - const offeringId = (await request(app) - .post('/network/offerings') - .set('Authorization', `Bearer ${vendorKey}`) - .send({ - title: 'Offline offering', - productUrl: 'https://example.com', - vendorCampaignId: campaign.id, - terms: { payout: { type: 'one_time_fee', amount: 10 }, cookieWindowDays: 30 }, - published: true, - })).body.offering.id; - - await db(TABLES.PartnershipRequest).insert({ - id: ulid(), - offeringId, - vendorId: vendorRes.body.vendor.id, - creatorId: creatorRes.body.creator.id, - direction: 'creator_to_vendor', - status: 'approved', - promoCode: 'drift', - decidedAt: new Date(), - }); - const partnershipId = ulid(); - const lastReq = await db(TABLES.PartnershipRequest).where({ creatorId: creatorRes.body.creator.id }).first(); - await db(TABLES.Partnership).insert({ - id: partnershipId, - requestId: lastReq!.id, - offeringId, - vendorId: vendorRes.body.vendor.id, - creatorId: creatorRes.body.creator.id, - vendorPartnerId: 'phantom', - vendorLinkKey: 'drift', - publicShareUrl: 'http://127.0.0.1:1/r/drift', - status: 
'active', - }); - - const earnings = await request(app) - .get('/network/partnerships/earnings') - .set('Authorization', `Bearer ${creatorKey}`); - expect(earnings.status).toBe(200); - expect(earnings.body.partnerships).toHaveLength(1); - expect(earnings.body.partnerships[0].status).toBe('error'); - expect(earnings.body.totals.unreachable).toBe(1); - expect(earnings.body.totals.clicks).toBe(0); - }); - - it('full network federation works with a scoped key (not admin)', async () => { - // Mint a scoped key on the "vendor instance" with exactly the - // federation permission set. Register the vendor with THAT key. - const scopedMint = await request(app) - .post('/api-keys/scoped') - .set('Authorization', `Bearer ${ADMIN_KEY}`) - .send({ scopes: ['partners:write', 'partners:read', 'links:write', 'commissions:read'] }); - const scopedKey = scopedMint.body.plaintext as string; - - const campaign = (await request(app) - .post('/campaigns') - .set('Authorization', `Bearer ${ADMIN_KEY}`) - .send({ name: 'Scoped test', commissionRule: { type: 'percent', value: 15 } })).body; - - const vendorRes = await request(app) - .post('/network/vendors') - .set('Authorization', `Bearer ${ADMIN_KEY}`) - .send({ - name: 'ScopedCo', - slug: `scoped-${Date.now()}`, - instanceUrl, - instanceKey: scopedKey, // <-- scoped, not ADMIN_KEY - routerUrl: instanceUrl, - }); - const vendorKey = vendorRes.body.apiKey; - await request(app).post(`/network/vendors/${vendorRes.body.vendor.id}/activate`).set('Authorization', `Bearer ${ADMIN_KEY}`); - - const offeringId = (await request(app) - .post('/network/offerings') - .set('Authorization', `Bearer ${vendorKey}`) - .send({ - title: 'Scoped offering', - productUrl: 'https://example.com', - vendorCampaignId: campaign.id, - terms: { payout: { type: 'recurring_percent', percent: 15, durationMonths: null }, cookieWindowDays: 45 }, - published: true, - })).body.offering.id; - - const creatorRes = await request(app) - .post('/network/creators') - 
.set('Authorization', `Bearer ${ADMIN_KEY}`) - .send({ name: 'Scopy', handle: `scopy_${Date.now()}`, email: `scopy${Date.now()}@e.com` }); - const creatorKey = creatorRes.body.apiKey; - await request(app).post(`/network/creators/${creatorRes.body.creator.id}/activate`).set('Authorization', `Bearer ${ADMIN_KEY}`); - - const applyRes = await request(app) - .post('/network/requests') - .set('Authorization', `Bearer ${creatorKey}`) - .send({ offeringId, promoCode: 'scopyshares' }); - - const approveRes = await request(app) - .post(`/network/requests/${applyRes.body.request.id}/approve`) - .set('Authorization', `Bearer ${vendorKey}`) - .send({}); - expect(approveRes.status).toBe(200); - expect(approveRes.body.federated.linkKey).toBe('scopyshares'); - - // Federated read (commissions:read) works too. - const earnings = await request(app) - .get('/network/partnerships/earnings') - .set('Authorization', `Bearer ${creatorKey}`); - expect(earnings.status).toBe(200); - expect(earnings.body.partnerships[0].status).toBe('ok'); - }); - - it('verify-key endpoint flags unrestricted admin keys and accepts proper scoped keys', async () => { - // Unrestricted admin → warn. - const adminCheck = await request(app) - .post('/network/vendors/verify-key') - .set('Authorization', `Bearer ${ADMIN_KEY}`) - .send({ instanceUrl, instanceKey: ADMIN_KEY }); - expect(adminCheck.status).toBe(200); - expect(adminCheck.body.unrestricted).toBe(true); - expect(adminCheck.body.acceptable).toBe(true); - - // Scoped with all required → acceptable, missing=[]. 
- const fullyScoped = await request(app) - .post('/api-keys/scoped') - .set('Authorization', `Bearer ${ADMIN_KEY}`) - .send({ scopes: ['partners:write', 'partners:read', 'links:write', 'commissions:read'] }); - const okCheck = await request(app) - .post('/network/vendors/verify-key') - .set('Authorization', `Bearer ${ADMIN_KEY}`) - .send({ instanceUrl, instanceKey: fullyScoped.body.plaintext }); - expect(okCheck.status).toBe(200); - expect(okCheck.body.unrestricted).toBe(false); - expect(okCheck.body.missing).toEqual([]); - expect(okCheck.body.acceptable).toBe(true); - - // Scoped with only some → missing listed. - const partial = await request(app) - .post('/api-keys/scoped') - .set('Authorization', `Bearer ${ADMIN_KEY}`) - .send({ scopes: ['partners:write'] }); - const missCheck = await request(app) - .post('/network/vendors/verify-key') - .set('Authorization', `Bearer ${ADMIN_KEY}`) - .send({ instanceUrl, instanceKey: partial.body.plaintext }); - expect(missCheck.status).toBe(200); - expect(missCheck.body.missing).toEqual( - expect.arrayContaining(['partners:read', 'links:write', 'commissions:read']), - ); - expect(missCheck.body.acceptable).toBe(false); - }); - - it('creator profile patch + directory visibility', async () => { - const creatorRes = await request(app) - .post('/network/creators') - .set('Authorization', `Bearer ${ADMIN_KEY}`) - .send({ name: 'Start', handle: `start_${Date.now()}`, email: `start${Date.now()}@e.com` }); - const creatorKey = creatorRes.body.apiKey; - const creatorId = creatorRes.body.creator.id; - - // Inactive creators don't appear in the public directory. 
- let dir = await request(app).get('/network/directory/creators'); - expect(dir.body.creators.find((c: { id: string }) => c.id === creatorId)).toBeUndefined(); - - await request(app).post(`/network/creators/${creatorId}/activate`).set('Authorization', `Bearer ${ADMIN_KEY}`); - - dir = await request(app).get('/network/directory/creators'); - expect(dir.body.creators.find((c: { id: string }) => c.id === creatorId)).toBeDefined(); - - // Self-edit persists. - const patch = await request(app) - .patch('/network/creators/me') - .set('Authorization', `Bearer ${creatorKey}`) - .send({ - name: 'Patched', - bio: 'I publish on YouTube.', - defaultPromoCode: 'patchedcode', - platforms: [{ platform: 'youtube', url: 'https://youtube.com/@patched', followers: 50000 }], - }); - expect(patch.status).toBe(200); - expect(patch.body.creator.name).toBe('Patched'); - expect(patch.body.creator.bio).toBe('I publish on YouTube.'); - expect(patch.body.creator.defaultPromoCode).toBe('patchedcode'); - expect(patch.body.creator.platforms).toHaveLength(1); - - // Handle is not in the PATCH schema — it stays pinned. 
- const handleAttempt = await request(app) - .patch('/network/creators/me') - .set('Authorization', `Bearer ${creatorKey}`) - .send({ handle: 'renamed' }); - expect(handleAttempt.status).toBe(200); - expect(handleAttempt.body.creator.handle).not.toBe('renamed'); - }); - - it('vendor invite via /network/invites creates a pending request', async () => { - const campaign = (await request(app) - .post('/campaigns') - .set('Authorization', `Bearer ${ADMIN_KEY}`) - .send({ name: 'Inv', commissionRule: { type: 'percent', value: 10 } })).body; - - const vendorRes = await request(app) - .post('/network/vendors') - .set('Authorization', `Bearer ${ADMIN_KEY}`) - .send({ name: 'InviteVendor', slug: `inv-${Date.now()}`, instanceUrl, instanceKey: ADMIN_KEY }); - const vendorKey = vendorRes.body.apiKey; - await request(app).post(`/network/vendors/${vendorRes.body.vendor.id}/activate`).set('Authorization', `Bearer ${ADMIN_KEY}`); - - const offering = (await request(app) - .post('/network/offerings') - .set('Authorization', `Bearer ${vendorKey}`) - .send({ - title: 'Invite offering', - productUrl: 'https://example.com', - vendorCampaignId: campaign.id, - terms: { payout: { type: 'one_time_fee', amount: 1 }, cookieWindowDays: 30 }, - published: true, - })).body.offering; - - const creator = (await request(app) - .post('/network/creators') - .set('Authorization', `Bearer ${ADMIN_KEY}`) - .send({ name: 'Targ', handle: `targ_${Date.now()}`, email: `targ${Date.now()}@e.com` })).body; - await request(app).post(`/network/creators/${creator.creator.id}/activate`).set('Authorization', `Bearer ${ADMIN_KEY}`); - - const invite = await request(app) - .post('/network/invites') - .set('Authorization', `Bearer ${vendorKey}`) - .send({ - offeringId: offering.id, - creatorId: creator.creator.id, - message: 'Want to be part of this?', - promoCode: 'targshare', - }); - expect(invite.status).toBe(201); - expect(invite.body.request.direction).toBe('vendor_to_creator'); - 
expect(invite.body.request.promoCode).toBe('targshare'); - }); - - it('encryption round-trips', async () => { - const { encryptKey, decryptKey } = await import('../network/crypto.js'); - const enc = encryptKey('hello-secret-key'); - expect(enc).not.toContain('hello'); - expect(decryptKey(enc)).toBe('hello-secret-key'); - }); -}); diff --git a/apps/api/src/__tests__/partner-invite.test.ts b/apps/api/src/__tests__/partner-invite.test.ts new file mode 100644 index 0000000..56e7173 --- /dev/null +++ b/apps/api/src/__tests__/partner-invite.test.ts @@ -0,0 +1,278 @@ +/** + * Partner invite + magic-link signin happy paths. + * + * Uses an in-memory capturing mailer injected via __setMailerForTests so + * the suite can read the magic-link URL without hitting Postmark. + */ + +import { afterAll, afterEach, beforeAll, beforeEach, describe, expect, it } from 'vitest'; +import request from 'supertest'; +import { TABLES } from '@openpartner/db'; +import { db } from '../db.js'; +import { createApp } from '../app.js'; +import { __setMailerForTests, type Mailer, type Message } from '../mailer.js'; + +const ADMIN_KEY = 'op_test_invite_admin_0123456789abcdef0123'; +process.env.ADMIN_API_KEY = ADMIN_KEY; +process.env.PORTAL_URL = 'http://localhost:5673'; +process.env.OPENPARTNER_TENANCY = 'single'; + +const skipIntegration = !process.env.DATABASE_URL || process.env.INTEGRATION === 'skip'; + +const TABLES_TO_CLEAN = [ + TABLES.Session, + TABLES.MagicLinkToken, + TABLES.ApiKey, + TABLES.Partner, +]; + +const app = createApp({ enableLogger: false }); + +class CapturingMailer implements Mailer { + readonly sent: Message[] = []; + async send(_ctx: unknown, msg: Message): Promise { + this.sent.push(msg); + } + findFor(to: string, purpose?: string): Message | undefined { + // Matches either metadata.purpose or tag — the /auth/signin path + // for revoked partners sets purpose to + // 'partner_revoked_signin_attempt' but keeps tag='partner_revoked'. 
+ return this.sent.find( + (m) => + m.to === to && + (purpose == null || m.metadata?.purpose === purpose || m.tag === purpose), + ); + } +} + +let mailer: CapturingMailer; + +beforeAll(async () => { + if (skipIntegration) return; + await db.raw('select 1'); +}); + +beforeEach(async () => { + mailer = new CapturingMailer(); + __setMailerForTests(mailer); + if (skipIntegration) return; + for (const t of TABLES_TO_CLEAN) { + await db(t).del(); + } +}); + +afterEach(() => { + __setMailerForTests(null); +}); + +afterAll(async () => { + await db.destroy(); +}); + +function extractToken(body: string): string { + const match = /token=([^\s&"]+)/.exec(body); + if (!match) throw new Error(`no token in body:\n${body}`); + return decodeURIComponent(match[1]!); +} + +describe.skipIf(skipIntegration)('partner invite + signin', () => { + it('admin invite creates a pending partner + sends an email; verify activates + issues session', async () => { + const created = await request(app) + .post('/partners') + .set('Authorization', `Bearer ${ADMIN_KEY}`) + .send({ email: 'gracie@example.com', name: 'Gracie' }); + expect(created.status).toBe(201); + expect(created.body.invited).toBe(true); + expect(created.body.activatedAt).toBeNull(); + + expect(created.body).not.toHaveProperty('plaintext'); + expect(created.body).not.toHaveProperty('apiKey'); + + const invite = mailer.findFor('gracie@example.com', 'partner_invite'); + expect(invite).toBeDefined(); + + const token = extractToken(invite!.text); + const verify = await request(app).post('/auth/magic/verify').send({ token }); + expect(verify.status).toBe(200); + expect(verify.body.role).toBe('partner'); + expect(verify.body.partner.email).toBe('gracie@example.com'); + + const setCookie = verify.headers['set-cookie'] as unknown as string[] | undefined; + expect(setCookie).toBeTruthy(); + const cookie = setCookie![0]!.split(';')[0]!; + expect(cookie.startsWith('op_session=')).toBe(true); + + const activated = await 
db(TABLES.Partner).where({ id: created.body.id }).first(); + expect((activated as { activatedAt: Date | null }).activatedAt).not.toBeNull(); + + const who = await request(app).get('/auth/whoami').set('Cookie', cookie); + expect(who.status).toBe(200); + expect(who.body.role).toBe('partner'); + expect(who.body.partnerId).toBe(created.body.id); + }); + + it('magic-link tokens are single-use — second verify 400s', async () => { + await request(app) + .post('/partners') + .set('Authorization', `Bearer ${ADMIN_KEY}`) + .send({ email: 'dup@example.com', name: 'Dup' }); + + const token = extractToken(mailer.findFor('dup@example.com')!.text); + + const first = await request(app).post('/auth/magic/verify').send({ token }); + expect(first.status).toBe(200); + + const second = await request(app).post('/auth/magic/verify').send({ token }); + expect(second.status).toBe(400); + expect(second.body.error).toBe('invalid_or_expired_token'); + }); + + it('returning-partner /auth/signin emails a signin link when the partner is activated', async () => { + await request(app) + .post('/partners') + .set('Authorization', `Bearer ${ADMIN_KEY}`) + .send({ email: 'return@example.com', name: 'Return' }); + const inviteToken = extractToken(mailer.findFor('return@example.com', 'partner_invite')!.text); + await request(app).post('/auth/magic/verify').send({ token: inviteToken }); + + mailer.sent.length = 0; + + const signin = await request(app).post('/auth/signin').send({ email: 'return@example.com' }); + expect(signin.status).toBe(200); + + const signinMsg = mailer.findFor('return@example.com', 'partner_signin'); + expect(signinMsg).toBeDefined(); + + const signinToken = extractToken(signinMsg!.text); + const verify = await request(app).post('/auth/magic/verify').send({ token: signinToken }); + expect(verify.status).toBe(200); + }); + + it('signin for an unknown email silently returns ok (no user enumeration)', async () => { + const res = await request(app).post('/auth/signin').send({ email: 
'nobody@example.com' }); + expect(res.status).toBe(200); + expect(res.body.ok).toBe(true); + expect(mailer.sent.length).toBe(0); + }); + + it('admin can revoke an active partner — sessions die, reinstate undoes it', async () => { + // Invite + activate. + const created = await request(app) + .post('/partners') + .set('Authorization', `Bearer ${ADMIN_KEY}`) + .send({ email: 'rev@example.com', name: 'Rev' }); + const partnerId = created.body.id; + const token = extractToken(mailer.findFor('rev@example.com', 'partner_invite')!.text); + const verify = await request(app).post('/auth/magic/verify').send({ token }); + const cookie = (verify.headers['set-cookie'] as unknown as string[])[0]!.split(';')[0]!; + + const before = await request(app).get('/auth/whoami').set('Cookie', cookie); + expect(before.status).toBe(200); + + mailer.sent.length = 0; + const revoke = await request(app) + .post(`/partners/${partnerId}/revoke`) + .set('Authorization', `Bearer ${ADMIN_KEY}`) + .send({ reason: 'Ending our partnership — thanks for your work' }); + expect(revoke.status).toBe(200); + expect(revoke.body.notified).toBe(true); + + // Notification email fired with the reason in the body. + const notice = mailer.findFor('rev@example.com', 'partner_revoked'); + expect(notice).toBeDefined(); + expect(notice!.text).toContain('suspended'); + expect(notice!.text).toContain('Ending our partnership'); + + // Session killed. + const after = await request(app).get('/auth/whoami').set('Cookie', cookie); + expect(after.status).toBe(401); + + // Second revoke → 409. + const dupe = await request(app) + .post(`/partners/${partnerId}/revoke`) + .set('Authorization', `Bearer ${ADMIN_KEY}`); + expect(dupe.status).toBe(409); + + // Reinstate + sign-in works again. 
+ const reinstate = await request(app) + .post(`/partners/${partnerId}/reinstate`) + .set('Authorization', `Bearer ${ADMIN_KEY}`); + expect(reinstate.status).toBe(200); + + mailer.sent.length = 0; + const signin = await request(app).post('/auth/signin').send({ email: 'rev@example.com' }); + expect(signin.status).toBe(200); + expect(mailer.findFor('rev@example.com', 'partner_signin')).toBeDefined(); + }); + + it('revoke with notify=false does not email the partner', async () => { + const created = await request(app) + .post('/partners') + .set('Authorization', `Bearer ${ADMIN_KEY}`) + .send({ email: 'silent@example.com', name: 'Silent' }); + const partnerId = created.body.id; + const token = extractToken(mailer.findFor('silent@example.com')!.text); + await request(app).post('/auth/magic/verify').send({ token }); + + mailer.sent.length = 0; + const revoke = await request(app) + .post(`/partners/${partnerId}/revoke`) + .set('Authorization', `Bearer ${ADMIN_KEY}`) + .send({ notify: false, reason: 'Investigating fraud' }); + expect(revoke.status).toBe(200); + expect(revoke.body.notified).toBe(false); + expect(mailer.sent.length).toBe(0); + }); + + it('signin for a revoked partner is silent — no email, no enumeration leak', async () => { + // Invite + activate + revoke (notify=false so there's no inbox noise + // from the revoke itself — we want to observe the signin path). 
+ const created = await request(app) + .post('/partners') + .set('Authorization', `Bearer ${ADMIN_KEY}`) + .send({ email: 'after@example.com', name: 'After' }); + const token = extractToken(mailer.findFor('after@example.com')!.text); + await request(app).post('/auth/magic/verify').send({ token }); + await request(app) + .post(`/partners/${created.body.id}/revoke`) + .set('Authorization', `Bearer ${ADMIN_KEY}`) + .send({ reason: 'Program closed', notify: false }); + + mailer.sent.length = 0; + const signin = await request(app).post('/auth/signin').send({ email: 'after@example.com' }); + expect(signin.status).toBe(200); + expect(signin.body.ok).toBe(true); + + // No email should be sent — earlier behavior let an attacker spam + // the victim by repeatedly hitting /auth/signin with their address. + expect(mailer.sent.length).toBe(0); + }); + + it('resend invite generates a fresh token and 409s once the partner is already activated', async () => { + const created = await request(app) + .post('/partners') + .set('Authorization', `Bearer ${ADMIN_KEY}`) + .send({ email: 'resend@example.com', name: 'Resend' }); + const partnerId = created.body.id; + + const resend = await request(app) + .post(`/partners/${partnerId}/invite`) + .set('Authorization', `Bearer ${ADMIN_KEY}`); + expect(resend.status).toBe(200); + + const invites = mailer.sent.filter( + (m) => m.to === 'resend@example.com' && m.metadata?.purpose === 'partner_invite', + ); + expect(invites.length).toBe(2); + + // Activate via the first token, then resend should 409. 
+ const token = extractToken(invites[0]!.text); + await request(app).post('/auth/magic/verify').send({ token }); + + const after = await request(app) + .post(`/partners/${partnerId}/invite`) + .set('Authorization', `Bearer ${ADMIN_KEY}`); + expect(after.status).toBe(409); + expect(after.body.error).toBe('already_activated'); + }); +}); diff --git a/apps/api/src/__tests__/regressions.test.ts b/apps/api/src/__tests__/regressions.test.ts index 4f18373..9d916a4 100644 --- a/apps/api/src/__tests__/regressions.test.ts +++ b/apps/api/src/__tests__/regressions.test.ts @@ -15,12 +15,13 @@ import { afterAll, beforeAll, beforeEach, describe, expect, it } from 'vitest'; import request from 'supertest'; import { ulid } from 'ulid'; -import { TABLES } from '@openpartner/db'; +import { DEFAULT_TENANT_ID, TABLES } from '@openpartner/db'; import { db } from '../db.js'; import { createApp } from '../app.js'; const ADMIN_KEY = 'op_test_regressions_0123456789abcdef0123'; process.env.ADMIN_API_KEY = ADMIN_KEY; +process.env.OPENPARTNER_TENANCY = 'single'; const TABLES_TO_CLEAN = [ TABLES.Commission, @@ -71,6 +72,7 @@ describe.skipIf(skipIntegration)('ultrareview regressions', () => { const clickId = ulid(); await db(TABLES.Click).insert({ id: clickId, + tenantId: DEFAULT_TENANT_ID, linkId: link.id, partnerId: partner.id, campaignId: campaign.id, @@ -78,7 +80,7 @@ describe.skipIf(skipIntegration)('ultrareview regressions', () => { ts: new Date(), }); const userId = `u-${Date.now()}`; - await db(TABLES.Identity).insert({ id: ulid(), clickId, userId }); + await db(TABLES.Identity).insert({ id: ulid(), tenantId: DEFAULT_TENANT_ID, clickId, userId }); // Directly insert two rows with the same externalEventId — emulates // what the Stripe handler would try on a retry. 
The partial unique @@ -86,6 +88,7 @@ describe.skipIf(skipIntegration)('ultrareview regressions', () => { const externalEventId = `evt_${ulid()}`; await db(TABLES.Event).insert({ id: ulid(), + tenantId: DEFAULT_TENANT_ID, userId, type: 'invoice_paid', value: '100.00', @@ -98,6 +101,7 @@ describe.skipIf(skipIntegration)('ultrareview regressions', () => { const inserted = await db(TABLES.Event) .insert({ id: ulid(), + tenantId: DEFAULT_TENANT_ID, userId, type: 'invoice_paid', value: '100.00', @@ -129,16 +133,18 @@ describe.skipIf(skipIntegration)('ultrareview regressions', () => { for (const cid of clickIds) { await db(TABLES.Click).insert({ id: cid, + tenantId: DEFAULT_TENANT_ID, linkId: link.id, partnerId: partner.id, campaignId: campaign.id, landingUrl: 'https://example.com/', ts: new Date(), }); - await db(TABLES.Identity).insert({ id: ulid(), clickId: cid, userId }); + await db(TABLES.Identity).insert({ id: ulid(), tenantId: DEFAULT_TENANT_ID, clickId: cid, userId }); } await db(TABLES.Event).insert({ id: eventId, + tenantId: DEFAULT_TENANT_ID, userId, type: 'invoice_paid', value: '300.00', @@ -148,6 +154,7 @@ describe.skipIf(skipIntegration)('ultrareview regressions', () => { for (const cid of clickIds) { await db(TABLES.Attribution).insert({ id: ulid(), + tenantId: DEFAULT_TENANT_ID, eventId, clickId: cid, partnerId: partner.id, @@ -183,6 +190,7 @@ describe.skipIf(skipIntegration)('ultrareview regressions', () => { await db(TABLES.Click).insert([ { id: flaggedClick, + tenantId: DEFAULT_TENANT_ID, linkId: link.id, partnerId: partner.id, campaignId: campaign.id, @@ -192,6 +200,7 @@ describe.skipIf(skipIntegration)('ultrareview regressions', () => { }, { id: okClick, + tenantId: DEFAULT_TENANT_ID, linkId: link.id, partnerId: partner.id, campaignId: campaign.id, @@ -200,8 +209,8 @@ describe.skipIf(skipIntegration)('ultrareview regressions', () => { }, ]); await db(TABLES.Identity).insert([ - { id: ulid(), clickId: flaggedClick, userId }, - { id: ulid(), clickId: 
okClick, userId }, + { id: ulid(), tenantId: DEFAULT_TENANT_ID, clickId: flaggedClick, userId }, + { id: ulid(), tenantId: DEFAULT_TENANT_ID, clickId: okClick, userId }, ]); // Post an event — only the ok click earns; there's one attribution row. @@ -244,6 +253,7 @@ describe.skipIf(skipIntegration)('ultrareview regressions', () => { const deliveryId = ulid(); await db(TABLES.WebhookDelivery).insert({ id: deliveryId, + tenantId: DEFAULT_TENANT_ID, endpointId: epA.id, eventId: `evt_${ulid()}`, eventType: 'commission.paid', diff --git a/apps/api/src/__tests__/safe-fetch.test.ts b/apps/api/src/__tests__/safe-fetch.test.ts deleted file mode 100644 index 1a01256..0000000 --- a/apps/api/src/__tests__/safe-fetch.test.ts +++ /dev/null @@ -1,57 +0,0 @@ -/** - * safeFetch: SSRF guard on outbound Network calls. - * - * We can't cheaply test a real fetch without a network, so this file - * exercises the protocol + hostname validation that safeFetch does - * BEFORE calling fetch. Runs under NODE_ENV=production to skip the - * test-env bypass baked into the guard. 
- */ - -import { afterEach, beforeEach, describe, expect, it } from 'vitest'; -import { safeFetch } from '../network/safe-fetch.js'; - -describe('safeFetch SSRF guard', () => { - const original = process.env.NODE_ENV; - beforeEach(() => { - process.env.NODE_ENV = 'production'; - delete process.env.NETWORK_ALLOW_PRIVATE_HOSTS; - }); - afterEach(() => { - process.env.NODE_ENV = original; - }); - - it('rejects non-http(s) protocols', async () => { - await expect(safeFetch('file:///etc/passwd')).rejects.toMatchObject({ code: 'unsupported_protocol' }); - await expect(safeFetch('gopher://example.com/')).rejects.toMatchObject({ code: 'unsupported_protocol' }); - }); - - it('rejects IPv4 loopback / private ranges by literal IP', async () => { - await expect(safeFetch('http://127.0.0.1/')).rejects.toMatchObject({ code: 'private_host_blocked' }); - await expect(safeFetch('http://10.0.0.1/')).rejects.toMatchObject({ code: 'private_host_blocked' }); - await expect(safeFetch('http://192.168.1.1/')).rejects.toMatchObject({ code: 'private_host_blocked' }); - await expect(safeFetch('http://169.254.169.254/latest/meta-data/')).rejects.toMatchObject({ - code: 'private_host_blocked', - }); - await expect(safeFetch('http://172.16.0.1/')).rejects.toMatchObject({ code: 'private_host_blocked' }); - }); - - it('rejects IPv6 loopback / link-local / unique-local', async () => { - await expect(safeFetch('http://[::1]/')).rejects.toMatchObject({ code: 'private_host_blocked' }); - await expect(safeFetch('http://[fe80::1]/')).rejects.toMatchObject({ code: 'private_host_blocked' }); - await expect(safeFetch('http://[fc00::1]/')).rejects.toMatchObject({ code: 'private_host_blocked' }); - }); - - it('rejects hostnames that resolve to loopback (localhost)', async () => { - // localhost normally resolves to 127.0.0.1 or ::1 via the hosts file. 
- await expect(safeFetch('http://localhost/')).rejects.toMatchObject({ code: 'private_host_blocked' }); - }); - - it('opts out of the guard when NETWORK_ALLOW_PRIVATE_HOSTS=1', async () => { - process.env.NETWORK_ALLOW_PRIVATE_HOSTS = '1'; - // We still expect the request itself to fail (nothing listening at - // this port in the test env), but NOT with private_host_blocked. - await expect(safeFetch('http://127.0.0.1:59999/', { signal: AbortSignal.timeout(200) })).rejects.not.toMatchObject( - { code: 'private_host_blocked' }, - ); - }); -}); diff --git a/apps/api/src/__tests__/stripe-webhook.test.ts b/apps/api/src/__tests__/stripe-webhook.test.ts new file mode 100644 index 0000000..ae2a427 --- /dev/null +++ b/apps/api/src/__tests__/stripe-webhook.test.ts @@ -0,0 +1,489 @@ +/** + * Stripe webhook tests covering the merchant-side billing flow: + * checkout.session.completed with client_reference_id stitches an Identity, + * and downstream invoice.paid resolves through that Identity. + * + * Signature verification uses the real Stripe SDK against a test secret; + * stripe.customers.update / retrieve are mocked so tests don't hit the API. + */ + +import { afterAll, beforeAll, beforeEach, describe, expect, it, vi } from 'vitest'; +import request from 'supertest'; +import { ulid } from 'ulid'; +import Stripe from 'stripe'; + +const STRIPE_SECRET = 'sk_test_dummy_for_webhook_tests'; +const WEBHOOK_SECRET = 'whsec_test_secret_for_webhook_tests'; + +process.env.STRIPE_SECRET_KEY = STRIPE_SECRET; +process.env.STRIPE_WEBHOOK_SECRET = WEBHOOK_SECRET; +// Force selfhost so tests don't pick up whatever's in .env (vitest auto-loads +// it). The merchant-subscription persistence path runs in any mode anyway. +process.env.OPENPARTNER_MODE = 'selfhost'; +process.env.OPENPARTNER_TENANCY = 'single'; + +// Mock the Stripe constructor so customer ops are inert. 
We keep the real +// webhooks helper (used inside the route for signature verification) by +// wrapping a real Stripe instance and overriding only `customers`. +vi.mock('stripe', async () => { + const actual = await vi.importActual('stripe'); + const Real = actual.default; + function MockedStripe(this: unknown, key: string, opts?: Stripe.StripeConfig) { + const instance = new Real(key, opts) as Stripe; + (instance as unknown as { customers: unknown }).customers = { + update: vi.fn().mockResolvedValue({ id: 'cus_test_mocked' }), + retrieve: vi.fn().mockResolvedValue({ id: 'cus_test_mocked', metadata: {}, deleted: false }), + }; + return instance; + } + // Preserve the static `webhooks` namespace for any direct imports. + (MockedStripe as unknown as { webhooks: unknown }).webhooks = (Real as unknown as { webhooks: unknown }).webhooks; + return { default: MockedStripe }; +}); + +// Imports must follow the env + mock setup so they pick up the right config. +const { DEFAULT_TENANT_ID, TABLES } = await import('@openpartner/db'); +const { db } = await import('../db.js'); +const { createApp } = await import('../app.js'); + +const skipIntegration = !process.env.DATABASE_URL || process.env.INTEGRATION === 'skip'; +const app = createApp({ enableLogger: false }); + +// Real Stripe instance for signing test webhook payloads. The mock above +// overrides `customers`, but `webhooks.generateTestHeaderString` is a static +// method on the class and remains real. 
+const stripeForSigning = new Stripe(STRIPE_SECRET); + +function postWebhook(eventPayload: object) { + const body = JSON.stringify(eventPayload); + const sig = stripeForSigning.webhooks.generateTestHeaderString({ + payload: body, + secret: WEBHOOK_SECRET, + }); + return request(app) + .post('/webhooks/stripe') + .set('content-type', 'application/json') + .set('stripe-signature', sig) + .send(body); +} + +const TABLES_TO_CLEAN = [ + TABLES.Commission, + TABLES.Attribution, + TABLES.Event, + TABLES.Identity, + TABLES.Click, + TABLES.Link, + TABLES.Campaign, + TABLES.Partner, + TABLES.Config, +]; + +interface Ids { + partnerId: string; + campaignId: string; + linkId: string; + clickId: string; +} + +async function seedClick(): Promise { + const partnerId = ulid(); + const campaignId = ulid(); + const linkId = ulid(); + const clickId = ulid(); + await db(TABLES.Partner).insert({ id: partnerId, tenantId: DEFAULT_TENANT_ID, name: 'Test partner', email: `p-${partnerId}@example.com` }); + await db(TABLES.Campaign).insert({ + id: campaignId, + tenantId: DEFAULT_TENANT_ID, + name: 'Default', + attributionModel: 'last_click', + attributionWindowDays: 60, + commissionRule: { type: 'percent', value: 20 }, + }); + await db(TABLES.Link).insert({ + id: linkId, + tenantId: DEFAULT_TENANT_ID, + partnerId, + campaignId, + linkKey: `lk-${linkId}`, + destinationUrl: 'https://example.com', + }); + await db(TABLES.Click).insert({ + id: clickId, + tenantId: DEFAULT_TENANT_ID, + linkId, + partnerId, + campaignId, + landingUrl: 'https://example.com/landing', + ipHash: 'h', + ts: new Date(), + }); + return { partnerId, campaignId, linkId, clickId }; +} + +beforeAll(async () => { + if (skipIntegration) return; + await db.raw('select 1'); +}); + +afterAll(async () => { + await db.destroy(); +}); + +beforeEach(async () => { + if (skipIntegration) return; + for (const t of TABLES_TO_CLEAN) await db(t).del(); +}); + +describe.skipIf(skipIntegration)('stripe webhook — merchant billing flow', () => 
{ + it('checkout.session.completed with valid client_reference_id stitches Identity + emits signup', async () => { + const { clickId } = await seedClick(); + const customerId = `cus_${ulid()}`; + + const res = await postWebhook({ + id: `evt_${ulid()}`, + type: 'checkout.session.completed', + created: Math.floor(Date.now() / 1000), + data: { + object: { + id: `cs_${ulid()}`, + mode: 'subscription', + client_reference_id: clickId, + customer: customerId, + subscription: `sub_${ulid()}`, + }, + }, + }); + + expect(res.status).toBe(200); + const identity = await db(TABLES.Identity).where({ clickId, userId: customerId }).first(); + expect(identity).toBeTruthy(); + + const event = await db(TABLES.Event).where({ userId: customerId, type: 'signup' }).first(); + expect(event).toBeTruthy(); + }); + + it('checkout.session.completed with unknown client_reference_id is silently dropped', async () => { + const customerId = `cus_${ulid()}`; + const bogusCref = ulid(); // not in Click table + + const res = await postWebhook({ + id: `evt_${ulid()}`, + type: 'checkout.session.completed', + created: Math.floor(Date.now() / 1000), + data: { + object: { + id: `cs_${ulid()}`, + mode: 'subscription', + client_reference_id: bogusCref, + customer: customerId, + subscription: `sub_${ulid()}`, + }, + }, + }); + + expect(res.status).toBe(200); + const identity = await db(TABLES.Identity).where({ userId: customerId }).first(); + expect(identity).toBeFalsy(); + const event = await db(TABLES.Event).where({ userId: customerId }).first(); + expect(event).toBeFalsy(); + }); + + it('redelivery of the same Stripe event is idempotent', async () => { + const { clickId } = await seedClick(); + const customerId = `cus_${ulid()}`; + const eventId = `evt_${ulid()}`; + const payload = { + id: eventId, + type: 'checkout.session.completed', + created: Math.floor(Date.now() / 1000), + data: { + object: { + id: `cs_${ulid()}`, + mode: 'subscription', + client_reference_id: clickId, + customer: customerId, + 
subscription: `sub_${ulid()}`, + }, + }, + }; + + await postWebhook(payload); + await postWebhook(payload); + + const events = await db(TABLES.Event).where({ userId: customerId, type: 'signup' }); + expect(events).toHaveLength(1); + }); + + it('invoice.paid resolves userId via the Identity stitched at checkout', async () => { + const { clickId, partnerId } = await seedClick(); + const customerId = `cus_${ulid()}`; + + // 1. Checkout stitches the Identity. + await postWebhook({ + id: `evt_${ulid()}`, + type: 'checkout.session.completed', + created: Math.floor(Date.now() / 1000), + data: { + object: { + id: `cs_${ulid()}`, + mode: 'subscription', + client_reference_id: clickId, + customer: customerId, + subscription: `sub_${ulid()}`, + }, + }, + }); + + // 2. invoice.paid arrives with customer as a Stripe Customer object so the + // real-API retrieve path isn't exercised. The metadata has no + // openpartner_user_id, forcing the Identity-fallback resolution. + const res = await postWebhook({ + id: `evt_${ulid()}`, + type: 'invoice.paid', + created: Math.floor(Date.now() / 1000), + data: { + object: { + id: `in_${ulid()}`, + customer: { id: customerId, metadata: {}, object: 'customer' }, + amount_paid: 4900, + currency: 'usd', + }, + }, + }); + + expect(res.status).toBe(200); + const event = await db(TABLES.Event).where({ userId: customerId, type: 'invoice_paid' }).first(); + expect(event).toBeTruthy(); + expect(Number(event!.value)).toBe(49); + + // Attribution should have credited the seeded partner. + const attribution = await db(TABLES.Attribution).where({ eventId: event!.id }).first(); + expect(attribution).toBeTruthy(); + expect(attribution!.partnerId).toBe(partnerId); + }); + + it('checkout.session.completed without client_reference_id falls through to merchant-subscription path', async () => { + // No seeded click — this simulates "the merchant subscribing to us" + // (which billing.ts persists as a Config row, not as an Identity/Event). 
+ const res = await postWebhook({ + id: `evt_${ulid()}`, + type: 'checkout.session.completed', + created: Math.floor(Date.now() / 1000), + data: { + object: { + id: `cs_${ulid()}`, + mode: 'subscription', + customer: `cus_${ulid()}`, + subscription: `sub_${ulid()}`, + // no client_reference_id + }, + }, + }); + + expect(res.status).toBe(200); + const events = await db(TABLES.Event); + expect(events).toHaveLength(0); + const identities = await db(TABLES.Identity); + expect(identities).toHaveLength(0); + }); +}); + +describe.skipIf(skipIntegration)('stripe webhook — refund + reversal flow', () => { + it('charge.refunded reverses non-paid Commissions linked to the original invoice', async () => { + const { clickId } = await seedClick(); + const customerId = `cus_${ulid()}`; + const stripeInvoiceId = `in_${ulid()}`; + const stripeChargeId = `ch_${ulid()}`; + + // 1. Stitch the Identity via checkout, then drive an invoice.paid + // (with the Stripe customer as an embedded object so no API retrieve + // happens). The mapper records stripeInvoiceId in metadata. + await postWebhook({ + id: `evt_${ulid()}`, + type: 'checkout.session.completed', + created: Math.floor(Date.now() / 1000), + data: { + object: { + id: `cs_${ulid()}`, + mode: 'subscription', + client_reference_id: clickId, + customer: customerId, + subscription: `sub_${ulid()}`, + }, + }, + }); + + await postWebhook({ + id: `evt_${ulid()}`, + type: 'invoice.paid', + created: Math.floor(Date.now() / 1000), + data: { + object: { + id: stripeInvoiceId, + customer: { id: customerId, metadata: {}, object: 'customer' }, + amount_paid: 4900, + currency: 'usd', + charge: stripeChargeId, + }, + }, + }); + + // Pre-condition: a Commission was accrued for the invoice_paid event. + // (Note: the signup Event also runs through attribution and produces a + // $0 commission, but we only care about the invoice-derived ones here.) 
+ const invoicePaidEvent = await db(TABLES.Event).where({ type: 'invoice_paid' }).first(); + expect(invoicePaidEvent).toBeTruthy(); + const invoiceAttributions = await db(TABLES.Attribution).where({ eventId: invoicePaidEvent!.id }); + const invoiceAttributionIds = invoiceAttributions.map((a) => a.id); + const accruedBefore = await db(TABLES.Commission) + .whereIn('attributionId', invoiceAttributionIds) + .where({ status: 'accrued' }); + expect(accruedBefore.length).toBeGreaterThan(0); + + // 2. The refund: charge.refunded with .invoice pointing at our stored + // stripeInvoiceId. Customer is embedded so no real API call. + const refundRes = await postWebhook({ + id: `evt_${ulid()}`, + type: 'charge.refunded', + created: Math.floor(Date.now() / 1000), + data: { + object: { + id: stripeChargeId, + customer: { id: customerId, metadata: {}, object: 'customer' }, + invoice: stripeInvoiceId, + amount_refunded: 4900, + currency: 'usd', + }, + }, + }); + + expect(refundRes.status).toBe(200); + + // Post-condition: invoice-derived Commissions are now reversed; other + // commissions (e.g. the signup $0) are untouched. + const invoiceCommissionsAfter = await db(TABLES.Commission) + .whereIn('attributionId', invoiceAttributionIds); + expect(invoiceCommissionsAfter.every((c) => c.status === 'reversed')).toBe(true); + expect(invoiceCommissionsAfter.length).toBe(accruedBefore.length); + + // The corrective Event was inserted and its metadata records the + // reversal count for downstream observability. 
+ const refundEvent = await db(TABLES.Event).where({ type: 'refund' }).first(); + expect(refundEvent).toBeTruthy(); + expect((refundEvent!.metadata as { reversedCommissions?: number }).reversedCommissions) + .toBe(accruedBefore.length); + }); + + it('charge.refunded leaves already-paid Commissions paid and surfaces the count', async () => { + const { clickId } = await seedClick(); + const customerId = `cus_${ulid()}`; + const stripeInvoiceId = `in_${ulid()}`; + const stripeChargeId = `ch_${ulid()}`; + + await postWebhook({ + id: `evt_${ulid()}`, + type: 'checkout.session.completed', + created: Math.floor(Date.now() / 1000), + data: { + object: { + id: `cs_${ulid()}`, + mode: 'subscription', + client_reference_id: clickId, + customer: customerId, + subscription: `sub_${ulid()}`, + }, + }, + }); + + await postWebhook({ + id: `evt_${ulid()}`, + type: 'invoice.paid', + created: Math.floor(Date.now() / 1000), + data: { + object: { + id: stripeInvoiceId, + customer: { id: customerId, metadata: {}, object: 'customer' }, + amount_paid: 4900, + currency: 'usd', + charge: stripeChargeId, + }, + }, + }); + + // Simulate the partner having already been paid by flipping the + // invoice-derived commissions to 'paid'. 
+ const invoicePaidEvent = await db(TABLES.Event).where({ type: 'invoice_paid' }).first(); + const invoiceAttributions = await db(TABLES.Attribution).where({ eventId: invoicePaidEvent!.id }); + const invoiceAttributionIds = invoiceAttributions.map((a) => a.id); + await db(TABLES.Commission) + .whereIn('attributionId', invoiceAttributionIds) + .update({ status: 'paid', paidAt: new Date() }); + + const refundRes = await postWebhook({ + id: `evt_${ulid()}`, + type: 'charge.refunded', + created: Math.floor(Date.now() / 1000), + data: { + object: { + id: stripeChargeId, + customer: { id: customerId, metadata: {}, object: 'customer' }, + invoice: stripeInvoiceId, + amount_refunded: 4900, + currency: 'usd', + }, + }, + }); + + expect(refundRes.status).toBe(200); + + // No invoice-derived commissions were flipped — partner already has the money. + const stillPaid = await db(TABLES.Commission) + .whereIn('attributionId', invoiceAttributionIds) + .where({ status: 'paid' }); + expect(stillPaid.length).toBeGreaterThan(0); + const reversedFromInvoice = await db(TABLES.Commission) + .whereIn('attributionId', invoiceAttributionIds) + .where({ status: 'reversed' }); + expect(reversedFromInvoice).toHaveLength(0); + + // The refund Event's metadata flags the count for admin attention. + const refundEvent = await db(TABLES.Event).where({ type: 'refund' }).first(); + expect((refundEvent!.metadata as { alreadyPaidCommissions?: number }).alreadyPaidCommissions) + .toBe(stillPaid.length); + }); + + it('charge.refunded does not run attribution on the corrective Event', async () => { + const { clickId } = await seedClick(); + const customerId = `cus_${ulid()}`; + const stripeInvoiceId = `in_${ulid()}`; + + // Set up an Identity for the customer so attribution would normally fire. 
+ await db(TABLES.Identity).insert({ id: ulid(), tenantId: DEFAULT_TENANT_ID, clickId, userId: customerId }); + + const refundRes = await postWebhook({ + id: `evt_${ulid()}`, + type: 'charge.refunded', + created: Math.floor(Date.now() / 1000), + data: { + object: { + id: `ch_${ulid()}`, + customer: { id: customerId, metadata: {}, object: 'customer' }, + invoice: stripeInvoiceId, + amount_refunded: 1900, + currency: 'usd', + }, + }, + }); + + expect(refundRes.status).toBe(200); + expect(refundRes.body.corrective).toBe('refund'); + + // No Attribution rows for the refund Event — corrective events skip + // attribution to avoid creating phantom negative commissions. + const refundEvent = await db(TABLES.Event).where({ type: 'refund' }).first(); + expect(refundEvent).toBeTruthy(); + const attributions = await db(TABLES.Attribution).where({ eventId: refundEvent!.id }); + expect(attributions).toHaveLength(0); + }); +}); diff --git a/apps/api/src/__tests__/webhooks.test.ts b/apps/api/src/__tests__/webhooks.test.ts index 81aa107..dca1264 100644 --- a/apps/api/src/__tests__/webhooks.test.ts +++ b/apps/api/src/__tests__/webhooks.test.ts @@ -10,13 +10,14 @@ import type { AddressInfo } from 'node:net'; import { afterAll, afterEach, beforeAll, beforeEach, describe, expect, it } from 'vitest'; import request from 'supertest'; import { ulid } from 'ulid'; -import { TABLES } from '@openpartner/db'; +import { DEFAULT_TENANT_ID, TABLES } from '@openpartner/db'; import { db } from '../db.js'; import { createApp } from '../app.js'; const ADMIN_KEY = 'op_test_webhook_admin_0123456789abcdef0123'; process.env.ADMIN_API_KEY = ADMIN_KEY; process.env.OPENPARTNER_MODE = 'selfhost'; +process.env.OPENPARTNER_TENANCY = 'single'; const skipIntegration = !process.env.DATABASE_URL || process.env.INTEGRATION === 'skip'; @@ -147,6 +148,7 @@ describe.skipIf(skipIntegration)('webhooks', () => { const clickId = ulid(); await db(TABLES.Click).insert({ id: clickId, + tenantId: DEFAULT_TENANT_ID, linkId, 
partnerId, campaignId, @@ -231,6 +233,7 @@ describe.skipIf(skipIntegration)('webhooks', () => { const clickId = ulid(); await db(TABLES.Click).insert({ id: clickId, + tenantId: DEFAULT_TENANT_ID, linkId: link.id, partnerId: partner.id, campaignId: campaign.id, @@ -311,6 +314,7 @@ describe.skipIf(skipIntegration)('webhooks', () => { const clickId = ulid(); await db(TABLES.Click).insert({ id: clickId, + tenantId: DEFAULT_TENANT_ID, linkId: link.id, partnerId: partner.id, campaignId: campaign.id, diff --git a/apps/api/src/app.ts b/apps/api/src/app.ts index a57941f..19c654d 100644 --- a/apps/api/src/app.ts +++ b/apps/api/src/app.ts @@ -5,6 +5,7 @@ import cookieParser from 'cookie-parser'; import cors from 'cors'; import helmet from 'helmet'; import pinoHttp from 'pino-http'; +import rateLimit from 'express-rate-limit'; import { ulid } from 'ulid'; import { stripeWebhookRouter } from './routes/stripe-webhook.js'; @@ -19,19 +20,33 @@ import { connectRouter } from './routes/connect.js'; import { payoutsRouter } from './routes/payouts.js'; import { commissionsRouter } from './routes/commissions.js'; import { exportRouter } from './routes/export.js'; +import { importPartnersRouter } from './routes/import-partners.js'; +import { couponsRouter } from './routes/coupons.js'; import { billingRouter } from './routes/billing.js'; import { authRouter } from './routes/auth.js'; +import { partnerAuthRouter } from './routes/partner-auth.js'; import { adminOverviewRouter } from './routes/admin-overview.js'; -import { magicLinkRouter } from './routes/magic-link.js'; import { funnelRouter } from './routes/funnel.js'; +import { settingsRouter } from './routes/settings.js'; +import { mountStaticUploads, uploadsRouter } from './routes/uploads.js'; +import { adminsRouter } from './routes/admins.js'; +import { installRouter } from './routes/install.js'; import { fraudReviewRouter } from './routes/fraud-review.js'; import { webhooksRouter } from './routes/webhooks.js'; -import { 
networkVendorsRouter } from './routes/network-vendors.js'; -import { networkCreatorsRouter } from './routes/network-creators.js'; -import { networkOfferingsRouter } from './routes/network-offerings.js'; -import { networkRequestsRouter } from './routes/network-requests.js'; -import { networkEarningsRouter } from './routes/network-earnings.js'; import { metricsRouter } from './routes/metrics.js'; +import { signupRouter } from './routes/signup.js'; +import { partnerSignupRouter } from './routes/partner-signup.js'; +import { networkPartnerRouter } from './routes/network-partner.js'; +import { accountDeletionRouter } from './routes/account-deletion.js'; +import { partnerCampaignsRouter } from './routes/partner-campaigns.js'; +import { onboardingRouter } from './routes/onboarding.js'; +import { creatorPortalRouter } from './routes/creator-portal.js'; +import { signinRouter } from './routes/signin.js'; +import { clicksRouter } from './routes/clicks.js'; +import { sessionHomeRouter } from './routes/session-home.js'; +import { platformAuthRouter } from './routes/platform-auth.js'; +import { tenantMiddleware } from './tenancy.js'; +import { trialGate } from './middleware/trial-gate.js'; export function createApp(options: { enableLogger?: boolean } = {}) { const app = express(); @@ -71,6 +86,53 @@ export function createApp(options: { enableLogger?: boolean } = {}) { ); app.use(cookieParser()); + // CSRF: we deliberately do NOT mount a CSRF-token middleware. Defense + // is layered: + // 1. Session cookies are issued with SameSite=Lax (auth-sessions.ts + + // platform-sessions.ts) — modern browsers refuse to attach them + // on cross-site state-changing requests, which kills the basic + // CSRF attack vector (a malicious site fetch()'ing our endpoints + // with credentials: include can't get the cookie sent). + // 2. 
CORS allowlist is explicit (no `origin: true` reflection; + // see corsOrigins above), so a cross-origin XHR / fetch can't + // read the response even if it could send the request. + // 3. The state-changing surface that bypasses cookies entirely — + // bearer-token API access — uses scoped keys, not session + // cookies, so it isn't CSRF-relevant. + // CodeQL flags the cookie middleware as unprotected (js/missing-token- + // validation) because it can't see the SameSite property on the cookies + // we issue downstream — that's a static-analysis false positive. + + // Global rate limit. Caps a single IP at 600 requests / 5 min — well + // above any legitimate single-user workload (the portal does ~30 req + // on a cold dashboard load) but tight enough that a runaway script + // or credential-stuffing loop hits the wall fast. Self-hosters who + // sit behind a CDN with rate-limiting (Cloudflare, etc.) get this as + // belt-and-suspenders; deployments without an edge layer get baseline + // protection here. /health is exempt so kube-probes don't drain the + // budget; the click-router hot path runs in a separate service so + // it's unaffected. Standard headers turn off the legacy X-RateLimit-* + // (we use the IETF draft). + app.use( + rateLimit({ + windowMs: 5 * 60 * 1000, + limit: 600, + standardHeaders: 'draft-7', + legacyHeaders: false, + // Skip endpoints we trust externally: + // /health, /metrics — kube-probes / Prometheus scrape, no auth, high freq + // /webhooks/* — Stripe etc. 
retry on rejection; rate-limiting them + // would amplify upstream backpressure into our queues + // /uploads/* — static handler for FS-backed image assets + skip: (req) => + req.path === '/health' || + req.path === '/metrics' || + req.path.startsWith('/webhooks/') || + req.path.startsWith('/uploads/'), + message: { error: 'rate_limited' }, + }), + ); + // Request correlation: accept an inbound X-Request-Id for callers that // want to correlate across services, generate a ULID otherwise, and // echo it back as X-Request-Id so a client can paste it into a support @@ -106,8 +168,45 @@ export function createApp(options: { enableLogger?: boolean } = {}) { res.json({ ok: true, service: 'api', mode: MODE }); }); + // Static handler for FS-backed uploads (no-op when STORAGE_KIND=s3). + // Mounted before any auth middleware — uploaded assets are public-read + // by design (avatars + logos appear in unauthenticated contexts). + mountStaticUploads(app); + + // ----- Public, non-tenant routes (mounted BEFORE tenantMiddleware) ----- + // These either run before any tenant exists (install), or are platform- + // wide (metrics scraped by Prometheus). They use the privileged db + // directly and are responsible for their own access control. + app.use(installRouter); + app.use(signupRouter); + app.use(signinRouter); + app.use(sessionHomeRouter); + app.use(platformAuthRouter); + app.use(metricsRouter); + // Creator portal is platform-level (no tenant), proxies to Network. + // Mount before tenantMiddleware so it's reachable from app.openpartner.dev/* + // without a /t// prefix. + app.use(creatorPortalRouter); + + // ----- Tenant scope ----- + // Everything below runs inside a per-request transaction with + // app.tenant_id set, so RLS scopes every query to the current tenant. + // In single-tenancy mode, tenantId is always 'default'. In multi-tenancy + // mode, it's resolved from /t//... in the URL. + app.use(tenantMiddleware); + + // Soft trial-gate. 
Returns 402 on a small allowlist of expensive write + // endpoints when the tenant's trial expired without conversion. Reads, + // SDK callbacks, click ingestion, billing routes, and auth all stay + // open. Mounted right after tenantMiddleware so it has tenant scope. + app.use(trialGate); + app.use(authRouter); - app.use(magicLinkRouter); + app.use(partnerAuthRouter); + app.use(partnerSignupRouter); + app.use(adminsRouter); + app.use(settingsRouter); + app.use(uploadsRouter); app.use(funnelRouter); app.use(fraudReviewRouter); app.use(webhooksRouter); @@ -116,23 +215,29 @@ export function createApp(options: { enableLogger?: boolean } = {}) { app.use(partnersRouter); app.use(campaignsRouter); app.use(linksRouter); + app.use(clicksRouter); app.use(dashboardRouter); app.use(apiKeysRouter); app.use(connectRouter); app.use(payoutsRouter); app.use(commissionsRouter); app.use(exportRouter); + app.use(importPartnersRouter); + app.use(couponsRouter); app.use(billingRouter); app.use(adminOverviewRouter); - app.use(networkVendorsRouter); - app.use(networkCreatorsRouter); - app.use(networkOfferingsRouter); - app.use(networkRequestsRouter); - app.use(networkEarningsRouter); - app.use(metricsRouter); + app.use(networkPartnerRouter); + app.use(accountDeletionRouter); + app.use(partnerCampaignsRouter); + app.use(onboardingRouter); app.use((err: Error, req: Request, res: Response, _next: NextFunction) => { req.log?.error({ err }, 'request_failed'); + // Echo message + stack to stderr so CI logs surface 500s that the + // test harness would otherwise swallow. Safe — these are + // unauthenticated server-side error paths; we're logging them + // anyway via pino when the logger's on. + console.error(`[500] ${req.method} ${req.url} ${err?.message}\n${err?.stack ?? 
''}`); res.status(500).json({ error: 'internal_error' }); }); diff --git a/apps/api/src/attribution.ts b/apps/api/src/attribution.ts index 11b5dce..ef3e60e 100644 --- a/apps/api/src/attribution.ts +++ b/apps/api/src/attribution.ts @@ -58,13 +58,28 @@ export async function attributeEvent( const cleanClicks = clicks.filter((c) => !c.fraudFlag); if (cleanClicks.length === 0) return { status: 'no_click' }; - const campaignIds = Array.from(new Set(cleanClicks.map((c) => c.campaignId))); + // Revoked partners are excluded from attribution — any call landing + // here (whether a live event webhook or a replay via + // attributeBacklogForUser) skips them regardless of whether the event + // timestamp predates the revoke. This matches the admin-intent of + // "stop all earning for this partner going forward" and keeps backlog + // re-runs from retroactively shifting weight around a revoked + // partner. Historical attribution rows already in the table for the + // partner aren't deleted on revoke — admin reverses those manually + // via the commissions review queue if needed. + const partnerIds = Array.from(new Set(cleanClicks.map((c) => c.partnerId))); + const revokedPartners = await db('Partner').whereIn('id', partnerIds).whereNotNull('revokedAt').pluck('id'); + const revokedSet = new Set(revokedPartners as string[]); + const eligibleByPartner = cleanClicks.filter((c) => !revokedSet.has(c.partnerId)); + if (eligibleByPartner.length === 0) return { status: 'no_click' }; + + const campaignIds = Array.from(new Set(eligibleByPartner.map((c) => c.campaignId))); const campaigns = await db(TABLES.Campaign).whereIn('id', campaignIds); const campaignsById = new Map(campaigns.map((c) => [c.id, c])); // Clicks in-window relative to this event, with their campaign attached. 
const eligible: ClickWithCampaign[] = []; - for (const click of cleanClicks) { + for (const click of eligibleByPartner) { const campaign = campaignsById.get(click.campaignId); if (!campaign) continue; const windowMs = campaign.attributionWindowDays * 24 * 60 * 60 * 1000; @@ -91,6 +106,7 @@ export async function attributeEvent( const insertRes = await db(TABLES.Attribution) .insert({ id: attributionId, + tenantId: event.tenantId, eventId: event.id, partnerId: click.partnerId, campaignId: click.campaignId, @@ -105,11 +121,22 @@ export async function attributeEvent( if (insertRes.length === 0) continue; // dup — already attributed allDup = false; + // Commission lifecycle gate: events dated after the campaign's + // endsAt don't accrue. Attribution row stays for analytics — we + // still want to know the click → conversion path was real — just + // no payout. Pre-startsAt is similarly skipped (defensive; clicks + // shouldn't exist for a not-yet-started campaign). + const { commissionEarnable } = await import('./campaign-lifecycle.js'); + if (!commissionEarnable(click.campaign, new Date(event.ts))) { + continue; + } + const rule = parseCommissionRule(click.campaign.commissionRule); const amount = computeCommissionAmount(rule, event) * weight; const commissionId = ulid(); await db(TABLES.Commission).insert({ id: commissionId, + tenantId: event.tenantId, attributionId, partnerId: click.partnerId, amount: amount.toFixed(2), @@ -117,7 +144,7 @@ export async function attributeEvent( status: 'accrued', }); results.push({ clickId: click.id, partnerId: click.partnerId, weight, attributionId, commissionId }); - dispatchEvent('attribution.created', { + dispatchEvent(event.tenantId, 'attribution.created', { attributionId, eventId: event.id, partnerId: click.partnerId, diff --git a/apps/api/src/auth-sessions.ts b/apps/api/src/auth-sessions.ts index d326540..ff2144c 100644 --- a/apps/api/src/auth-sessions.ts +++ b/apps/api/src/auth-sessions.ts @@ -1,149 +1,168 @@ /** - * 
Magic-link + session primitives. + * Magic-link token + session primitives. Generic over principal kind: + * both admins and partners use the same magic-link verify + session + * cookie flow, differing only in which table we check + activate on + * verify. * - * Tokens and session tokens share the same sha256-at-rest pattern API - * keys already use: we store prefix (for indexed lookup) + hash, never - * the plaintext. Cookies carry the plaintext; comparisons use - * constant-time equality on the hash. + * Tokens look like `opml_` and sessions look like `ops_`. Both + * use the same prefix+hash lookup as ApiKey — plaintext is only ever + * held at generation + verify time. + * + * Multi-tenant: every function takes a `Knex` (typically `req.db`, the + * per-request transaction with `app.tenant_id` set). issueMagicLink and + * createSession also take `tenantId` because RLS WITH CHECK rejects + * inserts that don't match the GUC, and the tenantId is needed to stamp + * the row. */ -import { createHash, randomBytes, timingSafeEqual } from 'node:crypto'; +import { createHash, randomBytes } from 'node:crypto'; +import type { CookieOptions } from 'express'; +import type { Knex } from 'knex'; import { ulid } from 'ulid'; import { TABLES, - type MagicLinkClaim, - type MagicLinkPurpose, type MagicLinkTokenRow, - type SessionPrincipalKind, + type MagicLinkPurpose, + type PrincipalKind, type SessionRow, } from '@openpartner/db'; -import { db } from './db.js'; export const SESSION_COOKIE_NAME = 'op_session'; -const MAGIC_PREFIX_LEN = 8; -const SESSION_PREFIX_LEN = 8; +const TOKEN_PREFIX_LEN = 8; +const MAGIC_TTL_MS = 15 * 60 * 1000; // 15 minutes +const SESSION_TTL_MS = 30 * 24 * 60 * 60 * 1000; // 30 days -function hash(s: string): string { - return createHash('sha256').update(s).digest('hex'); +function hash(plaintext: string): string { + return createHash('sha256').update(plaintext).digest('hex'); } -function constantTimeStringEqual(a: string, b: string): boolean { - if 
(a.length !== b.length) return false; - return timingSafeEqual(Buffer.from(a), Buffer.from(b)); +function generate(prefixLiteral: string): { plaintext: string; prefix: string; tokenHash: string } { + const raw = randomBytes(24).toString('hex'); + const plaintext = `${prefixLiteral}_${raw}`; + return { plaintext, prefix: plaintext.slice(0, TOKEN_PREFIX_LEN), tokenHash: hash(plaintext) }; } -// ---- Magic-link tokens ---- - -export interface IssuedToken { - id: string; +export interface IssuedMagicLink { plaintext: string; + expiresAt: Date; } -export async function issueMagicLink(params: { - email: string; - purpose: MagicLinkPurpose; - claim?: MagicLinkClaim; - ttlSeconds?: number; -}): Promise { - const plaintext = `mlt_${randomBytes(32).toString('base64url')}`; - const prefix = plaintext.slice(0, MAGIC_PREFIX_LEN); - const tokenHash = hash(plaintext); - const id = ulid(); - const expiresAt = new Date(Date.now() + (params.ttlSeconds ?? 15 * 60) * 1000); // 15 min default - +export async function issueMagicLink( + db: Knex, + params: { + tenantId: string; + email: string; + purpose: MagicLinkPurpose; + principalKind: PrincipalKind; + principalId: string; + }, +): Promise { + const { plaintext, prefix, tokenHash } = generate('opml'); + const expiresAt = new Date(Date.now() + MAGIC_TTL_MS); await db(TABLES.MagicLinkToken).insert({ - id, + id: ulid(), + tenantId: params.tenantId, prefix, tokenHash, email: params.email.toLowerCase(), purpose: params.purpose, - claim: params.claim ? 
(JSON.stringify(params.claim) as unknown as never) : null, + principalKind: params.principalKind, + principalId: params.principalId, expiresAt, }); - - return { id, plaintext }; + return { plaintext, expiresAt }; } -export type ConsumeResult = - | { ok: true; token: MagicLinkTokenRow } - | { ok: false; error: 'not_found' | 'expired' | 'already_consumed' }; +export interface ConsumedMagicLink { + token: MagicLinkTokenRow; +} -export async function consumeMagicLink(plaintext: string): Promise { - if (plaintext.length < MAGIC_PREFIX_LEN) return { ok: false, error: 'not_found' }; - const prefix = plaintext.slice(0, MAGIC_PREFIX_LEN); +export async function consumeMagicLink(db: Knex, plaintext: string): Promise { + if (plaintext.length < TOKEN_PREFIX_LEN) return null; + const prefix = plaintext.slice(0, TOKEN_PREFIX_LEN); const tokenHash = hash(plaintext); + const now = new Date(); - const candidates = await db(TABLES.MagicLinkToken).where({ prefix }); - const match = candidates.find((row) => constantTimeStringEqual(row.tokenHash, tokenHash)); - if (!match) return { ok: false, error: 'not_found' }; - if (match.consumedAt) return { ok: false, error: 'already_consumed' }; - if (new Date(match.expiresAt).getTime() < Date.now()) return { ok: false, error: 'expired' }; - - // Atomic single-use consumption: conditional update on consumedAt IS NULL. const updated = await db(TABLES.MagicLinkToken) - .where({ id: match.id }) + .where({ prefix, tokenHash }) .whereNull('consumedAt') - .update({ consumedAt: new Date() }) + .andWhere('expiresAt', '>', now) + .update({ consumedAt: now }) .returning('*'); - if (updated.length === 0) return { ok: false, error: 'already_consumed' }; - return { ok: true, token: updated[0]! }; + const row = updated[0]; + return row ? 
{ token: row as MagicLinkTokenRow } : null; } -// ---- Sessions ---- - -const SESSION_TTL_DAYS = 30; +export interface IssuedSession { + plaintext: string; + id: string; + expiresAt: Date; +} -export async function createSession(params: { - principalKind: SessionPrincipalKind; - principalId: string; -}): Promise { - const plaintext = `ops_${randomBytes(32).toString('base64url')}`; - const prefix = plaintext.slice(0, SESSION_PREFIX_LEN); - const tokenHash = hash(plaintext); +export async function createSession( + db: Knex, + params: { + tenantId: string; + principalKind: PrincipalKind; + principalId: string; + }, +): Promise { + const { plaintext, prefix, tokenHash } = generate('ops'); const id = ulid(); - const expiresAt = new Date(Date.now() + SESSION_TTL_DAYS * 24 * 60 * 60 * 1000); - + const expiresAt = new Date(Date.now() + SESSION_TTL_MS); await db(TABLES.Session).insert({ id, + tenantId: params.tenantId, prefix, tokenHash, principalKind: params.principalKind, principalId: params.principalId, expiresAt, - lastSeenAt: new Date(), }); - - return { id, plaintext }; + return { plaintext, id, expiresAt }; } -export async function resolveSession(plaintext: string): Promise { - if (plaintext.length < SESSION_PREFIX_LEN) return null; - const prefix = plaintext.slice(0, SESSION_PREFIX_LEN); +export async function resolveSession(db: Knex, plaintext: string): Promise { + if (!plaintext || plaintext.length < TOKEN_PREFIX_LEN) return null; + const prefix = plaintext.slice(0, TOKEN_PREFIX_LEN); const tokenHash = hash(plaintext); - - const candidates = await db(TABLES.Session) - .where({ prefix }) - .whereNull('revokedAt'); - const match = candidates.find((row) => constantTimeStringEqual(row.tokenHash, tokenHash)); - if (!match) return null; - if (new Date(match.expiresAt).getTime() < Date.now()) return null; - - // Non-blocking lastSeen bump — we don't await. 
- void db(TABLES.Session).where({ id: match.id }).update({ lastSeenAt: new Date() }); - return match; + const now = new Date(); + const row = await db(TABLES.Session) + .where({ prefix, tokenHash }) + .whereNull('revokedAt') + .andWhere('expiresAt', '>', now) + .first(); + if (!row) return null; + + // Defense-in-depth: if the principal was revoked but somehow a session + // slipped through, reject here too. + if (row.principalKind === 'partner') { + const partner = (await db('Partner').where({ id: row.principalId }).first()) as + | { revokedAt: Date | null } + | undefined; + if (!partner || partner.revokedAt) return null; + } else if (row.principalKind === 'admin') { + const admin = (await db('Admin').where({ id: row.principalId }).first()) as + | { revokedAt: Date | null; activatedAt: Date | null } + | undefined; + if (!admin || admin.revokedAt || !admin.activatedAt) return null; + } + + void db(TABLES.Session).where({ id: row.id }).update({ lastSeenAt: now }); + return row; } -export async function revokeSession(id: string): Promise { +export async function revokeSession(db: Knex, id: string): Promise { await db(TABLES.Session).where({ id }).update({ revokedAt: new Date() }); } -export function sessionCookieOptions() { +export function sessionCookieOptions(): CookieOptions { return { httpOnly: true, - sameSite: 'lax' as const, secure: process.env.NODE_ENV === 'production', + sameSite: 'lax', path: '/', - maxAge: SESSION_TTL_DAYS * 24 * 60 * 60 * 1000, + maxAge: SESSION_TTL_MS, }; } diff --git a/apps/api/src/auth.ts b/apps/api/src/auth.ts index 8710f8a..bf7b131 100644 --- a/apps/api/src/auth.ts +++ b/apps/api/src/auth.ts @@ -3,25 +3,32 @@ * * Two shapes of credential: * - ADMIN_API_KEY env var — bootstrap admin key, valid in all modes. - * - ApiKey rows in the database — either admin (partnerId null) or partner-scoped. 
+ * - ApiKey rows in the database — either admin (partnerId null), + * partner-scoped, or a scoped key used by a federating client like + * an OpenPartner Network hub. * - * We never store plaintext. Keys look like `op_<24 hex>` and are identified by - * an 8-char prefix so lookups are indexed rather than table scans. The hash is - * sha256 over the whole key. + * We never store plaintext. Keys look like `op_<24 hex>` and are identified + * by an 8-char prefix so lookups are indexed rather than table scans. The + * hash is sha256 over the whole key. + * + * Multi-tenant: principal lookups go through `req.db` (the per-request + * transaction with `app.tenant_id` set), so RLS scopes ApiKey + Session + * lookups to the current tenant. `requireAuth` therefore must run after + * `tenantMiddleware`. */ import { createHash, randomBytes } from 'node:crypto'; import type { NextFunction, Request, Response } from 'express'; +import type { Knex } from 'knex'; import { ulid } from 'ulid'; import { TABLES, type ApiKeyRow } from '@openpartner/db'; -import { db } from './db.js'; export type ApiKeyPrincipal = | { role: 'admin'; source: 'env' } | { role: 'admin'; source: 'db'; apiKeyId: string } + | { role: 'admin'; source: 'session'; sessionId: string; adminId: string } | { role: 'partner'; source: 'db'; apiKeyId: string; partnerId: string } - | { role: 'network_vendor'; source: 'db' | 'session'; apiKeyId?: string; sessionId?: string; networkVendorId: string } - | { role: 'network_creator'; source: 'db' | 'session'; apiKeyId?: string; sessionId?: string; networkCreatorId: string } + | { role: 'partner'; source: 'session'; sessionId: string; partnerId: string } | { role: 'scoped'; source: 'db'; apiKeyId: string; scopes: string[] }; declare global { @@ -77,34 +84,26 @@ export function requirePartnerOrAdmin(paramName: string = 'id') { } async function resolvePrincipal(req: Request): Promise { + const db = req.db; + if (!db) { + // requireAuth was mounted on a route that didn't pass 
through + // tenantMiddleware. That's a routing bug — fail loudly. + throw new Error('requireAuth invoked without a tenant-scoped req.db'); + } + const header = req.header('authorization'); if (!header) { - // No Bearer — try the session cookie instead. This is what the - // portal uses after a creator signs in via magic link. const cookie = (req as unknown as { cookies?: Record }).cookies?.op_session; if (!cookie) return null; const { resolveSession } = await import('./auth-sessions.js'); - const session = await resolveSession(cookie); + const session = await resolveSession(db, cookie); if (!session) return null; - if (session.principalKind === 'network_creator') { - return { - role: 'network_creator', - source: 'session', - sessionId: session.id, - networkCreatorId: session.principalId, - }; + if (session.principalKind === 'admin') { + return { role: 'admin', source: 'session', sessionId: session.id, adminId: session.principalId }; } - if (session.principalKind === 'network_vendor') { - return { - role: 'network_vendor', - source: 'session', - sessionId: session.id, - networkVendorId: session.principalId, - }; - } - // Future: partner / admin session kinds if we add human auth for them. - return null; + return { role: 'partner', source: 'session', sessionId: session.id, partnerId: session.principalId }; } + const match = /^Bearer\s+(\S+)$/i.exec(header); if (!match) return null; const token = match[1]!; @@ -126,17 +125,11 @@ async function resolvePrincipal(req: Request): Promise { // Non-blocking last-used bump. void db(TABLES.ApiKey).where({ id: match2.id }).update({ lastUsedAt: new Date() }); - // Scoped keys take precedence over any FK role. The FK columns are - // only meaningful for non-scoped keys (admin / partner / vendor / creator). + // Scoped keys take precedence — they're used by Network-style federation + // where the caller has a narrow permission set rather than a role. 
if (Array.isArray(match2.scopes)) { return { role: 'scoped', source: 'db', apiKeyId: match2.id, scopes: match2.scopes }; } - if (match2.networkVendorId) { - return { role: 'network_vendor', source: 'db', apiKeyId: match2.id, networkVendorId: match2.networkVendorId }; - } - if (match2.networkCreatorId) { - return { role: 'network_creator', source: 'db', apiKeyId: match2.id, networkCreatorId: match2.networkCreatorId }; - } if (match2.partnerId) { return { role: 'partner', source: 'db', apiKeyId: match2.id, partnerId: match2.partnerId }; } @@ -176,22 +169,23 @@ function constantTimeEqual(a: string, b: string): boolean { return diff === 0; } -export async function createApiKeyRow(params: { - partnerId?: string | null; - networkVendorId?: string | null; - networkCreatorId?: string | null; - scopes?: string[] | null; - label?: string; -}): Promise<{ id: string; plaintext: string }> { +export async function createApiKeyRow( + db: Knex, + params: { + tenantId: string; + partnerId?: string | null; + scopes?: string[] | null; + label?: string; + }, +): Promise<{ id: string; plaintext: string }> { const { plaintext, prefix, hash } = generateApiKey(); const id = ulid(); await db(TABLES.ApiKey).insert({ id, + tenantId: params.tenantId, prefix, keyHash: hash, partnerId: params.partnerId ?? null, - networkVendorId: params.networkVendorId ?? null, - networkCreatorId: params.networkCreatorId ?? null, // pg jsonb: arrays need stringification; null stays null scopes: params.scopes != null ? 
(JSON.stringify(params.scopes) as unknown as never) @@ -200,17 +194,3 @@ export async function createApiKeyRow(params: { }); return { id, plaintext }; } - -export function requireNetworkVendor(req: Request, res: Response, next: NextFunction): void { - const p = req.principal; - if (!p) return void res.status(401).json({ error: 'unauthorized' }); - if (p.role === 'admin' || p.role === 'network_vendor') return next(); - res.status(403).json({ error: 'forbidden' }); -} - -export function requireNetworkCreator(req: Request, res: Response, next: NextFunction): void { - const p = req.principal; - if (!p) return void res.status(401).json({ error: 'unauthorized' }); - if (p.role === 'admin' || p.role === 'network_creator') return next(); - res.status(403).json({ error: 'forbidden' }); -} diff --git a/apps/api/src/billing-plan.ts b/apps/api/src/billing-plan.ts new file mode 100644 index 0000000..512bb21 --- /dev/null +++ b/apps/api/src/billing-plan.ts @@ -0,0 +1,114 @@ +/** + * Per-tenant billing plan resolver + helpers. + * + * The hosted multi-tenant deployment now lets each tenant pick a tier + * (Flex / Revshare / Enterprise) at signup. Selfhost deployments keep + * the global OPENPARTNER_MODE env behavior — a single mode for the + * single installation. + * + * Resolution rules: + * + * selfhost mode: always returns the global mode (no Tenant + * lookup; selfhost has one tenant by definition). + * hosted mode + plan: returns the tenant's billingPlan column. + * hosted mode + null: legacy tenant predating per-tenant billing — + * fall back to OPENPARTNER_MODE so reportUsage + * and friends keep working until the operator + * backfills. + * + * `effectiveMode()` returns the same shape as the legacy `getMode()` so + * downstream code (reportUsageToStripe, billing route) can switch on + * one variable. 
+ */ + +import type { Knex } from 'knex'; +import { TABLES, type BillingPlan, type TenantRow } from '@openpartner/db'; +import { getMode, type OpenPartnerMode } from './stripe.js'; + +export const TRIAL_DAYS = 14; + +export interface TenantBillingState { + /** The plan column on Tenant. Null for legacy or selfhost. */ + plan: BillingPlan | null; + /** Mode the rest of the billing layer should switch on. Same shape + * as the legacy getMode() return so existing callers can swap in + * with no changes. */ + mode: OpenPartnerMode; + trialEndsAt: Date | null; + inTrial: boolean; + /** True iff the tenant has previously activated a trial. Used to + * refuse second-and-later trials. */ + hasUsedTrial: boolean; + stripeCustomerId: string | null; + stripeSubscriptionId: string | null; + /** True for tenants that picked a paid plan, used a trial, and are + * now without an active subscription. The soft trial-gate uses + * this to 402 expensive write endpoints. */ + trialExpiredWithoutSubscription: boolean; +} + +export async function getTenantBillingState(db: Knex, tenantId: string): Promise { + const tenant = await db(TABLES.Tenant) + .where({ id: tenantId }) + .first(['billingPlan', 'trialEndsAt', 'firstTrialActivatedAt', 'stripeCustomerId', 'stripeSubscriptionId']); + const plan = (tenant?.billingPlan as BillingPlan | null) ?? null; + const hasUsedTrial = !!tenant?.firstTrialActivatedAt; + const stripeSubscriptionId = tenant?.stripeSubscriptionId ?? null; + // "Trial expired without sub" = paid plan picked, trial has been + // used at some point, no current subscription. Enterprise tenants + // are never gated (they don't go through the Checkout flow). + const trialExpiredWithoutSubscription = + plan !== null && + plan !== 'enterprise' && + hasUsedTrial && + !stripeSubscriptionId; + return { + plan, + mode: planToMode(plan), + trialEndsAt: tenant?.trialEndsAt ? new Date(tenant.trialEndsAt) : null, + inTrial: tenant?.trialEndsAt ? 
new Date(tenant.trialEndsAt) > new Date() : false, + hasUsedTrial, + stripeCustomerId: tenant?.stripeCustomerId ?? null, + stripeSubscriptionId, + trialExpiredWithoutSubscription, + }; +} + +/** Map a billing plan to the legacy mode enum. + * - flex/enterprise → 'flat' (Stripe sub with monthly + metered) + * - revshare → 'revshare' (metered-only) + * - null → fall through to OPENPARTNER_MODE env (selfhost or + * legacy hosted tenants from before this column existed) */ +export function planToMode(plan: BillingPlan | null): OpenPartnerMode { + if (plan === 'flex' || plan === 'enterprise') return 'flat'; + if (plan === 'revshare') return 'revshare'; + return getMode(); +} + +/** Stripe price IDs for a given plan. Returns the line items to pass + * to Stripe Checkout. Enterprise returns null — those tenants don't + * get a Stripe subscription (sales-led billing). */ +export function priceIdsForPlan(plan: BillingPlan): Array<{ price: string; quantity?: number }> | null { + if (plan === 'enterprise') return null; + if (plan === 'flex') { + const base = process.env.STRIPE_FLAT_PRICE_ID; + const usage = process.env.STRIPE_FLAT_USAGE_PRICE_ID; + if (!base) throw new Error('STRIPE_FLAT_PRICE_ID not configured'); + const items: Array<{ price: string; quantity?: number }> = [{ price: base, quantity: 1 }]; + if (usage) items.push({ price: usage }); + return items; + } + // revshare: metered only. + const usage = process.env.STRIPE_REVSHARE_USAGE_PRICE_ID; + if (!usage) throw new Error('STRIPE_REVSHARE_USAGE_PRICE_ID not configured'); + return [{ price: usage }]; +} + +/** Trial end timestamp `TRIAL_DAYS` days from now, normalized to the + * start of the day so renewal dates align cleanly across timezones. 
*/ +export function newTrialEnd(): Date { + const d = new Date(); + d.setUTCHours(0, 0, 0, 0); + d.setUTCDate(d.getUTCDate() + TRIAL_DAYS); + return d; +} diff --git a/apps/api/src/brand-name.ts b/apps/api/src/brand-name.ts new file mode 100644 index 0000000..ff35e8f --- /dev/null +++ b/apps/api/src/brand-name.ts @@ -0,0 +1,19 @@ +/** + * Resolve the user-facing brand name for a tenant. + * + * Priority: + * 1. program_settings.programName from Config (admin-set in Settings) + * 2. Tenant.displayName (set at signup; always populated for hosted) + * 3. null (callers fall back to a generic phrase) + */ + +import type { Knex } from 'knex'; +import { TABLES, type ConfigRow, type TenantRow } from '@openpartner/db'; + +export async function resolveBrandName(db: Knex, tenantId: string): Promise { + const row = await db(TABLES.Config).where({ tenantId, key: 'program_settings' }).first(); + const value = (row?.value ?? {}) as { programName?: string }; + if (value.programName && value.programName.trim()) return value.programName.trim(); + const tenant = await db(TABLES.Tenant).where({ id: tenantId }).first(); + return tenant?.displayName ?? null; +} diff --git a/apps/api/src/campaign-end-notifications.ts b/apps/api/src/campaign-end-notifications.ts new file mode 100644 index 0000000..1983764 --- /dev/null +++ b/apps/api/src/campaign-end-notifications.ts @@ -0,0 +1,155 @@ +/** + * Daily sweep that finds campaigns ending in ~7 days and emails the + * brand admin + every partner that has at least one Link in the + * campaign. Stamps Campaign.endNotificationSentAt to make it + * idempotent across deploys / re-runs. + * + * "About 7 days out" is a 24h window: endsAt between (now+6.5d) and + * (now+7.5d). Scheduler fires daily at 09:00 UTC; the half-day padding + * means a clock skew or a missed run still catches everything. + * + * If the admin extends endsAt past 7.5 days from now we clear the + * flag so a fresh reminder fires before the new end date. 
+ */ + +import { TABLES, type AdminRow, type CampaignRow, type PartnerRow, type TenantRow } from '@openpartner/db'; +import { db, appDb } from './db.js'; +import { getMailer } from './mailer.js'; +import { campaignEndingBrandEmail, campaignEndingPartnerEmail } from './email-templates.js'; +import { resolveBrandName } from './brand-name.js'; + +const ONE_DAY_MS = 24 * 60 * 60 * 1000; + +interface SweepResult { + notified: number; + cleared: number; + errors: number; +} + +export async function sweepCampaignEndNotifications(): Promise { + const now = new Date(); + const windowStart = new Date(now.getTime() + 6.5 * ONE_DAY_MS); + const windowEnd = new Date(now.getTime() + 7.5 * ONE_DAY_MS); + + // Reset endNotificationSentAt on campaigns whose endsAt got pushed + // past the 7.5-day window — admin extended, so a future reminder + // should fire when the new end date approaches. + const { rowCount: clearedCount } = (await db.raw( + `update "Campaign" + set "endNotificationSentAt" = null + where "endNotificationSentAt" is not null + and "endsAt" is not null + and "endsAt" > ?`, + [windowEnd], + )) as { rowCount: number }; + const cleared = Number(clearedCount ?? 0); + + // Now find campaigns ripe for the reminder. + const due = await db(TABLES.Campaign) + .whereNotNull('endsAt') + .andWhere('endsAt', '>=', windowStart) + .andWhere('endsAt', '<', windowEnd) + .whereNull('endNotificationSentAt') + .select('*'); + + let notified = 0; + let errors = 0; + + for (const campaign of due) { + try { + await notifyOne(campaign); + // Stamp via privileged db (cross-tenant context here in scheduler). 
+ await db(TABLES.Campaign) + .where({ id: campaign.id }) + .update({ endNotificationSentAt: new Date() }); + notified += 1; + } catch (err) { + console.error('[end-notify] campaign failed', { id: campaign.id, err }); + errors += 1; + } + } + + return { notified, cleared, errors }; +} + +async function notifyOne(campaign: CampaignRow): Promise { + if (!campaign.endsAt) return; + + // Need a tenant-scoped trx for mailer.send (it reads tenant mail + // settings + brand name via the privileged-but-tenant-aware path). + // The scheduler runs cross-tenant so we open one per campaign. + const trx = await appDb.transaction(); + try { + await trx.raw(`set local app.tenant_id = '${campaign.tenantId.replace(/'/g, "''")}'`); + + const tenant = await trx(TABLES.Tenant).where({ id: campaign.tenantId }).first(); + const brandName = await resolveBrandName(trx, campaign.tenantId); + + // Brand admin: pick the oldest activated admin to avoid spamming + // every co-admin. They can forward internally if needed. + const admin = await trx(TABLES.Admin) + .where({ tenantId: campaign.tenantId }) + .whereNotNull('activatedAt') + .whereNull('revokedAt') + .orderBy('activatedAt', 'asc') + .first(); + + const portalBase = (process.env.PORTAL_URL ?? '').replace(/\/$/, ''); + const tenantSlug = tenant?.slug ?? ''; + const tenantBase = portalBase && tenantSlug ? `${portalBase}/t/${tenantSlug}` : ''; + const manageUrl = tenantBase ? `${tenantBase}/admin/campaigns` : ''; + + if (admin && manageUrl) { + const tmpl = campaignEndingBrandEmail(brandName, campaign.name, campaign.endsAt, manageUrl); + await getMailer().send({ db: trx, tenantId: campaign.tenantId }, { + to: admin.email, + subject: tmpl.subject, + text: tmpl.text, + html: tmpl.html, + tag: 'campaign_ending_brand', + metadata: { campaignId: campaign.id }, + }); + } + + // Partners with at least one Link in this campaign. 
Joining + // through Link rather than PartnerCampaign on purpose — only + // partners who actually promoted should get the email; passive + // grants without any posted Link are noise. + const linkPartners = (await trx + .from(TABLES.Link) + .join(TABLES.Partner, `${TABLES.Partner}.id`, `${TABLES.Link}.partnerId`) + .where(`${TABLES.Link}.campaignId`, campaign.id) + .whereNull(`${TABLES.Partner}.revokedAt`) + .whereNotNull(`${TABLES.Partner}.activatedAt`) + .distinct( + `${TABLES.Partner}.id`, + `${TABLES.Partner}.email`, + `${TABLES.Partner}.name`, + )) as Array>; + + const programUrl = tenantBase ? `${tenantBase}/links` : ''; + for (const p of linkPartners) { + try { + const tmpl = campaignEndingPartnerEmail(p.name, brandName, campaign.name, campaign.endsAt, programUrl); + await getMailer().send({ db: trx, tenantId: campaign.tenantId }, { + to: p.email, + subject: tmpl.subject, + text: tmpl.text, + html: tmpl.html, + tag: 'campaign_ending_partner', + metadata: { campaignId: campaign.id, partnerId: p.id }, + }); + } catch (err) { + // One partner's mail failure shouldn't block the rest. Log, + // continue. Stamping endNotificationSentAt at the campaign + // level still moves on — we don't re-try individual partners. + console.error('[end-notify] partner mail failed', { campaignId: campaign.id, partnerId: p.id, err }); + } + } + + await trx.commit(); + } catch (err) { + await trx.rollback().catch(() => {}); + throw err; + } +} diff --git a/apps/api/src/campaign-lifecycle.ts b/apps/api/src/campaign-lifecycle.ts new file mode 100644 index 0000000..1a4dc0e --- /dev/null +++ b/apps/api/src/campaign-lifecycle.ts @@ -0,0 +1,48 @@ +/** + * Campaign lifecycle status — computed at read time from + * (startsAt, endsAt). 
Single source of truth for every gate: + * - Link create + * - Application apply / approve + * - Offering visibility on the Network + * - Commission attribution at event time + * + * Time-bound semantics: + * - status pertains to NOW (or `at` if passed) — does the campaign + * accept new activity? + * - commissionEarnable(at) gates per-event accrual: an event timed + * after endsAt doesn't earn even if the campaign is still + * accepting writes (clock drift, retroactive imports, etc.) + */ + +export type CampaignStatus = 'scheduled' | 'active' | 'ended'; + +interface LifecycleFields { + startsAt: Date | string | null; + endsAt: Date | string | null; +} + +function asDate(d: Date | string | null): Date | null { + if (!d) return null; + return d instanceof Date ? d : new Date(d); +} + +export function campaignStatus(c: LifecycleFields, at: Date = new Date()): CampaignStatus { + const start = asDate(c.startsAt); + const end = asDate(c.endsAt); + if (start && at < start) return 'scheduled'; + if (end && at >= end) return 'ended'; + return 'active'; +} + +/** True if the campaign currently accepts new partner applications, + * new Link creation, and surfaces in creator discovery. */ +export function campaignAcceptsNewActivity(c: LifecycleFields, at?: Date): boolean { + return campaignStatus(c, at) === 'active'; +} + +/** True if a commission may accrue against this campaign for an event + * dated `at`. False once endsAt has passed. Lets existing Links keep + * redirecting (URLs intact) while quietly stopping earnings. 
*/ +export function commissionEarnable(c: LifecycleFields, at: Date): boolean { + return campaignStatus(c, at) !== 'ended'; +} diff --git a/apps/api/src/config.ts b/apps/api/src/config.ts index 210e2c2..1636b45 100644 --- a/apps/api/src/config.ts +++ b/apps/api/src/config.ts @@ -1,20 +1,33 @@ +import type { Knex } from 'knex'; import { TABLES, type ConfigRow } from '@openpartner/db'; -import { db } from './db.js'; -export async function getConfig(key: string): Promise { - const row = await db(TABLES.Config).where({ key }).first(); +export async function getConfig(db: Knex, tenantId: string, key: string): Promise { + const row = await db(TABLES.Config).where({ tenantId, key }).first(); return row ? (row.value as T) : null; } -export async function setConfig(key: string, value: T): Promise { +export async function setConfig(db: Knex, tenantId: string, key: string, value: T): Promise { + // Config.value is jsonb. Knex/pg auto-serialize objects, but a plain + // primitive (string, number, boolean) is sent as raw text and rejected + // because "foo" is not valid JSON without quotes. Stringify + cast so + // every value type round-trips correctly. + const json = JSON.stringify(value); + const jsonbValue = db.raw('?::jsonb', [json]); await db(TABLES.Config) - .insert({ key, value: value as unknown as object, updatedAt: new Date() }) - .onConflict('key') - .merge({ value: value as unknown as object, updatedAt: new Date() }); + .insert({ tenantId, key, value: jsonbValue as unknown as object, updatedAt: new Date() }) + .onConflict(['tenantId', 'key']) + .merge({ value: jsonbValue as unknown as object, updatedAt: new Date() }); } // Known config keys — centralized so we don't stringly-type across the codebase. export const CONFIG_KEYS = { StripeMerchantCustomerId: 'stripe.merchant.customerId', StripeMerchantSubscriptionId: 'stripe.merchant.subscriptionId', + // High-water mark for usage reporting. Stored as ISO string. 
The next + // run aggregates Events with ts > this value, then advances the mark. + LastUsageReportedAt: 'stripe.merchant.lastUsageReportedAt', + // High-water mark for Network-originated payout reporting (separate + // from usage reporting because the cadence + trigger are different + // — payouts are weekly, usage is daily). + LastNetworkPayoutsReportedAt: 'network.lastPayoutsReportedAt', } as const; diff --git a/apps/api/src/crypto.ts b/apps/api/src/crypto.ts new file mode 100644 index 0000000..a5f05de --- /dev/null +++ b/apps/api/src/crypto.ts @@ -0,0 +1,65 @@ +/** + * AES-256-GCM envelope encryption for secrets stored in Config. + * + * One master key — SECRETS_ENCRYPTION_KEY env, 32 raw bytes as hex + * (64 chars) or base64 (44 chars). Required in production; dev falls + * back to a fixed dev-only key and logs a warning, which means + * encrypted values don't survive a rotation but local development + * doesn't need to care. + * + * Envelope: 12-byte IV || 16-byte auth tag || ciphertext, base64 encoded. + * The tag catches tampering so bad-faith DB writes can't silently return + * different plaintext than what was stored. + */ + +import { createCipheriv, createDecipheriv, randomBytes } from 'node:crypto'; + +const ALG = 'aes-256-gcm'; +const IV_LEN = 12; + +let cachedKey: Buffer | null = null; + +function masterKey(): Buffer { + if (cachedKey) return cachedKey; + const raw = process.env.SECRETS_ENCRYPTION_KEY; + if (!raw) { + if (process.env.NODE_ENV === 'production') { + throw new Error('SECRETS_ENCRYPTION_KEY is required in production'); + } + // Dev fallback — deterministic so local restarts keep decrypting + // previously-encrypted rows, but obviously not safe for prod. + console.warn('[crypto] SECRETS_ENCRYPTION_KEY not set — using dev-only fallback. DO NOT USE IN PROD.'); + cachedKey = Buffer.alloc(32, 0x42); + return cachedKey; + } + const buf = raw.length === 64 ? 
Buffer.from(raw, 'hex') : Buffer.from(raw, 'base64'); + if (buf.length !== 32) throw new Error('SECRETS_ENCRYPTION_KEY must decode to exactly 32 bytes'); + cachedKey = buf; + return buf; +} + +export function encryptSecret(plaintext: string): string { + const iv = randomBytes(IV_LEN); + const cipher = createCipheriv(ALG, masterKey(), iv); + const ct = Buffer.concat([cipher.update(plaintext, 'utf8'), cipher.final()]); + const tag = cipher.getAuthTag(); + return Buffer.concat([iv, tag, ct]).toString('base64'); +} + +export function decryptSecret(envelope: string): string { + const buf = Buffer.from(envelope, 'base64'); + const iv = buf.subarray(0, IV_LEN); + const tag = buf.subarray(IV_LEN, IV_LEN + 16); + const ct = buf.subarray(IV_LEN + 16); + // authTagLength: 16 is required to refuse short tags on decrypt. Without + // it Node accepts any tag 4–16 bytes, which weakens GCM forgery resistance + // (Semgrep gcm-no-tag-length). + const decipher = createDecipheriv(ALG, masterKey(), iv, { authTagLength: 16 }); + decipher.setAuthTag(tag); + return Buffer.concat([decipher.update(ct), decipher.final()]).toString('utf8'); +} + +/** For tests. Resets the cached master key so a test can set env + re-encrypt. */ +export function __resetCryptoKeyForTests(): void { + cachedKey = null; +} diff --git a/apps/api/src/db.ts b/apps/api/src/db.ts index 74c6ff9..8ed940e 100644 --- a/apps/api/src/db.ts +++ b/apps/api/src/db.ts @@ -1,4 +1,73 @@ +/** + * Two database connections: + * + * db (admin) — connection used for migrations + cross-tenant operations + * (signup, platform-admin tooling). Uses DATABASE_URL. + * Bypasses RLS (it's a superuser/owner role). + * + * appDb — connection used for normal tenant-scoped requests. Uses + * DATABASE_URL_APP if set, otherwise falls back to + * DATABASE_URL. When set to the openpartner_app role, + * every query is subject to RLS — the per-request + * transaction (see tenancy.ts) sets `app.tenant_id` to + * scope rows correctly. 
+ * + * Self-hosters can leave DATABASE_URL_APP unset. RLS is then bypassed + * (the app runs as the same role as migrations) but app-level tenantId + * filtering still applies, so isolation is preserved at the query layer. + * For real defense-in-depth, set DATABASE_URL_APP to the openpartner_app + * role connection string. + */ import './env.js'; import { createDb } from '@openpartner/db'; -export const db = createDb({ connectionString: process.env.DATABASE_URL! }); +const adminUrl = process.env.DATABASE_URL; +const rawAppUrl = process.env.DATABASE_URL_APP ?? adminUrl; + +if (!adminUrl) { + throw new Error('DATABASE_URL must be set'); +} + +// If the admin URL specifies sslmode but the app URL doesn't, carry it +// across. Both URLs target the same managed cluster (the app role lives +// alongside the admin role), so SSL requirements are identical — making +// operators set sslmode in two places is just a footgun. DO Managed +// Postgres rejects unencrypted connections, so a missing sslmode on the +// app URL surfaces as a pg_hba "no encryption" error at request time. +function inheritSslMode(appUrl: string, adminUrl: string): string { + if (/[?&]sslmode=/i.test(appUrl)) return appUrl; + const adminMode = adminUrl.match(/[?&](sslmode=[^&]+)/i); + if (!adminMode) return appUrl; + const sep = appUrl.includes('?') ? '&' : '?'; + return `${appUrl}${sep}${adminMode[1]}`; +} + +const appUrl = inheritSslMode(rawAppUrl!, adminUrl); + +/** + * Privileged knex instance. 
Used by: + * - migrations (via the migrate.ts script, separately) + * - signup flow (creates Tenant rows; the request has no tenantId yet) + * - platform-admin tooling + * - background jobs that genuinely need cross-tenant access + * - stripe webhook tenant resolution (looks up by partnerId/payoutId + * across tenants before opening a per-tenant trx for processing) + * - the in-process scheduler enumerating active tenants + * - /metrics scrape (platform-wide counts) + * + * `bypassRls: true` sets `row_security = off` on every pooled connection + * so cross-tenant queries actually return rows. Without this, FORCE RLS + * would silently zero out every query on this pool — even for the table + * owner. The role used here must be the table owner or have BYPASSRLS. + * + * Day-to-day API request handling should use req.db (the transaction-bound + * appDb instance) instead, so RLS is the second line of defense. + */ +export const db = createDb({ connectionString: adminUrl, bypassRls: true }); + +/** + * Per-tenant pool. Tenant scope is set on each transaction via SET LOCAL + * app.tenant_id; see tenancy.ts withTenantTransaction. RLS is *not* + * bypassed on this pool — that's the whole point. + */ +export const appDb = createDb({ connectionString: appUrl! }); diff --git a/apps/api/src/email-templates.ts b/apps/api/src/email-templates.ts index 70e866f..8295fdf 100644 --- a/apps/api/src/email-templates.ts +++ b/apps/api/src/email-templates.ts @@ -1,150 +1,193 @@ /** - * HTML + plain-text templates for auth emails. - * - * Kept deliberately small: inline CSS only, safe fonts, single CTA - * button. Plaintext fallback carries the same link for clients that - * block HTML. Preheader text primes the inbox preview so the user sees - * "Sign in to OpenPartner" before they open the message. + * Minimal email templates for partner invite + signin. 
Plain-text body + * is always authoritative — the HTML body is a simple wrapper so it + * renders cleanly in Gmail / Outlook without requiring MJML or a + * templating engine. */ -export interface MagicEmail { +export function buildMagicLinkUrl(token: string, tenantSlug?: string | null): string { + const base = (process.env.PORTAL_URL ?? 'http://localhost:5673').replace(/\/$/, ''); + // In multi-tenant mode the SPA's auth endpoints live under /t//. + // Pass the slug so the link drops the recipient on the right tenant + // path; the SPA's api client uses the URL prefix to scope its calls. + const tenantPath = tenantSlug ? `/t/${tenantSlug}` : ''; + return `${base}${tenantPath}/auth/magic?token=${encodeURIComponent(token)}`; +} + +export interface EmailTemplate { subject: string; text: string; html: string; - tag: string; } -interface BuildParams { - headline: string; - preheader: string; - intro: string; - buttonLabel: string; - url: string; - note?: string; - tag: string; - subject: string; +export function partnerInviteEmail(name: string, link: string, brandName: string | null = null): EmailTemplate { + const brand = brandName || 'the partner program'; + const subject = brandName ? `You're invited to ${brandName}'s partner program` : `You're invited to the partner program`; + const text = [ + `Hi ${name},`, + ``, + `You've been invited to join ${brand}. Click the link below to accept`, + `and set up your dashboard:`, + ``, + link, + ``, + `This link is good for 15 minutes.`, + ].join('\n'); + return { subject, text, html: wrap(text, link, 'Accept invite') }; } -function esc(s: string): string { - return s - .replaceAll('&', '&') - .replaceAll('<', '<') - .replaceAll('>', '>') - .replaceAll('"', '"') - .replaceAll("'", '''); +export function partnerSigninEmail(name: string, link: string, brandName: string | null = null): EmailTemplate { + const subject = brandName ? 
`Sign in to ${brandName}` : `Your partner dashboard sign-in link`; + const text = [ + `Hi ${name},`, + ``, + brandName + ? `Click the link below to sign in to ${brandName}:` + : `Click the link below to sign in to your partner dashboard:`, + ``, + link, + ``, + `This link is good for 15 minutes. If you didn't ask for it, ignore this email.`, + ].join('\n'); + return { subject, text, html: wrap(text, link, 'Sign in') }; } -function build(params: BuildParams): MagicEmail { - const { headline, preheader, intro, buttonLabel, url, note, tag, subject } = params; +export function adminInviteEmail(name: string, link: string, programName: string | null): EmailTemplate { + const brand = programName || 'your partner program'; + const subject = `You've been invited to administer ${brand}`; + const text = [ + `Hi ${name},`, + ``, + `You've been invited as an administrator for ${brand}. Click the link below`, + `to accept the invitation and sign in:`, + ``, + link, + ``, + `This link is good for 15 minutes.`, + ].join('\n'); + return { subject, text, html: wrap(text, link, 'Accept invite') }; +} - const text = - `${headline}\n\n${intro}\n\n${buttonLabel}: ${url}\n\n` + - `This link expires in 15 minutes. If you didn't request it, ignore this email.` + - (note ? `\n\n${note}` : ''); +export function adminSigninEmail(name: string, link: string): EmailTemplate { + const subject = `Your admin dashboard sign-in link`; + const text = [ + `Hi ${name},`, + ``, + `Click the link below to sign in:`, + ``, + link, + ``, + `This link is good for 15 minutes. If you didn't ask for it, ignore this email.`, + ].join('\n'); + return { subject, text, html: wrap(text, link, 'Sign in') }; +} - const html = ` +export function partnerRevokedEmail(name: string, reason: string | null): EmailTemplate { + const subject = `Your partner account has been suspended`; + const text = [ + `Hi ${name},`, + ``, + `Your partner account has been suspended by the program administrator.`, + ...(reason ? 
[``, `Reason: ${reason}`] : []), + ``, + `If you believe this was done in error, please contact the administrator of the partner program directly.`, + ].join('\n'); + // No CTA button on this template — there's nowhere for the partner + // to go. Plain-text-in-HTML is fine. + const html = ` - - -${esc(subject)} - - - ${esc(preheader)} - - - +
- - - - - - - - - - - - - - - - - ${note ? `` : ''} - - - + +
- - - - - -
OOpenPartner
-
${esc(headline)}
${esc(intro)}
- ${esc(buttonLabel)} -
- This link expires in 15 minutes. If you didn't request it, ignore this email. -
${esc(note)}
- Or paste this URL into your browser:
- ${esc(url)} -
+ - -
+ +
+ ${text
+ .split('\n')
+ .map((l) => l.replace(/&/g, '&amp;').replace(/</g, '&lt;').replace(/>/g, '&gt;'))
+ .join('
')} +
-
- +
+ `; - - return { subject, text, html, tag }; + return { subject, text, html }; } -export function creatorSignupEmail(name: string, url: string): MagicEmail { - return build({ - subject: 'Finish your OpenPartner signup', - preheader: `Hi ${name} — one click to finish creating your OpenPartner account.`, - headline: `Hi ${name}, one click to finish.`, - intro: - 'Click the button below to verify your email and finish setting up your OpenPartner creator account. Once verified, you can browse offerings and apply to promote any product.', - buttonLabel: 'Finish signup', - url, - tag: 'creator_signup', - }); +/** Brand admin: 7-day notice that a Campaign is about to end. */ +export function campaignEndingBrandEmail( + brandName: string | null, + campaignName: string, + endsAt: Date, + manageUrl: string, +): EmailTemplate { + const dateStr = endsAt.toUTCString().replace(/ \d\d:\d\d:\d\d GMT$/, ''); + const subject = `"${campaignName}" ends in 7 days`; + const text = [ + `Hi${brandName ? ` from ${brandName}` : ''},`, + ``, + `Your campaign "${campaignName}" is scheduled to end ${dateStr}.`, + ``, + `Existing share-links will keep redirecting after that date — your`, + `partners' posted content stays alive — but no new commissions will`, + `accrue on conversions dated past the end.`, + ``, + `If you'd like to keep it running, edit the end date in your admin:`, + ``, + manageUrl, + ].join('\n'); + return { subject, text, html: wrap(text, manageUrl, 'Manage campaign') }; } -export function creatorSigninEmail(url: string): MagicEmail { - return build({ - subject: 'Your OpenPartner sign-in link', - preheader: 'One click to sign in to OpenPartner.', - headline: 'Sign in to OpenPartner', - intro: 'Click the button below to sign in.', - buttonLabel: 'Sign in', - url, - tag: 'creator_signin', - }); +/** Partner: 7-day notice that a Campaign they have at least one Link + * in is about to end. 
*/ +export function campaignEndingPartnerEmail( + partnerName: string, + brandName: string | null, + campaignName: string, + endsAt: Date, + programUrl: string, +): EmailTemplate { + const dateStr = endsAt.toUTCString().replace(/ \d\d:\d\d:\d\d GMT$/, ''); + const fromBrand = brandName ?? 'the brand'; + const subject = `Heads up — "${campaignName}" ends in 7 days`; + const text = [ + `Hi ${partnerName},`, + ``, + `The "${campaignName}" program from ${fromBrand} ends ${dateStr}.`, + ``, + `Your existing share-links keep working past that date, but clicks`, + `that come in afterward won't earn commission. Worth squeezing in any`, + `last promotion before then.`, + ``, + programUrl, + ].join('\n'); + return { subject, text, html: wrap(text, programUrl, 'View program') }; } -export function vendorSignupEmail(name: string, url: string): MagicEmail { - return build({ - subject: 'Finish your OpenPartner vendor signup', - preheader: `Verify your email to submit ${name} for Network review.`, - headline: `Welcome, ${name}.`, - intro: - 'Click the button below to verify your email and submit your vendor application. An admin reviews your federation credentials and activates your account — usually within a day.', - buttonLabel: 'Verify email', - url, - tag: 'vendor_signup', - note: - "After your account is active, creators on the Network will be able to discover and apply to promote your offerings. You'll receive their applications in your portal inbox.", - }); -} - -export function vendorSigninEmail(url: string): MagicEmail { - return build({ - subject: 'Your OpenPartner sign-in link', - preheader: 'One click to sign in to OpenPartner.', - headline: 'Sign in to OpenPartner', - intro: 'Click the button below to sign in.', - buttonLabel: 'Sign in', - url, - tag: 'vendor_signin', - }); +function wrap(text: string, cta: string, ctaLabel: string): string { + const escaped = text + .split('\n') + .map((line) => + line.startsWith('http') + ? 
`<a href="${cta}">${cta}</a>`
+ : line.replace(/&/g, '&amp;').replace(/</g, '&lt;').replace(/>/g, '&gt;'),
+ )
+ .join('
'); + return ` + + + + +
+ + +
+ ${escaped} +

+ ${ctaLabel} +
+
+ +`; } diff --git a/apps/api/src/export.ts b/apps/api/src/export.ts index a6e70a2..323617e 100644 --- a/apps/api/src/export.ts +++ b/apps/api/src/export.ts @@ -92,8 +92,17 @@ export interface ImportReport { * Re-import a bundle. We use onConflict(pk).ignore so the operation is * idempotent: running the same export twice won't create duplicates, and * partial-then-resumed imports work. + * + * Multi-tenant: every row's tenantId is rewritten to the importing tenant. + * This is what lets a hosted-tier export ("acme" tenantId throughout) + * round-trip into a self-host installation (default tenantId), satisfying + * the data-portability commitment in CLAUDE.md. */ -export async function importBundle(db: Knex, bundle: ImportBundle): Promise { +export async function importBundle( + db: Knex, + tenantId: string, + bundle: ImportBundle, +): Promise { const inserted: Record = {}; const skipped: Record = {}; @@ -101,7 +110,10 @@ export async function importBundle(db: Knex, bundle: ImportBundle): Promise normalizeRow(r as Record)); + const normalized = rows.map((r) => ({ + ...normalizeRow(r as Record), + tenantId, + })); const result = await db(table) .insert(normalized) diff --git a/apps/api/src/mail-settings.ts b/apps/api/src/mail-settings.ts new file mode 100644 index 0000000..efc8c73 --- /dev/null +++ b/apps/api/src/mail-settings.ts @@ -0,0 +1,252 @@ +/** + * Mail transport configuration, stored in Config under 'mail_settings'. + * + * SMTP password and Postmark server token are AES-GCM encrypted at rest + * using SECRETS_ENCRYPTION_KEY. Everything else (host, port, from, + * username) is stored plaintext — they're identifiers, not secrets. + * + * Resolution order at dispatch time: + * 1. UI-configured settings for the calling tenant (if any, and not partially blank) + * 2. Env vars (SMTP_HOST / POSTMARK_SERVER_TOKEN + MAIL_FROM) + * 3. Console fallback + * + * The UI always wins over env, so an admin editing from the portal can + * rotate creds without a redeploy. 
Hosted deployments that want to + * force env can simply leave the UI empty. + * + * Multi-tenant: every read/write is scoped to a tenantId. Each tenant has + * its own (encrypted) UI mail config; env fallback is shared platform-wide. + */ + +import type { Knex } from 'knex'; +import { TABLES, type ConfigRow } from '@openpartner/db'; +import { decryptSecret, encryptSecret } from './crypto.js'; + +export type MailTransportKind = 'smtp' | 'postmark' | 'none'; + +export interface SmtpConfig { + host: string; + port: number; + secure: boolean; + user: string | null; + /** Decrypted plaintext. Never serialize this to JSON clients. */ + password: string | null; +} + +export interface PostmarkConfig { + serverToken: string; + messageStream: string; +} + +export interface ResolvedMailConfig { + kind: MailTransportKind; + from: string | null; + smtp?: SmtpConfig; + postmark?: PostmarkConfig; + source: 'ui' | 'env' | 'none'; +} + +// ---------- on-disk shapes ---------- + +interface StoredMailSettings { + kind: MailTransportKind | null; + from: string | null; + smtp?: { + host: string; + port: number; + secure: boolean; + user: string | null; + passwordCiphertext: string | null; + } | null; + postmark?: { + serverTokenCiphertext: string; + messageStream: string; + } | null; +} + +const CONFIG_KEY = 'mail_settings'; + +// ---------- read ---------- + +async function readStored(db: Knex, tenantId: string): Promise { + const row = await db(TABLES.Config).where({ tenantId, key: CONFIG_KEY }).first(); + return (row?.value as StoredMailSettings | undefined) ?? null; +} + +/** Sanitized payload for the admin UI — no ciphertexts, no plaintext secrets. 
*/ +export interface PublicMailSettings { + kind: MailTransportKind | null; + from: string | null; + smtp: { host: string; port: number; secure: boolean; user: string | null; hasPassword: boolean } | null; + postmark: { hasToken: boolean; messageStream: string } | null; +} + +export async function getPublicMailSettings(db: Knex, tenantId: string): Promise { + const stored = await readStored(db, tenantId); + if (!stored) return { kind: null, from: null, smtp: null, postmark: null }; + return { + kind: stored.kind, + from: stored.from, + smtp: stored.smtp + ? { + host: stored.smtp.host, + port: stored.smtp.port, + secure: stored.smtp.secure, + user: stored.smtp.user, + hasPassword: !!stored.smtp.passwordCiphertext, + } + : null, + postmark: stored.postmark + ? { hasToken: !!stored.postmark.serverTokenCiphertext, messageStream: stored.postmark.messageStream } + : null, + }; +} + +/** Decrypted + usable by the mailer. Never hand to an HTTP client. */ +export async function resolveMailConfig(db: Knex, tenantId: string): Promise { + const stored = await readStored(db, tenantId); + if (stored?.kind === 'smtp' && stored.from && stored.smtp) { + return { + kind: 'smtp', + from: stored.from, + smtp: { + host: stored.smtp.host, + port: stored.smtp.port, + secure: stored.smtp.secure, + user: stored.smtp.user, + password: stored.smtp.passwordCiphertext ? decryptSecret(stored.smtp.passwordCiphertext) : null, + }, + source: 'ui', + }; + } + if (stored?.kind === 'postmark' && stored.from && stored.postmark) { + return { + kind: 'postmark', + from: stored.from, + postmark: { + serverToken: decryptSecret(stored.postmark.serverTokenCiphertext), + messageStream: stored.postmark.messageStream, + }, + source: 'ui', + }; + } + + // Fallback to env. + const from = process.env.MAIL_FROM ?? 
null; + const smtpHost = process.env.SMTP_HOST; + const postmarkToken = process.env.POSTMARK_SERVER_TOKEN; + if (smtpHost && from) { + return { + kind: 'smtp', + from, + smtp: { + host: smtpHost, + port: Number(process.env.SMTP_PORT ?? 587), + secure: process.env.SMTP_SECURE === '1' || process.env.SMTP_PORT === '465', + user: process.env.SMTP_USER ?? null, + password: process.env.SMTP_PASSWORD ?? null, + }, + source: 'env', + }; + } + if (postmarkToken && from) { + return { + kind: 'postmark', + from, + postmark: { + serverToken: postmarkToken, + messageStream: process.env.POSTMARK_MESSAGE_STREAM ?? 'outbound', + }, + source: 'env', + }; + } + return { kind: 'none', from: null, source: 'none' }; +} + +// ---------- write ---------- + +/** + * Write an input shape from the UI. Fields left blank / null mean + * "keep the existing stored value" for secrets, "clear" for everything + * else. This lets an admin rotate only the password without re-entering + * it. + */ +export interface MailSettingsInput { + kind: MailTransportKind | null; + from?: string | null; + smtp?: { + host?: string | null; + port?: number | null; + secure?: boolean | null; + user?: string | null; + /** New plaintext password. Leave undefined to keep existing. */ + password?: string | null; + } | null; + postmark?: { + /** New plaintext token. Leave undefined to keep existing. */ + serverToken?: string | null; + messageStream?: string | null; + } | null; +} + +export class MailSettingsValidationError extends Error { + constructor(public readonly code: string, public readonly field?: string) { + super(code); + } +} + +export async function saveMailSettings( + db: Knex, + tenantId: string, + input: MailSettingsInput, +): Promise { + const current = (await readStored(db, tenantId)) ?? ({ kind: null, from: null } as StoredMailSettings); + + const next: StoredMailSettings = { + kind: input.kind ?? current.kind, + from: input.from !== undefined ? 
(input.from || null) : current.from, + }; + + if (next.kind === 'smtp') { + const prevSmtp = current.smtp ?? null; + const s = input.smtp ?? null; + const host = s?.host ?? prevSmtp?.host ?? ''; + // Saving kind='smtp' without a host would succeed silently and + // then blow up at first email. Reject at the boundary. + if (!host.trim()) throw new MailSettingsValidationError('smtp_host_required', 'smtp.host'); + if (!next.from) throw new MailSettingsValidationError('from_required', 'from'); + const rotated = s?.password !== undefined && s.password !== ''; + next.smtp = { + host, + port: s?.port ?? prevSmtp?.port ?? 587, + secure: s?.secure ?? prevSmtp?.secure ?? false, + user: s?.user ?? prevSmtp?.user ?? null, + passwordCiphertext: rotated + ? (s!.password === null ? null : encryptSecret(s!.password as string)) + : prevSmtp?.passwordCiphertext ?? null, + }; + next.postmark = null; + } else if (next.kind === 'postmark') { + const prevPm = current.postmark ?? null; + const p = input.postmark ?? null; + const rotated = p?.serverToken !== undefined && p.serverToken !== ''; + const token = rotated ? encryptSecret(p!.serverToken as string) : prevPm?.serverTokenCiphertext; + if (!token) throw new MailSettingsValidationError('postmark_server_token_required', 'postmark.serverToken'); + if (!next.from) throw new MailSettingsValidationError('from_required', 'from'); + next.postmark = { + serverTokenCiphertext: token, + messageStream: p?.messageStream ?? prevPm?.messageStream ?? 'outbound', + }; + next.smtp = null; + } else { + // kind = 'none' or null — clear creds, keep from if user wants to pre-set it. 
+ next.smtp = null; + next.postmark = null; + } + + const now = new Date(); + await db(TABLES.Config) + .insert({ tenantId, key: CONFIG_KEY, value: next as unknown as never, updatedAt: now }) + .onConflict(['tenantId', 'key']) + .merge({ value: next as unknown as never, updatedAt: now }); +} diff --git a/apps/api/src/mailer.ts b/apps/api/src/mailer.ts index a571cb1..b70b8f3 100644 --- a/apps/api/src/mailer.ts +++ b/apps/api/src/mailer.ts @@ -1,125 +1,168 @@ /** - * Mail delivery. The abstraction stays tiny — a single send() that takes - * { to, subject, text, html?, metadata? }. + * Transactional mailer. Transport comes from resolveMailConfig(): * - * Two implementations: - * DevMailer persists to the DevMessage table. An admin-only - * /dev/mailbox endpoint reads them back so local dev - * and CI can follow magic links without configuring - * a real provider. - * PostmarkMailer POSTs to Postmark's Email API over native fetch - * (no SDK dep). Used in any environment with - * MAIL_TRANSPORT=postmark set. + * UI-configured settings (Config table, per-tenant) take precedence — + * stored encrypted at rest; SMTP password / Postmark token decrypted at + * dispatch time. * - * The factory reads MAIL_TRANSPORT: - * "postmark" → PostmarkMailer (requires POSTMARK_SERVER_TOKEN + MAIL_FROM) - * anything else → DevMailer + * Env fallback if UI is empty (SMTP_HOST / POSTMARK_SERVER_TOKEN + + * MAIL_FROM). Env is shared platform-wide. + * + * Console fallback if neither — dev only; the magic link prints + * to the `pnpm dev:api` terminal. + * + * Mailers are created per-send so a settings change takes effect on + * the next email without a restart. A cached mailer would be a stale- + * creds footgun after rotation. + * + * Multi-tenant: every send takes a `{ db, tenantId }` context so we look + * up the right tenant's UI overrides. Pass through the request's `req.db` + * (transaction with app.tenant_id pinned). 
+ * + * Tests override via __setMailerForTests with an in-memory capturer. */ -import { ulid } from 'ulid'; -import { TABLES, type DevMessageRow } from '@openpartner/db'; -import { db } from './db.js'; +import type { Knex } from 'knex'; +import nodemailer from 'nodemailer'; +import { resolveMailConfig } from './mail-settings.js'; +import { resolveBrandName } from './brand-name.js'; +import { TABLES, type ConfigRow } from '@openpartner/db'; -export interface MailMessage { +export interface Message { to: string; subject: string; text: string; html?: string; - metadata?: Record; - /** - * Opaque tag Postmark stores alongside the message. We use it to - * distinguish creator_signup / creator_signin / vendor_signup / - * vendor_signin in dashboards and searches. - */ tag?: string; + metadata?: Record; +} + +export interface SendContext { + db: Knex; + tenantId: string; } export interface Mailer { - send(msg: MailMessage): Promise; + send(ctx: SendContext, msg: Message): Promise; } -class DevMailer implements Mailer { - async send(msg: MailMessage): Promise { - await db(TABLES.DevMessage).insert({ - id: ulid(), - to: msg.to, - subject: msg.subject, - body: msg.text, - html: msg.html ?? null, - metadata: (msg.metadata ?? {}) as never, - }); - console.log(`[dev-mail] to=${msg.to} subject="${msg.subject}"`); - } +/** Strip and quote pieces of a name for safe use in an RFC 5322 display + * name. Drops control chars and double-quotes; trims to 80 chars. + * The control-char range is intentional — it's exactly what RFC 5322 + * forbids in atom + quoted-string productions. 
*/ +// eslint-disable-next-line no-control-regex +const DISPLAY_NAME_UNSAFE = /[\x00-\x1f"\\]/g; +function safeDisplayName(name: string): string { + return name.replace(DISPLAY_NAME_UNSAFE, '').trim().slice(0, 80); } -class PostmarkMailer implements Mailer { - constructor( - private readonly serverToken: string, - private readonly from: string, - private readonly messageStream: string, - ) {} +/** Pull the bare email address out of `"Name" ` or `addr@host`. */ +function extractAddress(from: string): string { + const m = from.match(/<([^>]+)>/); + return (m?.[1] ?? from).trim(); +} - async send(msg: MailMessage): Promise { - const res = await fetch('https://api.postmarkapp.com/email', { - method: 'POST', - headers: { - 'content-type': 'application/json', - accept: 'application/json', - 'x-postmark-server-token': this.serverToken, - }, - body: JSON.stringify({ - From: this.from, - To: msg.to, - Subject: msg.subject, - TextBody: msg.text, - HtmlBody: msg.html, - MessageStream: this.messageStream, - Tag: msg.tag, - // Metadata values must be strings per Postmark's contract. - Metadata: msg.metadata - ? Object.fromEntries(Object.entries(msg.metadata).map(([k, v]) => [k, String(v)])) - : undefined, - }), - }); +class RoutingMailer implements Mailer { + async send(ctx: SendContext, msg: Message): Promise { + const cfg = await resolveMailConfig(ctx.db, ctx.tenantId); - if (!res.ok) { - const text = await res.text(); - throw new Error(`postmark send failed: ${res.status} ${text.slice(0, 300)}`); + // Brand-aware From + Reply-To when we're using the platform fallback + // (source='env'). Pattern: `"Acme via OpenPartner" `, + // Reply-To = the brand's support email if set. Means hosted brands + // get brand identity in the inbox without configuring email at all. + // For source='ui' (brand wired up their own provider) we trust their + // From and don't second-guess it. + let from = cfg.from ?? 
null; + let replyTo: string | undefined; + if (cfg.source === 'env' && from) { + const brandName = await resolveBrandName(ctx.db, ctx.tenantId); + if (brandName) { + from = `"${safeDisplayName(brandName)} via OpenPartner" <${extractAddress(from)}>`; + } + const supportRow = await ctx.db(TABLES.Config) + .where({ tenantId: ctx.tenantId, key: 'program_settings' }) + .first(); + const supportEmail = ((supportRow?.value as { supportEmail?: string } | undefined)?.supportEmail ?? '').trim(); + if (supportEmail) { + replyTo = supportEmail; + } else { + // Fallback: oldest active admin on the tenant. Means a partner + // hitting Reply on a sign-in email always lands on a human at + // the brand, even before Settings → Brand info is filled in. + const fallbackAdmin = await ctx.db(TABLES.Admin) + .where({ tenantId: ctx.tenantId }) + .whereNotNull('activatedAt') + .whereNull('revokedAt') + .orderBy('activatedAt', 'asc') + .first(['email']); + if (fallbackAdmin?.email) replyTo = fallbackAdmin.email as string; + } } - // 200 with ErrorCode=0 is the success shape; ErrorCode != 0 is a - // per-message rejection (recipient suppressed, blocked, etc). Both - // are worth knowing about, but only ErrorCode != 0 with a non-zero - // status should throw. Postmark returns ErrorCode=0 on 200. - const body = (await res.json()) as { ErrorCode?: number; Message?: string }; - if (body.ErrorCode && body.ErrorCode !== 0) { - throw new Error(`postmark rejected message: ${body.ErrorCode} ${body.Message ?? ''}`); + if (cfg.kind === 'smtp' && cfg.smtp && from) { + const transporter = nodemailer.createTransport({ + host: cfg.smtp.host, + port: cfg.smtp.port, + secure: cfg.smtp.secure, + auth: + cfg.smtp.user && cfg.smtp.password + ? { user: cfg.smtp.user, pass: cfg.smtp.password } + : undefined, + }); + await transporter.sendMail({ + from, + to: msg.to, + replyTo, + subject: msg.subject, + text: msg.text, + html: msg.html, + headers: msg.tag ? 
{ 'X-Tag': msg.tag } : undefined, + }); + return; + } + if (cfg.kind === 'postmark' && cfg.postmark && from) { + const res = await fetch('https://api.postmarkapp.com/email', { + method: 'POST', + headers: { + 'content-type': 'application/json', + accept: 'application/json', + 'X-Postmark-Server-Token': cfg.postmark.serverToken, + }, + body: JSON.stringify({ + From: from, + To: msg.to, + ReplyTo: replyTo, + Subject: msg.subject, + TextBody: msg.text, + HtmlBody: msg.html, + Tag: msg.tag, + MessageStream: cfg.postmark.messageStream, + Metadata: msg.metadata, + }), + }); + if (!res.ok) { + throw new Error(`postmark ${res.status}: ${await res.text().catch(() => '')}`); + } + const json = (await res.json().catch(() => ({}))) as { ErrorCode?: number; Message?: string }; + if (json.ErrorCode && json.ErrorCode !== 0) { + throw new Error(`postmark error ${json.ErrorCode}: ${json.Message ?? 'unknown'}`); + } + return; } + // Console fallback. Dev only. + console.log(`[mail] to=${msg.to} subject=${JSON.stringify(msg.subject)}`); + console.log(msg.text); } } -let mailerInstance: Mailer | null = null; +let override: Mailer | null = null; +const routing = new RoutingMailer(); export function getMailer(): Mailer { - if (mailerInstance) return mailerInstance; - const transport = process.env.MAIL_TRANSPORT ?? 'dev'; - if (transport === 'postmark') { - const token = process.env.POSTMARK_SERVER_TOKEN; - const from = process.env.MAIL_FROM; - if (!token) throw new Error('MAIL_TRANSPORT=postmark requires POSTMARK_SERVER_TOKEN'); - if (!from) throw new Error('MAIL_TRANSPORT=postmark requires MAIL_FROM'); - const stream = process.env.POSTMARK_MESSAGE_STREAM ?? 'outbound'; - mailerInstance = new PostmarkMailer(token, from, stream); - } else { - mailerInstance = new DevMailer(); - } - return mailerInstance; + return override ?? routing; } -/** - * Reset for tests that want to change env vars between runs. Not used - * in production code paths. 
- */ -export function __resetMailerForTests(): void { - mailerInstance = null; +/** For tests: inject a capturing / mock mailer. Pass null to reset. */ +export function __setMailerForTests(mailer: Mailer | null): void { + override = mailer; } diff --git a/apps/api/src/middleware/trial-gate.ts b/apps/api/src/middleware/trial-gate.ts new file mode 100644 index 0000000..c37fc22 --- /dev/null +++ b/apps/api/src/middleware/trial-gate.ts @@ -0,0 +1,83 @@ +/** + * Soft trial-gate. + * + * When a tenant's billing state is "trial expired without subscription" + * (paid plan picked, trial used, no current Stripe sub), this middleware + * returns 402 Payment Required on a small allowlist of "expensive" + * write endpoints. The product keeps working — clicks still get + * recorded, attribution still runs, the dashboard still renders, the + * admin can still subscribe — but they can't expand the program until + * billing is restored. + * + * What's gated: + * - POST /campaigns (create new program) + * - POST /partners (invite new partner) + * - POST /partners/:id/coupons (mint coupon) + * - POST /partners/:id/campaigns (grant program to partner) + * - POST /import/partners-csv (bulk roster import) + * - POST /admin/network/offerings (publish on the Network) + * + * What stays open (deliberate): + * - GET * (read; show their data) + * - POST /attribution/identify (SDK callback — customer signed up) + * - POST /attribution/events (SDK callback — revenue happened) + * - POST /coupons/redeem (customer used a coupon at checkout) + * - POST /webhooks/stripe (Stripe → us) + * - POST /billing/* (resubscribe, open portal) + * - POST /signin /signup /admins/login etc. (auth) + * - POST /attribution/* and click-router endpoints + * + * Rationale: don't silently lose attribution data the customer might + * resubscribe to retrieve, and don't make them debug "why does the + * SDK return errors" before they've seen the trial-expired banner. 
+ */ + +import type { NextFunction, Request, Response } from 'express'; +import { tenantOf } from '../tenancy.js'; +import { getTenantBillingState } from '../billing-plan.js'; + +// Methods+path patterns that get the 402. Keep narrow — every entry is +// a pinch point on the user's program-expansion workflow, not a +// catch-all "block everything that mutates". +interface GatedRoute { + method: string; + test: (path: string) => boolean; +} + +const GATED: GatedRoute[] = [ + { method: 'POST', test: (p) => p === '/campaigns' }, + { method: 'POST', test: (p) => p === '/partners' }, + { method: 'POST', test: (p) => /^\/partners\/[^/]+\/coupons$/.test(p) }, + { method: 'POST', test: (p) => /^\/partners\/[^/]+\/campaigns$/.test(p) }, + { method: 'POST', test: (p) => p === '/import/partners-csv' }, + { method: 'POST', test: (p) => p === '/admin/network/offerings' }, +]; + +export async function trialGate(req: Request, res: Response, next: NextFunction): Promise { + // Match before the more expensive billing-state lookup — most + // requests are not gated, and we don't want to add a DB hop to + // every read. + const matched = GATED.some((g) => g.method === req.method && g.test(req.path)); + if (!matched) return next(); + + // Tenant scope is required for the lookup. If the request hasn't + // been through tenantMiddleware (mounted globally before this), we + // can't evaluate — let it through and rely on auth middleware to + // handle it. + let scope: ReturnType | null = null; + try { + scope = tenantOf(req); + } catch { + return next(); + } + + const state = await getTenantBillingState(scope.db, scope.tenantId); + if (!state.trialExpiredWithoutSubscription) return next(); + + res.status(402).json({ + error: 'trial_expired', + detail: + 'Your 14-day trial has ended without an active subscription. 
Re-subscribe at /admin/billing to restore this action.', + plan: state.plan, + }); +} diff --git a/apps/api/src/network-client.ts b/apps/api/src/network-client.ts new file mode 100644 index 0000000..ac79bec --- /dev/null +++ b/apps/api/src/network-client.ts @@ -0,0 +1,861 @@ +/** + * Vendor-side client for the OpenPartner Network. + * + * The Network is the marketplace coordinator at network.openpartner.dev + * (a separate service in the openpartner-network repo). Vendors push + * partner upserts and revokes to it so a creator joining one vendor's + * program can be matched with other vendors' programs. + * + * Hard rule: a vendor request that succeeds locally must never fail + * because the Network is down. Every push goes through `dispatch()` + * which tries the call once with a 5s timeout and, on failure, + * persists a NetworkOutbox row for the scheduler to drain. + * + * Per-tenant: every call is scoped via `network_membership` Config. + * Tenants without that row treat the Network as disabled and short- + * circuit (the partner mutation path is a no-op for them). + * + * The wire contract is documented in docs/network-protocol.md. If you + * change a payload shape here, update the doc — the Network repo + * builds against it. + */ + +import { ulid } from 'ulid'; +import type { Knex } from 'knex'; +import { TABLES, type NetworkOutboxRow } from '@openpartner/db'; +import { decryptSecret, encryptSecret } from './crypto.js'; + +const CONFIG_KEY = 'network_membership'; +// Outbox / fire-and-forget pushes — short timeout, the scheduler retries. +const PUSH_TIMEOUT_MS = 5_000; +// Synchronous admin proxy calls (/admin/network/*). Network can cold-start +// from idle on App Platform basic-xs, which takes 10-20s, so a 5s budget +// here meant every first-after-idle render of the Network admin page +// failed with 504. Keep it generous; the request blocks the admin UI. 
+const PROXY_TIMEOUT_MS = 30_000; +const MAX_ATTEMPTS = 8; // exponential backoff: ~24h max wall time + +// ---------- config shape ---------- + +export interface NetworkMembership { + enabled: boolean; + networkUrl: string; // e.g. https://network.openpartner.dev + vendorTokenCiphertext: string; // encrypted bearer for vendor → Network + scopedKeyId: string | null; // ApiKey.id of the scoped key Network calls back with + autoEnroll: boolean; + /** Network's id for our vendor row. Persisted at signup so account + * restore can call /vendors/admin-restore (which needs the id — + * by then the vendorToken is gone). Optional for backward compat + * with rows written before this field existed. */ + vendorId?: string; +} + +interface PublicNetworkMembership { + enabled: boolean; + networkUrl: string; + hasVendorToken: boolean; + scopedKeyId: string | null; + autoEnroll: boolean; +} + +export async function getNetworkMembership( + db: Knex, + tenantId: string, +): Promise { + const row = await db(TABLES.Config).where({ tenantId, key: CONFIG_KEY }).first(); + return (row?.value as NetworkMembership | undefined) ?? null; +} + +export async function getPublicNetworkMembership( + db: Knex, + tenantId: string, +): Promise { + const m = await getNetworkMembership(db, tenantId); + if (!m) { + return { enabled: false, networkUrl: '', hasVendorToken: false, scopedKeyId: null, autoEnroll: false }; + } + return { + enabled: m.enabled, + networkUrl: m.networkUrl, + hasVendorToken: !!m.vendorTokenCiphertext, + scopedKeyId: m.scopedKeyId, + autoEnroll: m.autoEnroll, + }; +} + +export interface SaveNetworkMembershipInput { + enabled?: boolean; + networkUrl?: string; + /** Plaintext token. Undefined = keep existing; '' = clear. 
*/ + vendorToken?: string; + scopedKeyId?: string | null; + autoEnroll?: boolean; + vendorId?: string; +} + +export async function saveNetworkMembership( + db: Knex, + tenantId: string, + input: SaveNetworkMembershipInput, +): Promise { + const current = (await getNetworkMembership(db, tenantId)) ?? { + enabled: false, + networkUrl: '', + vendorTokenCiphertext: '', + scopedKeyId: null, + autoEnroll: false, + }; + + const next: NetworkMembership = { + enabled: input.enabled ?? current.enabled, + networkUrl: input.networkUrl ?? current.networkUrl, + vendorTokenCiphertext: + input.vendorToken === undefined + ? current.vendorTokenCiphertext + : input.vendorToken === '' + ? '' + : encryptSecret(input.vendorToken), + scopedKeyId: input.scopedKeyId === undefined ? current.scopedKeyId : input.scopedKeyId, + autoEnroll: input.autoEnroll ?? current.autoEnroll, + vendorId: input.vendorId ?? current.vendorId, + }; + + const now = new Date(); + await db(TABLES.Config) + .insert({ tenantId, key: CONFIG_KEY, value: next as unknown as never, updatedAt: now }) + .onConflict(['tenantId', 'key']) + .merge({ value: next as unknown as never, updatedAt: now }); +} + +// ---------- payload shapes (must match docs/network-protocol.md) ---------- + +export interface PartnerUpsertPayload { + vendorPartnerId: string; + email: string; + name: string; + profile?: Record; + joinedVendorAt: string; + status: 'pending' | 'active' | 'revoked'; + metadata?: { source: 'self_signup' | 'admin_invite' | 'backfill' }; +} + +export interface PartnerUpsertResponse { + networkCreatorId: string; + alreadyExisted: boolean; + affiliations: Array<{ + vendorId: string; + vendorPartnerId: string; + status: string; + displayName: string; + }>; +} + +// ---------- dispatch ---------- + +/** + * Run an op against the Network. Tries once synchronously with a short + * timeout; on failure persists to NetworkOutbox and returns null. The + * caller MUST treat null as a non-fatal "queued for retry". 
+ * + * Returns the parsed response on success. Specific call sites that + * care about the response (e.g. signup wanting networkCreatorId + * immediately) await this; revoke fires void. + */ +export async function dispatch( + db: Knex, + tenantId: string, + op: NetworkOutboxRow['op'], + payload: Record, +): Promise { + const m = await getNetworkMembership(db, tenantId); + if (!m || !m.enabled || !m.vendorTokenCiphertext) { + return null; // Network not configured for this tenant — silent no-op. + } + + // Same fallback as callNetwork: heal tenants that ended up with an + // empty networkUrl on their membership row (email-verify path before + // /start-connect was called) by reading from process.env.NETWORK_URL. + const networkUrl = m.networkUrl || process.env.NETWORK_URL; + if (!networkUrl) { + console.error('[network] no networkUrl on membership and NETWORK_URL env unset'); + return null; + } + + let token: string; + try { + token = decryptSecret(m.vendorTokenCiphertext); + } catch (err) { + // Bad ciphertext (encryption key rotated without re-storing). Don't + // queue — the outbox would have the same problem on every retry. + console.error('[network] vendor token undecryptable, skipping push', err); + return null; + } + + const url = endpointForOp(networkUrl, op); + try { + const res = await fetch(url, { + method: 'POST', + headers: { + 'content-type': 'application/json', + authorization: `Bearer ${token}`, + 'user-agent': 'OpenPartner-Vendor/1', + }, + body: JSON.stringify(payload), + signal: AbortSignal.timeout(PUSH_TIMEOUT_MS), + }); + if (!res.ok) { + throw new Error(`network ${res.status}: ${await res.text().catch(() => '')}`); + } + return (await res.json().catch(() => null)) as T | null; + } catch (err) { + const msg = err instanceof Error ? 
err.message : String(err); + await enqueue(db, tenantId, op, payload, msg); + return null; + } +} + +async function enqueue( + db: Knex, + tenantId: string, + op: NetworkOutboxRow['op'], + payload: Record, + err: string, +): Promise { + await db(TABLES.NetworkOutbox).insert({ + id: ulid(), + tenantId, + op, + payload: payload as unknown as never, + attempts: 1, + nextAttemptAt: nextBackoff(1), + lastAttemptAt: new Date(), + lastError: err, + status: 'pending', + }); +} + +function nextBackoff(attempts: number): Date { + // 30s, 1m, 2m, 5m, 15m, 1h, 4h, 12h — total ~24h with MAX_ATTEMPTS=8. + const schedule = [30, 60, 120, 300, 900, 3600, 14400, 43200]; + const seconds = schedule[Math.min(attempts - 1, schedule.length - 1)] ?? 43200; + return new Date(Date.now() + seconds * 1000); +} + +function endpointForOp(networkUrl: string, op: NetworkOutboxRow['op']): string { + const base = networkUrl.replace(/\/$/, ''); + switch (op) { + case 'partner_upsert': + case 'partner_revoke': + case 'backfill_partner': + return `${base}/partners/upsert`; + default: + throw new Error(`unknown network op: ${op}`); + } +} + +// ---------- public surface ---------- + +export async function pushPartnerUpsert( + db: Knex, + tenantId: string, + payload: PartnerUpsertPayload, +): Promise { + return dispatch(db, tenantId, 'partner_upsert', payload as unknown as Record); +} + +export async function pushPartnerRevoke( + db: Knex, + tenantId: string, + vendorPartnerId: string, +): Promise { + await dispatch(db, tenantId, 'partner_revoke', { + vendorPartnerId, + status: 'revoked', + revokedAt: new Date().toISOString(), + }); +} + +// ---------- backfill (used by Settings → Network → "Backfill") ---------- + +export interface BackfillPartnersResult { + total: number; + pushed: number; + queued: number; // pushed to outbox after Network failure +} + +export async function backfillPartners( + db: Knex, + tenantId: string, + partners: Array<{ id: string; email: string; name: string; createdAt: Date; 
activatedAt: Date | null; revokedAt: Date | null }>, +): Promise { + let pushed = 0; + let queued = 0; + for (const p of partners) { + const status: PartnerUpsertPayload['status'] = p.revokedAt + ? 'revoked' + : p.activatedAt + ? 'active' + : 'pending'; + const result = await pushPartnerUpsert(db, tenantId, { + vendorPartnerId: p.id, + email: p.email, + name: p.name, + joinedVendorAt: p.createdAt.toISOString(), + status, + metadata: { source: 'backfill' }, + }); + if (result) { + // Stamp the canonical Network id back onto the Partner row so + // future admin views can show "this creator is on the Network". + await db(TABLES.Partner) + .where({ id: p.id }) + .update({ + metadata: db.raw( + `jsonb_set(coalesce("metadata", '{}'::jsonb), '{network}', ?::jsonb, true)`, + [ + JSON.stringify({ + creatorId: result.networkCreatorId, + preExisting: result.alreadyExisted, + affiliations: result.affiliations.length, + syncedAt: new Date().toISOString(), + }), + ], + ), + updatedAt: new Date(), + }); + pushed += 1; + } else { + queued += 1; + } + } + return { total: partners.length, pushed, queued }; +} + +// ---------- Self-serve onboarding helpers ---------- +// These talk to Network /vendors/signup + /vendors/verify-and-issue-token. +// Unlike the upsert path, failures here surface to the admin immediately +// (no outbox); a failed signup is something the admin will retry by hand. + +export interface SignupInput { + networkUrl: string; + instanceUrl: string; + scopedKey: string; + displayName: string; + contactEmail: string; + contactName?: string; + tier: 'hosted' | 'self_hosted'; + portalCallbackUrl: string; + /** When set, send as admin bearer for the fast-path signup (skips + * email verify, returns vendorToken inline). Only set on hosted + * tenants where the brand admin's email was already verified by + * openpartner's signup. 
*/ + adminAuthToken?: string; +} + +export type SignupResult = + | { vendorId: string; status: 'pending'; emailSent: boolean } + | { vendorId: string; vendorToken: string; status: 'active'; displayName: string }; + +export async function signupWithNetwork(input: SignupInput): Promise { + const url = `${input.networkUrl.replace(/\/$/, '')}/vendors/signup`; + const headers: Record = { + 'content-type': 'application/json', + 'user-agent': 'OpenPartner-Vendor/1', + }; + if (input.adminAuthToken) { + headers.authorization = `Bearer ${input.adminAuthToken}`; + } + const res = await fetch(url, { + method: 'POST', + headers, + body: JSON.stringify({ + instanceUrl: input.instanceUrl, + scopedKey: input.scopedKey, + displayName: input.displayName, + tier: input.tier, + contact: { email: input.contactEmail, name: input.contactName }, + portalCallbackUrl: input.portalCallbackUrl, + }), + // Network's signup is synchronous: row insert + magic-link insert + + // Postmark send all inside one transaction. Cold-start can push the + // mail send past the old 10s budget; bump to 30s so we don't ETIMEDOUT + // a request that ultimately succeeds (the user gets the email but + // sees a 502 in the UI, then double-clicks and lands on already-pending). 
+ signal: AbortSignal.timeout(30_000), + }); + const text = await res.text(); + if (!res.ok) { + throw new Error(`network signup failed (${res.status}): ${text.slice(0, 300)}`); + } + return JSON.parse(text) as SignupResult; +} + +export interface VerifyResult { + vendorId: string; + vendorToken: string; + displayName: string; + issuedAt: string; +} + +export async function completeNetworkConnect(networkUrl: string, ntoken: string): Promise { + const url = `${networkUrl.replace(/\/$/, '')}/vendors/verify-and-issue-token`; + const res = await fetch(url, { + method: 'POST', + headers: { 'content-type': 'application/json', 'user-agent': 'OpenPartner-Vendor/1' }, + body: JSON.stringify({ token: ntoken }), + signal: AbortSignal.timeout(10_000), + }); + const text = await res.text(); + if (!res.ok) { + throw new Error(`network verify failed (${res.status}): ${text.slice(0, 300)}`); + } + return JSON.parse(text) as VerifyResult; +} + +// ---------- Direct Network proxy calls (used by /admin/network/* routes) ---------- +// +// The vendor admin's portal needs to manage offerings + review +// partnership requests against the Network. Both require the +// vendorToken (server-side secret in network_membership). The portal +// can't hold it, so the openpartner backend proxies on behalf of the +// admin: load membership → use vendorToken → call Network → return. + +async function callNetwork( + db: Knex, + tenantId: string, + method: 'GET' | 'POST' | 'PATCH' | 'DELETE', + path: string, + body?: unknown, + opts?: { actingVendorPartnerId?: string }, +): Promise { + const m = await getNetworkMembership(db, tenantId); + if (!m || !m.enabled || !m.vendorTokenCiphertext) { + throw new NetworkProxyError(503, 'network_not_configured'); + } + // Fall back to process.env.NETWORK_URL when the membership row has + // an empty networkUrl. 
Some tenants ended up in this state when the + // email-verify path ran without a prior /start-connect to seed the + // row — they still have a vendorToken but no URL. Heal silently + // here; the next saveNetworkMembership write persists the env value. + const networkUrl = m.networkUrl || process.env.NETWORK_URL; + if (!networkUrl) { + throw new NetworkProxyError(503, 'network_url_unresolvable'); + } + let token: string; + try { + token = decryptSecret(m.vendorTokenCiphertext); + } catch (err) { + throw new NetworkProxyError(500, `vendor_token_undecryptable: ${err instanceof Error ? err.message : String(err)}`); + } + const url = `${networkUrl.replace(/\/$/, '')}${path}`; + const headers: Record = { + 'content-type': 'application/json', + authorization: `Bearer ${token}`, + 'user-agent': 'OpenPartner-Vendor/1', + }; + if (opts?.actingVendorPartnerId) { + // Tells the Network: this call is the vendor proxying on behalf of one + // of its own partners. The Network resolves the Creator via + // VendorAffiliation(vendorId, vendorPartnerId). + headers['x-act-as-vendor-partner'] = opts.actingVendorPartnerId; + } + const res = await fetch(url, { + method, + headers, + body: body !== undefined ? JSON.stringify(body) : undefined, + signal: AbortSignal.timeout(PROXY_TIMEOUT_MS), + }); + const text = await res.text(); + if (!res.ok) { + // Log the full upstream response so federation_failed / similar + // errors with a long detail field are visible in API logs without + // having to dig through browser devtools. Status ≥400 only — 2xx + // would be noisy. + console.error('[network-client] upstream error', { + method, + path, + status: res.status, + body: text.length > 1000 ? `${text.slice(0, 1000)}…(truncated)` : text, + }); + throw new NetworkProxyError(res.status, text.slice(0, 500) || `http_${res.status}`); + } + return text ? 
(JSON.parse(text) as T) : (null as unknown as T); +} + +export class NetworkProxyError extends Error { + constructor(public status: number, message: string) { + super(message); + this.name = 'NetworkProxyError'; + } +} + +// Thin wrappers — they're a 1:1 with Network endpoints. Kept here so +// the route handlers stay declarative. +export const networkProxy = { + listOfferings: (db: Knex, tenantId: string) => + callNetwork<{ offerings: unknown[] }>(db, tenantId, 'GET', '/vendors/me/offerings'), + createOffering: (db: Knex, tenantId: string, body: unknown) => + callNetwork(db, tenantId, 'POST', '/vendors/me/offerings', body), + updateOffering: (db: Knex, tenantId: string, id: string, body: unknown) => + callNetwork(db, tenantId, 'PATCH', `/vendors/me/offerings/${encodeURIComponent(id)}`, body), + deleteOffering: (db: Knex, tenantId: string, id: string) => + callNetwork<{ ok: boolean }>(db, tenantId, 'DELETE', `/vendors/me/offerings/${encodeURIComponent(id)}`), + listRequests: (db: Knex, tenantId: string, status?: string) => + callNetwork<{ requests: unknown[] }>(db, tenantId, 'GET', `/vendors/me/requests${status ? `?status=${encodeURIComponent(status)}` : ''}`), + approveRequest: (db: Knex, tenantId: string, id: string, body: unknown) => + callNetwork(db, tenantId, 'POST', `/vendors/me/requests/${encodeURIComponent(id)}/approve`, body), + rejectRequest: (db: Knex, tenantId: string, id: string, body: unknown) => + callNetwork(db, tenantId, 'POST', `/vendors/me/requests/${encodeURIComponent(id)}/reject`, body), + whoami: (db: Knex, tenantId: string) => + callNetwork<{ id: string; displayName: string; status: string; partnerCount: number }>( + db, + tenantId, + 'GET', + '/vendors/me', + ), + + /** Brand-side creator discovery — search the Network's creator + * directory with full-text + filter params. 
The querystring is + * forwarded verbatim, so the brand UI can pass whatever Network's + * schema accepts (q, categories, locations, platforms, minFollowers, + * maxFollowers, minRevenue90d, sort, limit). */ + discoverCreators: (db: Knex, tenantId: string, querystring: string) => + callNetwork<{ creators: unknown[] }>( + db, + tenantId, + 'GET', + `/vendors/me/discover/creators${querystring ? `?${querystring}` : ''}`, + ), + + /** Invite a creator to apply to one of this vendor's offerings. + * Network mints an OfferingInvitation + emails the creator a + * deeplink. Idempotent per (offering, creator) — re-inviting the + * same creator refreshes the token + expiry. */ + inviteCreator: (db: Knex, tenantId: string, offeringId: string, body: { creatorId: string; message?: string }) => + callNetwork<{ ok: true; expiresAt: string }>( + db, + tenantId, + 'POST', + `/vendors/me/offerings/${encodeURIComponent(offeringId)}/invitations`, + body, + ), + + // Billing — same pattern (vendor token held server-side, portal can't + // hit Stripe directly). Forwards body straight through; the Network's + // route does all validation. + getBilling: (db: Knex, tenantId: string) => + callNetwork<{ + billingRequired: boolean; + bundledWithMainPlan: boolean; + subscriptionStatus: string | null; + stripeCustomerId: string | null; + stripeSubscriptionId: string | null; + currentPeriodEnd: string | null; + networkPriceConfigured: boolean; + networkUsagePriceConfigured: boolean; + billingEnabled: boolean; + }>(db, tenantId, 'GET', '/vendors/me/billing'), + + createCheckout: (db: Knex, tenantId: string, body: { successUrl: string; cancelUrl: string }) => + callNetwork<{ url: string }>(db, tenantId, 'POST', '/vendors/me/billing/checkout', body), + + openPortal: (db: Knex, tenantId: string, body: { returnUrl: string }) => + callNetwork<{ url: string }>(db, tenantId, 'POST', '/vendors/me/billing/portal', body), + + // Account deletion lifecycle. 
delete uses the tenant's vendorToken so + // the call is naturally scoped to "this vendor"; restore goes through + // the admin endpoint because by that point the vendorToken is gone. + deleteVendor: (db: Knex, tenantId: string) => + callNetwork<{ ok: boolean }>(db, tenantId, 'POST', '/vendors/me/delete'), +}; + +export async function adminRestoreVendor(networkUrl: string, vendorId: string): Promise<{ vendorId: string; vendorToken: string }> { + const adminKey = process.env.NETWORK_ADMIN_API_KEY; + if (!adminKey) throw new Error('NETWORK_ADMIN_API_KEY not set — cannot restore vendor'); + const url = `${networkUrl.replace(/\/$/, '')}/vendors/admin-restore`; + const res = await fetch(url, { + method: 'POST', + headers: { + 'content-type': 'application/json', + authorization: `Bearer ${adminKey}`, + 'user-agent': 'OpenPartner-Vendor/1', + }, + body: JSON.stringify({ vendorId }), + signal: AbortSignal.timeout(10_000), + }); + const text = await res.text(); + if (!res.ok) throw new Error(`network restore failed (${res.status}): ${text.slice(0, 300)}`); + return JSON.parse(text) as { vendorId: string; vendorToken: string }; +} + +// ---------- Partner-acting Network proxy ---------- +// Used by openpartner partner-role routes (/api/network/partner/*) to +// forward calls to Network's existing /creators/me/* + /offerings + /vendors/:id +// endpoints. Auth is the vendor bearer plus an x-act-as-vendor-partner +// header carrying the partner's vendor-local id; Network's requireCreator +// resolves the creator from VendorAffiliation(vendorId, vendorPartnerId). + +export const partnerProxy = { + // Discovery (public on Network — no acting header needed) + listOfferings: (db: Knex, tenantId: string, qs: string) => + callNetwork<{ offerings: unknown[] }>(db, tenantId, 'GET', `/offerings${qs ? 
`?${qs}` : ''}`), + getOffering: (db: Knex, tenantId: string, id: string) => + callNetwork(db, tenantId, 'GET', `/offerings/${encodeURIComponent(id)}`), + getVendor: (db: Knex, tenantId: string, id: string) => + callNetwork(db, tenantId, 'GET', `/vendors/${encodeURIComponent(id)}`), + + // Acting-as-creator (need the header) + applyToOffering: (db: Knex, tenantId: string, vendorPartnerId: string, id: string, body: unknown) => + callNetwork(db, tenantId, 'POST', `/offerings/${encodeURIComponent(id)}/apply`, body, { + actingVendorPartnerId: vendorPartnerId, + }), + listMyAffiliations: (db: Knex, tenantId: string, vendorPartnerId: string) => + callNetwork<{ affiliations: unknown[] }>(db, tenantId, 'GET', '/creators/me/affiliations', undefined, { + actingVendorPartnerId: vendorPartnerId, + }), + getAffiliationEarnings: (db: Knex, tenantId: string, vendorPartnerId: string, affId: string) => + callNetwork( + db, + tenantId, + 'GET', + `/creators/me/affiliations/${encodeURIComponent(affId)}/earnings`, + undefined, + { actingVendorPartnerId: vendorPartnerId }, + ), + listMyRequests: (db: Knex, tenantId: string, vendorPartnerId: string) => + callNetwork<{ requests: unknown[] }>(db, tenantId, 'GET', '/creators/me/requests', undefined, { + actingVendorPartnerId: vendorPartnerId, + }), + cancelRequest: (db: Knex, tenantId: string, vendorPartnerId: string, reqId: string) => + callNetwork<{ ok: boolean }>( + db, + tenantId, + 'POST', + `/creators/me/requests/${encodeURIComponent(reqId)}/cancel`, + undefined, + { actingVendorPartnerId: vendorPartnerId }, + ), + getMyProfile: (db: Knex, tenantId: string, vendorPartnerId: string) => + callNetwork(db, tenantId, 'GET', '/creators/me', undefined, { + actingVendorPartnerId: vendorPartnerId, + }), + updateMyProfile: (db: Knex, tenantId: string, vendorPartnerId: string, body: unknown) => + callNetwork(db, tenantId, 'PATCH', '/creators/me', body, { + actingVendorPartnerId: vendorPartnerId, + }), +}; + +// ---------- Network-originated 
payouts reporting ---------- +// +// The Network bills self-hosted vendors 3% on payouts whose Partner +// came from the marketplace. Vendor side aggregates the period's +// Network-originated payout total and POSTs to /vendors/me/report- +// payouts; the Network turns it into a Stripe meter event. +// +// Cadence: piggyback on the openpartner scheduler (daily), keyed +// against a Config high-water mark. Hosted-tier vendors get 204'd by +// the Network (their billing is bundled), so this is a no-op cost +// from their perspective. + +export interface NetworkPayoutsReportResult { + rangeStart: Date | null; + rangeEnd: Date; + amountUsd: number; + reported: boolean; + reason?: string; +} + +export async function reportNetworkPayoutsToNetwork( + db: Knex, + tenantId: string, +): Promise { + const m = await getNetworkMembership(db, tenantId); + const rangeEnd = new Date(); + if (!m || !m.enabled || !m.networkUrl || !m.vendorTokenCiphertext) { + return { rangeStart: null, rangeEnd, amountUsd: 0, reported: false, reason: 'network_not_configured' }; + } + let token: string; + try { + token = decryptSecret(m.vendorTokenCiphertext); + } catch { + return { rangeStart: null, rangeEnd, amountUsd: 0, reported: false, reason: 'token_undecryptable' }; + } + + const { CONFIG_KEYS, getConfig, setConfig } = await import('./config.js'); + const { aggregateNetworkOriginatedPayouts } = await import('./usage-billing.js'); + + const lastIso = await getConfig(db, tenantId, CONFIG_KEYS.LastNetworkPayoutsReportedAt); + const rangeStart = lastIso ? new Date(lastIso) : null; + const amountUsd = await aggregateNetworkOriginatedPayouts(db, rangeStart, rangeEnd); + + if (amountUsd <= 0) { + // Advance the high-water mark anyway so we don't re-scan the + // same window. The Network endpoint short-circuits on 0 too, + // but skipping the round-trip is cheaper. 
+ await setConfig(db, tenantId, CONFIG_KEYS.LastNetworkPayoutsReportedAt, rangeEnd.toISOString()); + return { rangeStart, rangeEnd, amountUsd: 0, reported: false, reason: 'no_network_payouts_in_range' }; + } + + const url = `${m.networkUrl.replace(/\/$/, '')}/vendors/me/report-payouts`; + const res = await fetch(url, { + method: 'POST', + headers: { + 'content-type': 'application/json', + authorization: `Bearer ${token}`, + 'user-agent': 'OpenPartner-Vendor/1', + }, + body: JSON.stringify({ + amountUsd, + sinceIso: rangeStart ? rangeStart.toISOString() : null, + untilIso: rangeEnd.toISOString(), + }), + signal: AbortSignal.timeout(PUSH_TIMEOUT_MS), + }); + if (!res.ok) { + const text = await res.text().catch(() => ''); + // DON'T advance the high-water mark on Network failures — next + // tick re-includes this period's payouts. Same idempotency model + // as the meter event: the Network's identifier dedups at Stripe. + return { + rangeStart, + rangeEnd, + amountUsd, + reported: false, + reason: `network ${res.status}: ${text.slice(0, 200)}`, + }; + } + await setConfig(db, tenantId, CONFIG_KEYS.LastNetworkPayoutsReportedAt, rangeEnd.toISOString()); + return { rangeStart, rangeEnd, amountUsd, reported: true }; +} + +// ---------- Heartbeat (Network liveness probe) ---------- +// +// The Network uses partnerCount + lastHeartbeatAt to (a) populate the +// vendor's "active partners" stat in admin UIs, and (b) prune abandoned +// instances from creator-facing search after extended silence. +// +// Cadence: hourly via the scheduler — frequent enough that the displayed +// count feels live to brand admins, infrequent enough to be polite to +// the Network. Also opportunistically fired by `/admin/network/me` so a +// brand admin who just landed on the page after a partner was added +// sees fresh data on the next request. +// +// Counting: non-revoked Partners. 
lastEventAt is the most recent Event +// timestamp (any kind), giving the Network signal even from tenants +// who haven't added partners but are processing conversions. + +export interface HeartbeatResult { + sent: boolean; + partnerCount: number; + reason?: string; +} + +export async function sendHeartbeat(db: Knex, tenantId: string): Promise { + const m = await getNetworkMembership(db, tenantId); + if (!m || !m.enabled || !m.networkUrl || !m.vendorTokenCiphertext) { + return { sent: false, partnerCount: 0, reason: 'network_not_configured' }; + } + let token: string; + try { + token = decryptSecret(m.vendorTokenCiphertext); + } catch { + return { sent: false, partnerCount: 0, reason: 'token_undecryptable' }; + } + + const partnerRows = await db(TABLES.Partner) + .whereNull('revokedAt') + .count>({ count: '*' }); + const partnerCount = Number(partnerRows[0]?.count ?? 0); + + const lastEventRow = await db(TABLES.Event) + .orderBy('createdAt', 'desc') + .first<{ createdAt: Date } | undefined>('createdAt'); + const lastEventAt = lastEventRow?.createdAt ? new Date(lastEventRow.createdAt).toISOString() : undefined; + + const url = `${m.networkUrl.replace(/\/$/, '')}/vendors/me/heartbeat`; + try { + const res = await fetch(url, { + method: 'POST', + headers: { + 'content-type': 'application/json', + authorization: `Bearer ${token}`, + 'user-agent': 'OpenPartner-Vendor/1', + }, + body: JSON.stringify({ partnerCount, ...(lastEventAt ? { lastEventAt } : {}) }), + signal: AbortSignal.timeout(PUSH_TIMEOUT_MS), + }); + if (!res.ok) { + const text = await res.text().catch(() => ''); + return { sent: false, partnerCount, reason: `network ${res.status}: ${text.slice(0, 200)}` }; + } + return { sent: true, partnerCount }; + } catch (err) { + return { sent: false, partnerCount, reason: err instanceof Error ? 
err.message : String(err) }; + } +} + +// ---------- outbox drain (called from scheduler.ts) ---------- + +export async function drainOutbox(db: Knex, tenantId: string): Promise<{ drained: number; succeeded: number; dead: number }> { + const m = await getNetworkMembership(db, tenantId); + if (!m || !m.enabled || !m.networkUrl || !m.vendorTokenCiphertext) { + return { drained: 0, succeeded: 0, dead: 0 }; + } + let token: string; + try { + token = decryptSecret(m.vendorTokenCiphertext); + } catch { + return { drained: 0, succeeded: 0, dead: 0 }; + } + + const due = await db(TABLES.NetworkOutbox) + .where({ status: 'pending' }) + .andWhere('nextAttemptAt', '<=', new Date()) + .orderBy('nextAttemptAt', 'asc') + .limit(100); + + let succeeded = 0; + let dead = 0; + + for (const row of due) { + try { + const url = endpointForOp(m.networkUrl, row.op); + const res = await fetch(url, { + method: 'POST', + headers: { + 'content-type': 'application/json', + authorization: `Bearer ${token}`, + 'user-agent': 'OpenPartner-Vendor/1', + }, + body: JSON.stringify(row.payload), + signal: AbortSignal.timeout(PUSH_TIMEOUT_MS), + }); + if (!res.ok) { + throw new Error(`network ${res.status}`); + } + await db(TABLES.NetworkOutbox).where({ id: row.id }).del(); + succeeded += 1; + } catch (err) { + const attempts = row.attempts + 1; + const msg = err instanceof Error ? 
err.message : String(err); + if (attempts >= MAX_ATTEMPTS) { + await db(TABLES.NetworkOutbox).where({ id: row.id }).update({ + status: 'dead', + attempts, + lastAttemptAt: new Date(), + lastError: msg, + }); + dead += 1; + } else { + await db(TABLES.NetworkOutbox).where({ id: row.id }).update({ + attempts, + nextAttemptAt: nextBackoff(attempts), + lastAttemptAt: new Date(), + lastError: msg, + }); + } + } + } + + return { drained: due.length, succeeded, dead }; +} diff --git a/apps/api/src/network/crypto.ts b/apps/api/src/network/crypto.ts deleted file mode 100644 index e4ff0d5..0000000 --- a/apps/api/src/network/crypto.ts +++ /dev/null @@ -1,55 +0,0 @@ -/** - * Envelope encryption for federation keys. - * - * Why: the Network needs to call out to a vendor's OpenPartner instance - * admin API on partnership approval. That means holding the plaintext key - * somewhere — a sha256 hash would be useless for outbound calls. - * - * We use AES-256-GCM with a master key pulled from NETWORK_ENCRYPTION_KEY - * (32 bytes, base64 or hex). In dev, if no key is set, we use a fixed - * dev-only key and log a warning — this is NEVER OK in production. The - * env-loader startup check enforces presence in production builds. - */ - -import { createCipheriv, createDecipheriv, randomBytes } from 'node:crypto'; - -const ALG = 'aes-256-gcm'; -const IV_LEN = 12; // GCM recommends 12 bytes - -let cachedKey: Buffer | null = null; - -function masterKey(): Buffer { - if (cachedKey) return cachedKey; - const raw = process.env.NETWORK_ENCRYPTION_KEY; - if (!raw) { - if (process.env.NODE_ENV === 'production') { - throw new Error('NETWORK_ENCRYPTION_KEY is required in production'); - } - console.warn('[network.crypto] NETWORK_ENCRYPTION_KEY not set — using dev-only fallback. DO NOT USE IN PROD.'); - cachedKey = Buffer.alloc(32, 0x42); - return cachedKey; - } - // accept either hex or base64 - const buf = raw.length === 64 ? 
Buffer.from(raw, 'hex') : Buffer.from(raw, 'base64'); - if (buf.length !== 32) throw new Error('NETWORK_ENCRYPTION_KEY must decode to exactly 32 bytes'); - cachedKey = buf; - return buf; -} - -export function encryptKey(plaintext: string): string { - const iv = randomBytes(IV_LEN); - const cipher = createCipheriv(ALG, masterKey(), iv); - const ct = Buffer.concat([cipher.update(plaintext, 'utf8'), cipher.final()]); - const tag = cipher.getAuthTag(); - return Buffer.concat([iv, tag, ct]).toString('base64'); -} - -export function decryptKey(envelope: string): string { - const buf = Buffer.from(envelope, 'base64'); - const iv = buf.subarray(0, IV_LEN); - const tag = buf.subarray(IV_LEN, IV_LEN + 16); - const ct = buf.subarray(IV_LEN + 16); - const decipher = createDecipheriv(ALG, masterKey(), iv); - decipher.setAuthTag(tag); - return Buffer.concat([decipher.update(ct), decipher.final()]).toString('utf8'); -} diff --git a/apps/api/src/network/federation.ts b/apps/api/src/network/federation.ts deleted file mode 100644 index 5df854b..0000000 --- a/apps/api/src/network/federation.ts +++ /dev/null @@ -1,195 +0,0 @@ -/** - * Federation client. - * - * When the Network approves a Partnership, we provision the actual - * Partner + Link on the vendor's OpenPartner instance — that's where - * attribution and payouts live. We call the vendor's admin API using the - * encrypted key the vendor supplied at registration. - * - * The client is resilient: if the vendor's instance is unreachable, the - * approval is rolled back so the creator isn't left with a half-provisioned - * partnership. (See network-requests.ts for the transaction boundary.) 
- */ - -import type { NetworkVendorRow, OfferingRow } from '@openpartner/db'; -import { decryptKey } from './crypto.js'; - -export interface FederationCreator { - name: string; - email: string; - handle: string; - promoCode?: string | null; -} - -export interface PartnerDashboardStats { - partnerId: string; - since: string; - clicks: number; - attributedEvents: number; - attributedRevenue: number; - commissionByStatus: Record; -} - -/** - * Read-side federation: pull a partner's dashboard off their vendor's - * OpenPartner instance. Used by the Network to surface per-partnership - * earnings to creators (and to vendors, inverted — "how much has this - * creator earned you?"). Attribution never leaves the vendor's instance; - * we just project it into the Network UI. - */ -export async function fetchPartnerDashboard( - vendor: NetworkVendorRow, - partnerId: string, -): Promise { - const key = decryptKey(vendor.instanceKeyCiphertext); - const res = await fetchJson(`${vendor.instanceUrl}/partners/${partnerId}/dashboard`, { - method: 'GET', - key, - }); - return res as unknown as PartnerDashboardStats; -} - -export interface PartnerCommission { - id: string; - partnerId: string; - amount: string; - currency: string; - status: 'accrued' | 'approved' | 'paid' | 'reversed' | (string & {}); - accruedAt: string; - paidAt: string | null; -} - -export async function fetchPartnerCommissions( - vendor: NetworkVendorRow, - partnerId: string, -): Promise { - const key = decryptKey(vendor.instanceKeyCiphertext); - const res = (await fetchJson(`${vendor.instanceUrl}/partners/${partnerId}/commissions?limit=500`, { - method: 'GET', - key, - })) as { commissions?: PartnerCommission[] }; - return res.commissions ?? 
[]; -} - -export interface FederatedPartner { - partnerId: string; - linkKey: string; - publicShareUrl: string; - routerUrl: string; -} - -export async function provisionPartnerOnVendor(params: { - vendor: NetworkVendorRow; - offering: OfferingRow; - creator: FederationCreator; -}): Promise { - const { vendor, offering, creator } = params; - const key = decryptKey(vendor.instanceKeyCiphertext); - - const createPartnerRes = await fetchJson(`${vendor.instanceUrl}/partners`, { - method: 'POST', - key, - body: { - email: creator.email, - name: creator.name, - metadata: { source: 'openpartner_network', creatorHandle: creator.handle }, - }, - }); - - const partnerId = String(createPartnerRes.id); - - // Preferred link key order: request-level promoCode → creator handle. The - // slug is what appears in the share URL (e.g. getcoherence.io/r/), - // so we respect whatever the creator chose up-front. If uniqueness - // collides on the vendor's instance, fetchJsonWithFallback retries with - // a short suffix so the creator still gets something close to what they - // picked instead of the provisioning failing outright. - const linkKey = sanitizeLinkKey(creator.promoCode || creator.handle); - const linkPayload = { - linkKey, - campaignId: offering.vendorCampaignId, - destinationUrl: offering.productUrl, - }; - - const linkRes = await fetchJsonWithFallback(`${vendor.instanceUrl}/partners/${partnerId}/links`, { - method: 'POST', - key, - body: linkPayload, - fallbackBody: { ...linkPayload, linkKey: `${linkKey}-${partnerId.slice(-6).toLowerCase()}` }, - }); - - // Router URL is co-deployed with the vendor's OpenPartner. Convention: - // swap the API host's default port (4601) for the router's (4701), or - // honor a routerUrl override we could add to NetworkVendor later. 
- const routerUrl = deriveRouterUrl(vendor); - const actualLinkKey = String(linkRes.linkKey); - const publicShareUrl = `${routerUrl}/r/${actualLinkKey}`; - - return { partnerId, linkKey: actualLinkKey, publicShareUrl, routerUrl }; -} - -function sanitizeLinkKey(raw: string): string { - const cleaned = raw - .toLowerCase() - .replace(/[^a-z0-9_-]+/g, '_') - .replace(/^_+|_+$/g, '') - .slice(0, 40); - return cleaned || 'creator'; -} - -function deriveRouterUrl(vendor: NetworkVendorRow): string { - // Priority: explicit NetworkVendor.routerUrl → env override → port-swap - // convention (API 4601 → router 4701) for localhost dev. Production - // vendors should set routerUrl to their branded apex (e.g. - // https://getcoherence.io) so share URLs land at the right hostname. - if (vendor.routerUrl) return vendor.routerUrl; - const env = process.env.NETWORK_ROUTER_URL; - if (env) return env; - try { - const url = new URL(vendor.instanceUrl); - if (url.port === '4601') { - url.port = '4701'; - return url.origin; - } - } catch { - /* ignore */ - } - return vendor.instanceUrl; -} - -interface FetchParams { - method: 'POST' | 'GET'; - key: string; - body?: unknown; -} - -async function fetchJson(url: string, params: FetchParams): Promise> { - const res = await fetch(url, { - method: params.method, - headers: { - authorization: `Bearer ${params.key}`, - 'content-type': 'application/json', - }, - body: params.body !== undefined ? JSON.stringify(params.body) : undefined, - }); - const text = await res.text(); - if (!res.ok) { - throw new Error(`${params.method} ${url} → ${res.status}: ${text.slice(0, 300)}`); - } - return text ? (JSON.parse(text) as Record) : {}; -} - -async function fetchJsonWithFallback( - url: string, - params: FetchParams & { fallbackBody: unknown }, -): Promise> { - try { - return await fetchJson(url, { method: params.method, key: params.key, body: params.body }); - } catch (err) { - const msg = err instanceof Error ? 
err.message : String(err); - if (msg.includes('409') || msg.includes('linkKey_taken')) { - return fetchJson(url, { method: params.method, key: params.key, body: params.fallbackBody }); - } - throw err; - } -} diff --git a/apps/api/src/network/safe-fetch.ts b/apps/api/src/network/safe-fetch.ts deleted file mode 100644 index ff7575d..0000000 --- a/apps/api/src/network/safe-fetch.ts +++ /dev/null @@ -1,86 +0,0 @@ -/** - * SSRF-safe outbound fetch to user-provided URLs. - * - * The Network flow accepts a vendor's `instanceUrl` unauthenticated — a - * prospective vendor is still signing up and doesn't have an account - * yet. That means we can't fix the SSRF surface with auth; we have to - * validate the URL itself. Defence in depth: - * - * 1. Only http:// or https:// (no file:, gopher:, etc.) - * 2. Resolve DNS; reject if ANY resolved address lives in a private - * / loopback / link-local / cgnat range. All addresses must be - * public because an attacker who controls DNS can rebind between - * this check and fetch(), but matching hostname-lookup-then-fetch - * with a Node agent that dials only the checked IPs is beyond - * v1 scope — the loud-default rejection here closes 95% of - * exploits in the wild. - * 3. Self-hosters running everything on a VPN or private network can - * opt out by setting NETWORK_ALLOW_PRIVATE_HOSTS=1. - * - * Still returns a standard Response — the caller pipes through to - * their existing logic. 
- */ - -import { lookup } from 'node:dns/promises'; -import { isIP } from 'node:net'; - -const PRIVATE_V4 = [ - /^0\./, - /^10\./, - /^127\./, - /^169\.254\./, - /^172\.(1[6-9]|2\d|3[01])\./, - /^192\.168\./, - /^100\.(6[4-9]|[7-9]\d|1[01]\d|12[0-7])\./, // CGNAT 100.64.0.0/10 -]; - -function isPrivateAddress(addr: string): boolean { - const lower = addr.toLowerCase(); - if (lower === '::1' || lower === '::' || lower.startsWith('fe80:') || lower.startsWith('fc') || lower.startsWith('fd')) { - return true; - } - // IPv4-mapped IPv6 - const mapped = lower.match(/^::ffff:([\d.]+)$/); - if (mapped && mapped[1]) return PRIVATE_V4.some((re) => re.test(mapped[1]!)); - return PRIVATE_V4.some((re) => re.test(lower)); -} - -async function assertPublicHost(host: string): Promise { - if (process.env.NETWORK_ALLOW_PRIVATE_HOSTS === '1') return; - // Tests spin up loopback vendor instances on ephemeral ports. The - // bypass only triggers under NODE_ENV=test (vitest default) — not a - // knob a deployed instance can flip accidentally. 
- if (process.env.NODE_ENV === 'test') return; - - if (isIP(host) !== 0) { - if (isPrivateAddress(host)) { - throw Object.assign(new Error('private_host_blocked'), { code: 'private_host_blocked' }); - } - return; - } - - const records = await lookup(host, { all: true }); - if (records.length === 0) { - throw Object.assign(new Error('dns_no_records'), { code: 'dns_no_records' }); - } - for (const r of records) { - if (isPrivateAddress(r.address)) { - throw Object.assign(new Error('private_host_blocked'), { code: 'private_host_blocked' }); - } - } -} - -export async function safeFetch(urlString: string, init: RequestInit = {}): Promise { - const url = new URL(urlString); - if (url.protocol !== 'http:' && url.protocol !== 'https:') { - throw Object.assign(new Error('unsupported_protocol'), { code: 'unsupported_protocol' }); - } - await assertPublicHost(url.hostname); - - return fetch(url, { - ...init, - // 10s ceiling — enough for a slow TLS handshake on a distant box, - // short enough that an attacker can't use us as a long-tail probe. 
- signal: AbortSignal.timeout(10_000), - }); -} diff --git a/apps/api/src/network/validation.ts b/apps/api/src/network/validation.ts deleted file mode 100644 index bb4f6a1..0000000 --- a/apps/api/src/network/validation.ts +++ /dev/null @@ -1,113 +0,0 @@ -import { z } from 'zod'; - -export const platformSchema = z.object({ - platform: z.enum(['youtube', 'twitter', 'instagram', 'tiktok', 'blog', 'podcast', 'other']), - url: z.string().url(), - followers: z.number().int().nonnegative().optional(), -}); - -export const payoutSchema = z.discriminatedUnion('type', [ - z.object({ - type: z.literal('recurring_percent'), - percent: z.number().positive().max(100), - durationMonths: z.number().int().positive().nullable(), - }), - z.object({ - type: z.literal('one_time_fee'), - amount: z.number().positive(), - currency: z.string().length(3).optional(), - }), - z.object({ - type: z.literal('tiered_percent'), - tiers: z - .array(z.object({ minRevenueUsd: z.number().nonnegative(), percent: z.number().positive().max(100) })) - .min(1), - }), -]); - -export const bonusSchema = z.object({ - description: z.string().min(1), - triggerRevenueUsd: z.number().positive(), - bonusUsd: z.number().positive(), -}); - -export const termsSchema = z.object({ - payout: payoutSchema, - bonuses: z.array(bonusSchema).optional(), - cookieWindowDays: z.number().int().min(1).max(365), - exclusions: z.array(z.string()).optional(), -}); - -// Same shape as vendor-side Link.linkKey — URL-safe, 3–40 chars. Applies -// to both creator.defaultPromoCode and request.promoCode. 
-export const promoCodeSchema = z - .string() - .min(3) - .max(40) - .regex(/^[a-zA-Z0-9_-]+$/, 'promo code must be url-safe (letters, digits, _ or -)'); - -export const vendorCreateSchema = z.object({ - name: z.string().min(2), - slug: z - .string() - .min(2) - .max(40) - .regex(/^[a-z0-9][a-z0-9-]*$/), - email: z.string().email().optional(), - websiteUrl: z.string().url().optional(), - logoUrl: z.string().url().optional(), - description: z.string().max(1000).optional(), - instanceUrl: z.string().url(), - instanceKey: z.string().min(8), // the admin key on the vendor's instance - // Optional for dev (we fall back to the port-swap convention); required - // in practice for production so share URLs resolve at the vendor's apex. - routerUrl: z.string().url().optional(), -}); - -export const creatorCreateSchema = z.object({ - name: z.string().min(2), - handle: z - .string() - .min(2) - .max(40) - .regex(/^[a-z0-9_]+$/), - email: z.string().email(), - bio: z.string().max(2000).optional(), - avatarUrl: z.string().url().optional(), - platforms: z.array(platformSchema).optional(), - defaultPromoCode: promoCodeSchema.optional(), -}); - -// PATCH — every field optional; empty string clears the value, missing -// keys mean "leave unchanged." Handle + email NOT editable: handle change -// invalidates partnership linkKeys on every vendor instance simultaneously -// and email is the magic-link identity. 
-export const creatorUpdateSchema = z.object({ - name: z.string().min(2).max(80).optional(), - bio: z.string().max(2000).nullable().optional(), - avatarUrl: z.string().url().nullable().optional(), - platforms: z.array(platformSchema).optional(), - defaultPromoCode: promoCodeSchema.nullable().optional(), -}); - -export const offeringCreateSchema = z.object({ - title: z.string().min(2).max(120), - productUrl: z.string().url(), - description: z.string().max(4000).optional(), - heroImageUrl: z.string().url().optional(), - vendorCampaignId: z.string().min(1), - terms: termsSchema, - published: z.boolean().optional(), -}); - -export const offeringUpdateSchema = offeringCreateSchema.partial(); - -export const requestCreateSchema = z.object({ - offeringId: z.string().min(1), - message: z.string().max(2000).optional(), - promoCode: promoCodeSchema.optional(), -}); - -export const requestDecideSchema = z.object({ - decisionNote: z.string().max(2000).optional(), -}); diff --git a/apps/api/src/payouts.ts b/apps/api/src/payouts.ts index 45cc94a..d70f16e 100644 --- a/apps/api/src/payouts.ts +++ b/apps/api/src/payouts.ts @@ -14,8 +14,13 @@ * * Idempotency: we stamp the transfer's idempotency key with the Payout id * (generated up front) so a retry after a crash does not double-transfer. + * + * Multi-tenant: takes (db, tenantId). Pass req.db from a route handler, or + * the privileged db with app.tenant_id pinned in the calling transaction + * from the scheduler. 
*/ +import type { Knex } from 'knex'; import { ulid } from 'ulid'; import { TABLES, @@ -23,7 +28,6 @@ import { type PartnerRow, type PayoutMethod, } from '@openpartner/db'; -import { db } from './db.js'; import { REVSHARE_FEE_BPS, getMode, requireStripe, type OpenPartnerMode } from './stripe.js'; import { dispatchEvent } from './webhook-dispatcher.js'; @@ -42,7 +46,7 @@ export interface PayoutRunResult { }>; } -export async function runPayouts(): Promise { +export async function runPayouts(db: Knex, tenantId: string): Promise { const mode = getMode(); const runId = ulid(); @@ -88,29 +92,28 @@ export async function runPayouts(): Promise { onboardingIncomplete ? 'failed' : /* manual */ 'pending'; - await db.transaction(async (trx) => { - await trx(TABLES.Payout).insert({ - id: payoutId, - partnerId: partner.id, - amount: amount.toFixed(2), - currency: group.currency, - method, - status: finalStatus, - metadata: { - runId, - platformFee, - commissionCount: commissions.length, - ...(onboardingIncomplete ? { error: 'stripe_onboarding_incomplete' } : {}), - }, - }); - // Only mark commissions paid on the manual-commit path. Connect - // path defers until after the transfer succeeds. - if (method === 'manual') { - await trx(TABLES.Commission) - .whereIn('id', commissions.map((c) => c.id)) - .update({ status: 'paid', paidAt: new Date(), payoutId }); - } + await db(TABLES.Payout).insert({ + id: payoutId, + tenantId, + partnerId: partner.id, + amount: amount.toFixed(2), + currency: group.currency, + method, + status: finalStatus, + metadata: { + runId, + platformFee, + commissionCount: commissions.length, + ...(onboardingIncomplete ? { error: 'stripe_onboarding_incomplete' } : {}), + }, }); + // Only mark commissions paid on the manual-commit path. Connect + // path defers until after the transfer succeeds. 
+ if (method === 'manual') { + await db(TABLES.Commission) + .whereIn('id', commissions.map((c) => c.id)) + .update({ status: 'paid', paidAt: new Date(), payoutId }); + } if (onboardingIncomplete) { results.push({ @@ -134,24 +137,22 @@ export async function runPayouts(): Promise { amount: Math.round(amount * 100), currency: group.currency.toLowerCase(), destination: partner.stripeConnectAccountId!, - metadata: { openpartner_payout_id: payoutId, mode }, + metadata: { openpartner_payout_id: payoutId, openpartner_tenant_id: tenantId, mode }, }, { idempotencyKey: `payout_${payoutId}` }, ); - await db.transaction(async (trx) => { - await trx(TABLES.Payout).where({ id: payoutId }).update({ - stripeTransferId: transfer.id, - status: 'paid', - completedAt: new Date(), - }); - await trx(TABLES.Commission) - .whereIn('id', commissions.map((c) => c.id)) - .update({ status: 'paid', paidAt: new Date(), payoutId }); + await db(TABLES.Payout).where({ id: payoutId }).update({ + stripeTransferId: transfer.id, + status: 'paid', + completedAt: new Date(), }); + await db(TABLES.Commission) + .whereIn('id', commissions.map((c) => c.id)) + .update({ status: 'paid', paidAt: new Date(), payoutId }); // Webhooks fire only after the success is durable. - dispatchEvent('payout.created', { + dispatchEvent(tenantId, 'payout.created', { payoutId, partnerId: partner.id, amount: amount.toFixed(2), @@ -161,7 +162,7 @@ export async function runPayouts(): Promise { platformFee: platformFee || undefined, }); for (const c of commissions) { - dispatchEvent('commission.paid', { + dispatchEvent(tenantId, 'commission.paid', { commissionId: c.id, partnerId: c.partnerId, amount: c.amount, @@ -198,7 +199,7 @@ export async function runPayouts(): Promise { } else { // Manual path: commissions are already marked paid in the tx above. // Fire webhooks now — the operator owns the out-of-band transfer. 
- dispatchEvent('payout.created', { + dispatchEvent(tenantId, 'payout.created', { payoutId, partnerId: partner.id, amount: amount.toFixed(2), @@ -208,7 +209,7 @@ export async function runPayouts(): Promise { platformFee: platformFee || undefined, }); for (const c of commissions) { - dispatchEvent('commission.paid', { + dispatchEvent(tenantId, 'commission.paid', { commissionId: c.id, partnerId: c.partnerId, amount: c.amount, @@ -230,4 +231,3 @@ export async function runPayouts(): Promise { return { runId, mode, payouts: results }; } - diff --git a/apps/api/src/platform-sessions.ts b/apps/api/src/platform-sessions.ts new file mode 100644 index 0000000..261ff10 --- /dev/null +++ b/apps/api/src/platform-sessions.ts @@ -0,0 +1,92 @@ +/** + * Platform-identity session helpers. + * + * The multi-tenant deployment's workspace picker hangs on this: after + * a magic link verifies, we issue a PlatformSession instead of a regular + * (tenant-scoped) Session. The SPA then reads /api/me/workspaces and + * exchanges the platform session for a regular Session via + * /api/workspaces/:slug/enter. + * + * Storage table: PlatformSession (no tenantId — this is identity, not + * a workspace acting state). + * + * Cookie: op_platform_session, scoped to '/'. 
+ */ + +import { createHash, randomBytes, timingSafeEqual } from 'node:crypto'; +import { ulid } from 'ulid'; +import type { CookieOptions } from 'express'; +import type { Knex } from 'knex'; +import { TABLES, type PlatformSessionRow } from '@openpartner/db'; + +export const PLATFORM_SESSION_COOKIE = 'op_platform_session'; +const TOKEN_PREFIX_LEN = 8; +const SESSION_TTL_MS = 30 * 24 * 60 * 60 * 1000; + +function hash(s: string): string { + return createHash('sha256').update(s).digest('hex'); +} + +function constantTimeEqual(a: string, b: string): boolean { + if (a.length !== b.length) { + timingSafeEqual(Buffer.alloc(32), Buffer.alloc(32)); + return false; + } + return timingSafeEqual(Buffer.from(a), Buffer.from(b)); +} + +export interface IssuedPlatformSession { + plaintext: string; + expiresAt: Date; +} + +export async function createPlatformSession(db: Knex, email: string): Promise { + const raw = randomBytes(24).toString('hex'); + const plaintext = `opsplat_${raw}`; + const prefix = plaintext.slice(0, TOKEN_PREFIX_LEN); + const tokenHash = hash(plaintext); + const expiresAt = new Date(Date.now() + SESSION_TTL_MS); + await db(TABLES.PlatformSession).insert({ + id: ulid(), + prefix, + tokenHash, + email: email.toLowerCase(), + expiresAt, + }); + return { plaintext, expiresAt }; +} + +export async function resolvePlatformSession(db: Knex, plaintext: string): Promise { + if (!plaintext || plaintext.length < TOKEN_PREFIX_LEN) return null; + const prefix = plaintext.slice(0, TOKEN_PREFIX_LEN); + const tokenHash = hash(plaintext); + const now = new Date(); + const row = await db(TABLES.PlatformSession) + .where({ prefix, tokenHash }) + .whereNull('revokedAt') + .andWhere('expiresAt', '>', now) + .first(); + if (!row) return null; + if (!constantTimeEqual(row.tokenHash, tokenHash)) return null; + void db(TABLES.PlatformSession).where({ id: row.id }).update({ lastSeenAt: now }); + return row; +} + +export async function revokePlatformSession(db: Knex, plaintext: 
string): Promise { + if (!plaintext || plaintext.length < TOKEN_PREFIX_LEN) return; + const prefix = plaintext.slice(0, TOKEN_PREFIX_LEN); + const tokenHash = hash(plaintext); + await db(TABLES.PlatformSession) + .where({ prefix, tokenHash }) + .update({ revokedAt: new Date() }); +} + +export function platformSessionCookieOptions(): CookieOptions { + return { + httpOnly: true, + secure: process.env.NODE_ENV === 'production', + sameSite: 'lax', + path: '/', + maxAge: SESSION_TTL_MS, + }; +} diff --git a/apps/api/src/routes/account-deletion.ts b/apps/api/src/routes/account-deletion.ts new file mode 100644 index 0000000..62503f4 --- /dev/null +++ b/apps/api/src/routes/account-deletion.ts @@ -0,0 +1,221 @@ +/** + * Brand-side account deletion. Two-phase, GDPR-aligned: + * + * Phase 1 — soft delete: admin clicks Delete, we stamp + * Tenant.pendingDeletionAt + reason, revoke all admin Sessions, + * and respond. The brand is immediately locked out (the tenancy + * middleware refuses requests for a tenant that's pending deletion). + * + * Phase 2 — hard delete: a scheduler sweep finds tenants past the + * 30-day grace window and cascades the wipe. Until then, an admin + * can call /account/restore to clear pendingDeletionAt and recover. + * + * NOT yet handled (TODOs that should land before public launch): + * - Stripe subscription cancellation. Today the brand keeps getting + * billed inside the grace window. Should at least set the sub to + * cancel-at-period-end at delete-time. + * - Network-side vendor row revoke (the Network keeps thinking we're + * federated). Needs a /vendors/me/delete on the Network. + * - Any pending payouts / unpaid commissions guard — right now we + * allow deletion regardless. A real production gate would refuse + * until the ledger is settled. 
+ */ + +import { Router } from 'express'; +import { z } from 'zod'; +import { TABLES, type TenantRow } from '@openpartner/db'; +import { db } from '../db.js'; +import { requireAdmin, requireAuth } from '../auth.js'; +import { tenantOf } from '../tenancy.js'; +import { stripe } from '../stripe.js'; +import { adminRestoreVendor, getNetworkMembership, networkProxy, saveNetworkMembership, NetworkProxyError } from '../network-client.js'; + +export const accountDeletionRouter = Router(); + +interface PendingObligations { + /** Sum of unpaid commission amounts (decimal, in the Commission's + * currency — typically USD). Field renamed from the original + * *Cents to match the actual `amount` decimal column. */ + unpaidCommissionAmount: number; + pendingPayoutCount: number; +} + +async function checkPendingObligations(tenantId: string): Promise { + // Commissions are unpaid when status is 'accrued' (newly recorded, + // pending admin approval) or 'approved' (cleared but not paid out). + // 'paid' and 'reversed' are settled. + const [c] = await db(TABLES.Commission) + .where({ tenantId }) + .whereIn('status', ['accrued', 'approved']) + .sum>({ sum: 'amount' }); + // Payouts are mid-flight when 'pending'. 'paid' and 'failed' are + // resolved states. (No 'processing' status exists.) + const [p] = await db(TABLES.Payout) + .where({ tenantId }) + .where('status', 'pending') + .count>({ count: '*' }); + return { + unpaidCommissionAmount: Number(c?.sum ?? 0), + pendingPayoutCount: Number(p?.count ?? 0), + }; +} + +const deleteSchema = z.object({ + confirmSlug: z.string().min(1), + reason: z.string().max(2000).optional(), + /** When set, ignore the pending-payouts/unpaid-commissions guard. 
*/ + forceDespiteObligations: z.boolean().optional(), +}); + +accountDeletionRouter.post('/account/delete', requireAuth, requireAdmin, async (req, res) => { + const { tenantId } = tenantOf(req); + + const tenant = await db(TABLES.Tenant).where({ id: tenantId }).first(); + if (!tenant) return res.status(404).json({ error: 'tenant_not_found' }); + if (tenant.pendingDeletionAt) { + return res.status(409).json({ + error: 'already_pending_deletion', + pendingDeletionAt: tenant.pendingDeletionAt, + }); + } + + const body = deleteSchema.safeParse(req.body ?? {}); + if (!body.success) return res.status(400).json({ error: 'invalid_body', detail: body.error.flatten() }); + + // Defensive: require the admin to type their own slug to confirm. + if (body.data.confirmSlug !== tenant.slug) { + return res.status(400).json({ error: 'confirm_slug_mismatch' }); + } + + // Financial obligations gate. The brand can override with + // forceDespiteObligations after they've seen the warning surface in + // the UI; without that flag, refuse so we don't strand creators. + const obligations = await checkPendingObligations(tenantId); + const hasObligations = obligations.unpaidCommissionAmount > 0 || obligations.pendingPayoutCount > 0; + if (hasObligations && !body.data.forceDespiteObligations) { + return res.status(409).json({ + error: 'pending_obligations', + detail: obligations, + hint: 'Settle these or pass forceDespiteObligations=true to delete anyway.', + }); + } + + // Stripe: cancel-at-period-end (not immediate) so the brand isn't + // charged again at renewal. They keep access for the rest of the + // current billing period — which is fine, because soft-delete + // immediately locks them out of the app anyway. The Stripe sub + // closing on its own is the cleanest thing for refunds + accounting. 
+ if (tenant.stripeSubscriptionId && stripe) { + try { + await stripe.subscriptions.update(tenant.stripeSubscriptionId, { + cancel_at_period_end: true, + metadata: { openpartner_deletion_reason: body.data.reason ?? '' }, + }); + } catch (err) { + console.error('[account-deletion] stripe cancel failed', { tenantId, err }); + // Don't block deletion on Stripe errors — operator can clean up + // billing manually if needed. Log loudly. + } + } + + // Network: tell the federation hub this brand is going away. The + // Network marks the vendor cancelled + revokes affiliations so the + // creator UI hides the brand. Membership stays in our Config (with + // enabled=false) so a restore can re-mint a vendorToken. + const membership = await getNetworkMembership(db, tenantId); + if (membership && membership.enabled && membership.networkUrl) { + try { + await networkProxy.deleteVendor(db, tenantId); + await saveNetworkMembership(db, tenantId, { enabled: false }); + } catch (err) { + const detail = err instanceof NetworkProxyError ? err.message : String(err); + console.error('[account-deletion] network delete failed', { tenantId, detail }); + // Same reasoning as Stripe — don't block. The vendor row stays + // alive on Network until either the operator cleans it up or + // the scheduler hard-delete pass eventually purges it. + } + } + + await db.transaction(async (trx) => { + await trx(TABLES.Tenant).where({ id: tenantId }).update({ + pendingDeletionAt: new Date(), + deletionReason: body.data.reason ?? 
null, + updatedAt: new Date(), + }); + await trx(TABLES.Session) + .where({ tenantId }) + .whereNull('revokedAt') + .update({ revokedAt: new Date() }); + }); + + res.json({ + ok: true, + pendingDeletionAt: new Date(), + graceWindowDays: 30, + obligationsAtDelete: obligations, + }); +}); + +accountDeletionRouter.post('/account/restore', requireAuth, requireAdmin, async (req, res) => { + // Reachable inside the grace window because tenantMiddleware lets + // recovery routes through (see TenantPendingDeletionError exemption). + const { tenantId } = tenantOf(req); + + const tenant = await db(TABLES.Tenant).where({ id: tenantId }).first(); + if (!tenant) return res.status(404).json({ error: 'tenant_not_found' }); + if (!tenant.pendingDeletionAt) { + return res.status(409).json({ error: 'not_pending_deletion' }); + } + + // Restore Network vendor row + grab a fresh vendorToken (the original + // was burned when delete fired). Needs NETWORK_ADMIN_API_KEY + the + // vendorId we persisted at signup. If either is missing the local + // restore still works; the operator can manually re-onboard the + // brand to the Network. + const membership = await getNetworkMembership(db, tenantId); + if (membership && membership.networkUrl && membership.vendorId && process.env.NETWORK_ADMIN_API_KEY) { + try { + const restored = await adminRestoreVendor(membership.networkUrl, membership.vendorId); + await saveNetworkMembership(db, tenantId, { + enabled: true, + vendorToken: restored.vendorToken, + }); + } catch (err) { + console.error('[account-deletion] network restore failed', { tenantId, err }); + } + } + + await db(TABLES.Tenant).where({ id: tenantId }).update({ + pendingDeletionAt: null, + deletionReason: null, + updatedAt: new Date(), + }); + + // Stripe: clear cancel_at_period_end so the brand keeps billing. 
+ if (tenant.stripeSubscriptionId && stripe) { + try { + await stripe.subscriptions.update(tenant.stripeSubscriptionId, { + cancel_at_period_end: false, + }); + } catch (err) { + console.error('[account-deletion] stripe uncancel failed', { tenantId, err }); + } + } + + res.json({ ok: true }); +}); + +accountDeletionRouter.get('/account/deletion-status', requireAuth, requireAdmin, async (req, res) => { + const { tenantId } = tenantOf(req); + const tenant = await db(TABLES.Tenant).where({ id: tenantId }).first(); + if (!tenant) return res.status(404).json({ error: 'tenant_not_found' }); + + res.json({ + pendingDeletionAt: tenant.pendingDeletionAt, + deletionReason: tenant.deletionReason, + graceWindowDays: 30, + hardDeleteAt: tenant.pendingDeletionAt + ? new Date(tenant.pendingDeletionAt.getTime() + 30 * 24 * 60 * 60 * 1000) + : null, + }); +}); diff --git a/apps/api/src/routes/admin-overview.ts b/apps/api/src/routes/admin-overview.ts index 392c66e..6ab95b1 100644 --- a/apps/api/src/routes/admin-overview.ts +++ b/apps/api/src/routes/admin-overview.ts @@ -1,7 +1,7 @@ import { Router } from 'express'; import { TABLES, type PartnerRow } from '@openpartner/db'; -import { db } from '../db.js'; import { requireAdmin, requireAuth } from '../auth.js'; +import { tenantOf } from '../tenancy.js'; export const adminOverviewRouter = Router(); @@ -12,7 +12,8 @@ export const adminOverviewRouter = Router(); * Window defaults to 30 days; the Dub-style partner cards don't need more * granularity than that for the high-level view. 
*/ -adminOverviewRouter.get('/admin/overview', requireAuth, requireAdmin, async (_req, res) => { +adminOverviewRouter.get('/admin/overview', requireAuth, requireAdmin, async (req, res) => { + const { db } = tenantOf(req); const since = new Date(Date.now() - 30 * 24 * 60 * 60 * 1000); const partners = await db(TABLES.Partner).orderBy('createdAt', 'desc').limit(200); diff --git a/apps/api/src/routes/admins.ts b/apps/api/src/routes/admins.ts new file mode 100644 index 0000000..352cdbd --- /dev/null +++ b/apps/api/src/routes/admins.ts @@ -0,0 +1,174 @@ +/** + * Admin persona management — list, invite, revoke, reinstate. + * + * Guard rails: + * - Can't revoke the last active admin (would lock everyone out) + * - Can't revoke yourself (session-sourced admins only; env-sourced + * admins can revoke anyone since they're a fallback root) + * + * The ADMIN_API_KEY env stays valid alongside admin personas as a + * bootstrap / headless path — think `doctl` or CI running migrations. + */ + +import { Router } from 'express'; +import type { Knex } from 'knex'; +import { z } from 'zod'; +import { ulid } from 'ulid'; +import { TABLES, type AdminRow, type ConfigRow, type SessionRow } from '@openpartner/db'; +import { requireAdmin, requireAuth } from '../auth.js'; +import { issueMagicLink } from '../auth-sessions.js'; +import { getMailer } from '../mailer.js'; +import { adminInviteEmail, buildMagicLinkUrl } from '../email-templates.js'; +import { tenantOf } from '../tenancy.js'; + +export const adminsRouter = Router(); + +const inviteSchema = z.object({ + email: z.string().email(), + name: z.string().min(1).max(120), +}); + +const revokeSchema = z.object({ + reason: z.string().max(500).optional(), +}); + +async function getProgramName(db: Knex): Promise { + const row = await db(TABLES.Config).where({ key: 'program_settings' }).first(); + const value = (row?.value ?? {}) as { programName?: string }; + return value.programName ?? 
null; +} + +async function sendInvite(db: Knex, tenantId: string, admin: AdminRow, tenantSlug?: string | null): Promise { + const issued = await issueMagicLink(db, { + tenantId, + email: admin.email, + purpose: 'admin_invite', + principalKind: 'admin', + principalId: admin.id, + }); + const tmpl = adminInviteEmail(admin.name, buildMagicLinkUrl(issued.plaintext, tenantSlug), await getProgramName(db)); + await getMailer().send({ db, tenantId }, { + to: admin.email, + subject: tmpl.subject, + text: tmpl.text, + html: tmpl.html, + tag: 'admin_invite', + metadata: { purpose: 'admin_invite', adminId: admin.id }, + }); +} + +// -------- List -------- + +adminsRouter.get('/admins', requireAuth, requireAdmin, async (req, res) => { + const { db } = tenantOf(req); + const admins = await db(TABLES.Admin).orderBy('createdAt', 'desc'); + res.json({ admins }); +}); + +// -------- Invite -------- + +adminsRouter.post('/admins', requireAuth, requireAdmin, async (req, res) => { + const { db, tenantId } = tenantOf(req); + const body = inviteSchema.safeParse(req.body); + if (!body.success) return res.status(400).json({ error: 'invalid_body', detail: body.error.flatten() }); + + const email = body.data.email.toLowerCase(); + const existing = await db(TABLES.Admin).where({ email }).first(); + if (existing) return res.status(409).json({ error: 'email_taken' }); + + const id = ulid(); + const [admin] = await db(TABLES.Admin) + .insert({ id, tenantId, email, name: body.data.name, activatedAt: null }) + .returning('*'); + await sendInvite(db, tenantId, admin as AdminRow, req.tenantSlug); + res.status(201).json(admin); +}); + +// -------- Resend invite -------- + +adminsRouter.post('/admins/:id/invite', requireAuth, requireAdmin, async (req, res) => { + const { db, tenantId } = tenantOf(req); + const admin = await db(TABLES.Admin).where({ id: req.params.id }).first(); + if (!admin) return res.status(404).json({ error: 'not_found' }); + if (admin.revokedAt) return res.status(409).json({ 
error: 'admin_revoked' }); + if (admin.activatedAt) return res.status(409).json({ error: 'already_activated' }); + await sendInvite(db, tenantId, admin, req.tenantSlug); + res.json({ ok: true }); +}); + +// -------- Revoke -------- + +class RevokeGuardError extends Error { + constructor(public code: string) { + super(code); + } +} + +adminsRouter.post('/admins/:id/revoke', requireAuth, requireAdmin, async (req, res) => { + const { db } = tenantOf(req); + const body = revokeSchema.safeParse(req.body ?? {}); + if (!body.success) return res.status(400).json({ error: 'invalid_body', detail: body.error.flatten() }); + + const caller = req.principal; + const now = new Date(); + + try { + // The request is already in a transaction (per-request via tenantMiddleware), + // so use it directly. No nested transaction needed — the FOR UPDATE locks + // are scoped to this transaction and released on commit. + // Lock the target admin + all active admins FOR UPDATE so + // concurrent revoke calls serialize. Without this, two simultaneous + // revokes of the only two active admins both pass the "count > 1" + // check and lock everyone out. + const admin = await db(TABLES.Admin).where({ id: req.params.id }).forUpdate().first(); + if (!admin) throw new RevokeGuardError('not_found'); + if (admin.revokedAt) throw new RevokeGuardError('already_revoked'); + + // Can't revoke yourself (session-sourced admins only — env-bearer + // callers are a headless / emergency path and can revoke anyone). + if (caller?.role === 'admin' && caller.source === 'session' && caller.adminId === admin.id) { + throw new RevokeGuardError('cannot_revoke_self'); + } + + // Last-active-admin guard inside the same tx. Count active admins + // under FOR UPDATE so the count is stable through commit. 
+ const actives = await db(TABLES.Admin) + .whereNotNull('activatedAt') + .whereNull('revokedAt') + .forUpdate(); + const willRemove = admin.activatedAt && !admin.revokedAt; + if (willRemove && actives.length <= 1) { + throw new RevokeGuardError('cannot_revoke_last_active_admin'); + } + + await db(TABLES.Admin) + .where({ id: admin.id }) + .update({ revokedAt: now, revokeReason: body.data.reason ?? null, updatedAt: now }); + // Kill admin sessions. + await db(TABLES.Session) + .where({ principalKind: 'admin', principalId: admin.id }) + .whereNull('revokedAt') + .update({ revokedAt: now }); + } catch (err) { + if (err instanceof RevokeGuardError) { + const status = err.code === 'not_found' ? 404 : 409; + return res.status(status).json({ error: err.code }); + } + throw err; + } + + res.json({ ok: true, revokedAt: now }); +}); + +// -------- Reinstate -------- + +adminsRouter.post('/admins/:id/reinstate', requireAuth, requireAdmin, async (req, res) => { + const { db } = tenantOf(req); + const admin = await db(TABLES.Admin).where({ id: req.params.id }).first(); + if (!admin) return res.status(404).json({ error: 'not_found' }); + if (!admin.revokedAt) return res.status(409).json({ error: 'not_revoked' }); + await db(TABLES.Admin) + .where({ id: admin.id }) + .update({ revokedAt: null, revokeReason: null, updatedAt: new Date() }); + res.json({ ok: true }); +}); diff --git a/apps/api/src/routes/api-keys.ts b/apps/api/src/routes/api-keys.ts index ef87cf1..e46a233 100644 --- a/apps/api/src/routes/api-keys.ts +++ b/apps/api/src/routes/api-keys.ts @@ -1,8 +1,8 @@ import { Router } from 'express'; import { z } from 'zod'; import { TABLES, type ApiKeyRow } from '@openpartner/db'; -import { db } from '../db.js'; import { createApiKeyRow, requireAdmin, requireAuth, requirePartnerOrAdmin } from '../auth.js'; +import { tenantOf } from '../tenancy.js'; const createSchema = z.object({ label: z.string().optional() }); @@ -18,15 +18,19 @@ export const NETWORK_FEDERATION_SCOPES = [ 
'partners:read', 'links:write', 'commissions:read', + // Network pushes federated clicks (creator-domain traffic) back to + // the vendor instance so brand-side analytics still see them. + 'clicks:write', ] as const; export const apiKeysRouter = Router(); // Admin: create admin key (ADMIN_API_KEY env is the first-class bootstrap; this is for rotation). apiKeysRouter.post('/api-keys', requireAuth, requireAdmin, async (req, res) => { + const { db, tenantId } = tenantOf(req); const body = createSchema.safeParse(req.body); if (!body.success) return res.status(400).json({ error: 'invalid_body', detail: body.error.flatten() }); - const key = await createApiKeyRow({ partnerId: null, label: body.data.label ?? undefined }); + const key = await createApiKeyRow(db, { tenantId, partnerId: null, label: body.data.label ?? undefined }); res.status(201).json({ id: key.id, plaintext: key.plaintext }); }); @@ -34,9 +38,10 @@ apiKeysRouter.post('/api-keys', requireAuth, requireAdmin, async (req, res) => { // integrations (OpenPartner Network federation is the canonical example) // so a leak of the stored credential can't escalate to full admin. apiKeysRouter.post('/api-keys/scoped', requireAuth, requireAdmin, async (req, res) => { + const { db, tenantId } = tenantOf(req); const body = scopedCreateSchema.safeParse(req.body); if (!body.success) return res.status(400).json({ error: 'invalid_body', detail: body.error.flatten() }); - const key = await createApiKeyRow({ scopes: body.data.scopes, label: body.data.label ?? 'scoped' }); + const key = await createApiKeyRow(db, { tenantId, scopes: body.data.scopes, label: body.data.label ?? 
'scoped' }); res.status(201).json({ id: key.id, plaintext: key.plaintext, scopes: body.data.scopes }); }); @@ -46,12 +51,13 @@ apiKeysRouter.post( requireAuth, requirePartnerOrAdmin('id'), async (req, res) => { + const { db, tenantId } = tenantOf(req); const body = createSchema.safeParse(req.body); if (!body.success) return res.status(400).json({ error: 'invalid_body', detail: body.error.flatten() }); const partnerId = req.params.id!; const partner = await db(TABLES.Partner).where({ id: partnerId }).first(); if (!partner) return res.status(404).json({ error: 'partner_not_found' }); - const key = await createApiKeyRow({ partnerId, label: body.data.label }); + const key = await createApiKeyRow(db, { tenantId, partnerId, label: body.data.label }); res.status(201).json({ id: key.id, plaintext: key.plaintext }); }, ); @@ -61,6 +67,7 @@ apiKeysRouter.get( requireAuth, requirePartnerOrAdmin('id'), async (req, res) => { + const { db } = tenantOf(req); const keys = await db(TABLES.ApiKey) .where({ partnerId: req.params.id }) .select('id', 'prefix', 'label', 'createdAt', 'lastUsedAt', 'revokedAt') @@ -71,6 +78,7 @@ apiKeysRouter.get( // Revoke. Admin can revoke any key; partner can only revoke their own. 
apiKeysRouter.delete('/api-keys/:keyId', requireAuth, async (req, res) => { + const { db } = tenantOf(req); const key = await db(TABLES.ApiKey).where({ id: req.params.keyId }).first(); if (!key) return res.status(404).json({ error: 'not_found' }); diff --git a/apps/api/src/routes/auth.ts b/apps/api/src/routes/auth.ts index e2f5e9f..acc0a85 100644 --- a/apps/api/src/routes/auth.ts +++ b/apps/api/src/routes/auth.ts @@ -1,7 +1,7 @@ import { Router } from 'express'; -import { TABLES, type NetworkCreatorRow, type NetworkVendorRow, type PartnerRow } from '@openpartner/db'; -import { db } from '../db.js'; +import { TABLES, type AdminRow, type PartnerRow } from '@openpartner/db'; import { requireAuth } from '../auth.js'; +import { tenantOf } from '../tenancy.js'; export const authRouter = Router(); @@ -9,10 +9,6 @@ export const authRouter = Router(); * Reports the calling key's permission set so upstream integrations (like * the OpenPartner Network) can verify the key they've been handed actually * has the scopes they need — and loudly warn if it's unrestricted. - * - * scoped key → { role: 'scoped', scopes: [...] } - * admin / env → { role: 'admin', unrestricted: true } - * partner / vendor → { role, restrictedTo: ... } */ authRouter.get('/auth/introspect', requireAuth, async (req, res) => { const p = req.principal!; @@ -25,22 +21,24 @@ authRouter.get('/auth/introspect', requireAuth, async (req, res) => { if (p.role === 'partner') { return res.json({ role: 'partner', restrictedTo: { partnerId: p.partnerId } }); } - if (p.role === 'network_vendor') { - return res.json({ role: 'network_vendor', restrictedTo: { networkVendorId: p.networkVendorId } }); - } - if (p.role === 'network_creator') { - return res.json({ role: 'network_creator', restrictedTo: { networkCreatorId: p.networkCreatorId } }); - } }); /** * Returns the caller's principal shape — used by the portal to decide what - * to render after login. 
Surfaces the Network role when the key belongs to - * a vendor or creator so the portal can route to /network views. + * to render after login. */ authRouter.get('/auth/whoami', requireAuth, async (req, res) => { + const { db } = tenantOf(req); const p = req.principal!; if (p.role === 'admin') { + if (p.source === 'session') { + const admin = await db(TABLES.Admin).where({ id: p.adminId }).first(); + return res.json({ + role: 'admin', + source: 'session', + admin: admin ? { id: admin.id, email: admin.email, name: admin.name } : null, + }); + } return res.json({ role: 'admin', source: p.source }); } if (p.role === 'partner') { @@ -53,39 +51,6 @@ authRouter.get('/auth/whoami', requireAuth, async (req, res) => { : null, }); } - if (p.role === 'network_vendor') { - const vendor = await db(TABLES.NetworkVendor).where({ id: p.networkVendorId }).first(); - return res.json({ - role: 'network_vendor', - networkVendorId: p.networkVendorId, - vendor: vendor - ? { - id: vendor.id, - name: vendor.name, - slug: vendor.slug, - logoUrl: vendor.logoUrl, - websiteUrl: vendor.websiteUrl, - status: vendor.status, - } - : null, - }); - } - if (p.role === 'network_creator') { - const creator = await db(TABLES.NetworkCreator).where({ id: p.networkCreatorId }).first(); - return res.json({ - role: 'network_creator', - networkCreatorId: p.networkCreatorId, - creator: creator - ? { - id: creator.id, - name: creator.name, - handle: creator.handle, - email: creator.email, - avatarUrl: creator.avatarUrl, - defaultPromoCode: creator.defaultPromoCode, - status: creator.status, - } - : null, - }); - } + // Scoped keys used by federation clients don't need a human-facing whoami. + res.json({ role: p.role }); }); diff --git a/apps/api/src/routes/billing.ts b/apps/api/src/routes/billing.ts index d660b18..117f483 100644 --- a/apps/api/src/routes/billing.ts +++ b/apps/api/src/routes/billing.ts @@ -1,43 +1,80 @@ /** - * Merchant billing endpoints — only meaningful in hosted modes. 
+ * Per-tenant billing endpoints. * - * selfhost → /billing/status responds 'selfhost', everything else 404. - * flat → Stripe Checkout + Customer Portal for the merchant's - * monthly subscription. - * revshare → /billing/status surfaces accrued platform fees; collection is - * handled out-of-band against the 3% retained on payouts. + * Each tenant on a hosted multi-tenant deployment picks one of: + * + * flex $49/mo + 1.5% metered. Stripe subscription with two + * line items (base + usage), 14-day trial. + * revshare 3% metered, no monthly. Stripe subscription with the + * metered line item only, 14-day trial. + * enterprise Custom — no Stripe sub, billed out of band. + * + * Selfhost installs run as one tenant under the global OPENPARTNER_MODE + * env (selfhost / flat / revshare). The per-tenant resolver in + * billing-plan.ts unifies both paths so this route stays mode-agnostic. + * + * Stripe Customer + Subscription IDs live on the Tenant row + * (stripeCustomerId, stripeSubscriptionId). The legacy Config-keyed + * store (stripe.merchant.{customerId,subscriptionId}) is back-filled + * by the tenant_billing_plan migration; this file only reads/writes + * Tenant. 
*/ import { Router } from 'express'; +import type { Knex } from 'knex'; import { z } from 'zod'; -import { TABLES } from '@openpartner/db'; -import { db } from '../db.js'; +import { TABLES, BILLING_PLANS, type BillingPlan, type TenantRow } from '@openpartner/db'; import { requireAdmin, requireAuth } from '../auth.js'; -import { REVSHARE_FEE_BPS, getMode, requireStripe } from '../stripe.js'; -import { CONFIG_KEYS, getConfig, setConfig } from '../config.js'; +import { REVSHARE_FEE_BPS, requireStripe } from '../stripe.js'; +import { getTenantBillingState, priceIdsForPlan, TRIAL_DAYS } from '../billing-plan.js'; +import { reportUsageToStripe } from '../usage-billing.js'; +import { tenantOf } from '../tenancy.js'; export const billingRouter = Router(); -billingRouter.get('/billing/status', requireAuth, requireAdmin, async (_req, res) => { - const mode = getMode(); +billingRouter.get('/billing/status', requireAuth, requireAdmin, async (req, res) => { + const { db, tenantId } = tenantOf(req); + const state = await getTenantBillingState(db, tenantId); + const mode = state.mode; + if (mode === 'selfhost') { - return res.json({ mode, billed: false }); + return res.json({ mode, plan: null, billed: false }); + } + + if (state.plan === 'enterprise') { + return res.json({ + mode, + plan: state.plan, + enterprise: true, + message: 'Billed out of band — contact your account manager.', + }); } if (mode === 'flat') { - const subscriptionId = await getConfig(CONFIG_KEYS.StripeMerchantSubscriptionId); - if (!subscriptionId) return res.json({ mode, subscribed: false }); + if (!state.stripeSubscriptionId) { + return res.json({ + mode, + plan: state.plan, + subscribed: false, + trialEndsAt: state.trialEndsAt, + inTrial: state.inTrial, + hasUsedTrial: state.hasUsedTrial, + trialExpiredWithoutSubscription: state.trialExpiredWithoutSubscription, + }); + } const stripe = requireStripe(); - const sub = await stripe.subscriptions.retrieve(subscriptionId); + const sub = await 
stripe.subscriptions.retrieve(state.stripeSubscriptionId); const periodEnd = (sub as unknown as { current_period_end?: number }).current_period_end ?? sub.items.data[0]?.current_period_end ?? null; return res.json({ mode, + plan: state.plan, subscribed: true, subscriptionId: sub.id, status: sub.status, currentPeriodEnd: periodEnd, + trialEnd: sub.trial_end, }); } @@ -50,58 +87,263 @@ billingRouter.get('/billing/status', requireAuth, requireAdmin, async (_req, res res.json({ mode, + plan: state.plan, feeRate: `${REVSHARE_FEE_BPS / 100}%`, accruedPlatformFees: fees.reduce>((acc, f) => { acc[f.currency] = Number(f.fee); return acc; }, {}), + subscribed: !!state.stripeSubscriptionId, + trialEndsAt: state.trialEndsAt, + inTrial: state.inTrial, }); }); +/** + * Set the tenant's plan in-place when none was picked at signup + * (legacy tenants + anyone who signed up before the marketing + * pricing CTAs). Refuses to overwrite an existing plan — those go + * through the Stripe Customer Portal so the subscription's price + * IDs change in lockstep with our local mirror. 
+ */ +const setPlanSchema = z.object({ + plan: z.enum(BILLING_PLANS as readonly [string, ...string[]]), +}); +billingRouter.post('/billing/plan', requireAuth, requireAdmin, async (req, res) => { + const { db, tenantId } = tenantOf(req); + const body = setPlanSchema.safeParse(req.body); + if (!body.success) return res.status(400).json({ error: 'invalid_body', detail: body.error.flatten() }); + + const state = await getTenantBillingState(db, tenantId); + if (state.plan) { + return res.status(409).json({ + error: 'plan_already_set', + detail: 'Use the Stripe Customer Portal to switch plans on an active subscription.', + currentPlan: state.plan, + }); + } + + await db(TABLES.Tenant) + .where({ id: tenantId }) + .update({ billingPlan: body.data.plan as BillingPlan, updatedAt: new Date() }); + + res.json({ ok: true, plan: body.data.plan }); +}); + const checkoutSchema = z.object({ - priceId: z.string().min(1).optional(), successUrl: z.string().url(), cancelUrl: z.string().url(), customerEmail: z.string().email().optional(), }); billingRouter.post('/billing/checkout', requireAuth, requireAdmin, async (req, res) => { - if (getMode() !== 'flat') return res.status(400).json({ error: 'only_flat_mode' }); + const { db, tenantId } = tenantOf(req); + const state = await getTenantBillingState(db, tenantId); + + if (state.plan === 'enterprise') { + return res.status(400).json({ error: 'enterprise_plan_no_checkout', detail: 'Enterprise tenants are billed out of band.' }); + } + if (state.mode === 'selfhost') { + return res.status(400).json({ error: 'no_billing_in_selfhost' }); + } + if (state.stripeSubscriptionId) { + return res.status(409).json({ error: 'already_subscribed', detail: 'Use the Customer Portal to change plans.' }); + } + if (!state.plan) { + return res.status(400).json({ error: 'no_plan_chosen', detail: 'Pick a plan first via /admin/billing.' 
}); + } + const body = checkoutSchema.safeParse(req.body); if (!body.success) return res.status(400).json({ error: 'invalid_body', detail: body.error.flatten() }); - const priceId = body.data.priceId ?? process.env.STRIPE_FLAT_PRICE_ID; - if (!priceId) return res.status(500).json({ error: 'no_flat_price_configured' }); + let lineItems: ReturnType; + try { + lineItems = priceIdsForPlan(state.plan); + } catch (err) { + return res.status(500).json({ error: 'stripe_price_not_configured', detail: err instanceof Error ? err.message : String(err) }); + } + if (!lineItems) { + return res.status(400).json({ error: 'enterprise_plan_no_checkout' }); + } const stripe = requireStripe(); + + // Reuse Stripe Customer if one exists from a prior aborted Checkout + // attempt; otherwise create + persist on Tenant.stripeCustomerId so + // the Customer Portal works on the same record after subscription + // completion. + let customerId = state.stripeCustomerId; + if (!customerId) { + const customer = await stripe.customers.create({ + email: body.data.customerEmail, + metadata: { + openpartner_role: 'merchant_self_subscription', + openpartner_tenant_id: tenantId, + }, + }); + customerId = customer.id; + await db(TABLES.Tenant) + .where({ id: tenantId }) + .update({ stripeCustomerId: customerId, updatedAt: new Date() }); + } + + // Trial: 14 days on first activation, no payment method required to + // start. Stripe emails the customer ~3 days before the trial ends + // asking for a card; if they don't provide one, the subscription + // cancels automatically (trial_settings.end_behavior). + // + // Re-subscription after a prior trial: skip the trial entirely and + // require a card up front. firstTrialActivatedAt is set the first + // time a checkout-with-trial completes; once it's stamped, no second + // trial. 
+ const includeTrial = !state.hasUsedTrial; const session = await stripe.checkout.sessions.create({ mode: 'subscription', - line_items: [{ price: priceId, quantity: 1 }], + customer: customerId, + line_items: lineItems, + subscription_data: includeTrial + ? { + trial_period_days: TRIAL_DAYS, + trial_settings: { + end_behavior: { missing_payment_method: 'cancel' }, + }, + } + : undefined, + payment_method_collection: includeTrial ? 'if_required' : 'always', success_url: body.data.successUrl, cancel_url: body.data.cancelUrl, - customer_email: body.data.customerEmail, + metadata: { + openpartner_tenant_id: tenantId, + openpartner_plan: state.plan, + openpartner_trial: includeTrial ? '1' : '0', + }, }); - res.json({ url: session.url }); + res.json({ url: session.url, trial: includeTrial }); +}); + +billingRouter.post('/billing/report-usage', requireAuth, requireAdmin, async (req, res) => { + const { db, tenantId } = tenantOf(req); + const state = await getTenantBillingState(db, tenantId); + if (state.mode === 'selfhost') { + return res.status(400).json({ error: 'no_billing_in_selfhost' }); + } + try { + const result = await reportUsageToStripe(db, tenantId); + res.json(result); + } catch (err) { + res.status(500).json({ + error: 'usage_report_failed', + detail: err instanceof Error ? err.message : String(err), + }); + } }); billingRouter.post('/billing/portal', requireAuth, requireAdmin, async (req, res) => { - if (getMode() !== 'flat') return res.status(400).json({ error: 'only_flat_mode' }); + const { db, tenantId } = tenantOf(req); + const state = await getTenantBillingState(db, tenantId); + // Both flat and revshare have a Stripe Customer + subscription, both + // benefit from the Portal (payment method updates, invoice history, + // plan switching). Selfhost + enterprise: nothing to manage here. 
+ if (state.mode === 'selfhost' || state.plan === 'enterprise') { + return res.status(400).json({ error: 'no_portal_for_this_plan' }); + } const body = z.object({ returnUrl: z.string().url() }).safeParse(req.body); if (!body.success) return res.status(400).json({ error: 'invalid_body', detail: body.error.flatten() }); - const customerId = await getConfig(CONFIG_KEYS.StripeMerchantCustomerId); - if (!customerId) return res.status(404).json({ error: 'no_customer_on_file' }); + if (!state.stripeCustomerId) return res.status(404).json({ error: 'no_customer_on_file' }); const stripe = requireStripe(); const session = await stripe.billingPortal.sessions.create({ - customer: customerId, + customer: state.stripeCustomerId, return_url: body.data.returnUrl, }); res.json({ url: session.url }); }); -// Exposed for the stripe webhook to call on checkout.session.completed. -export async function persistMerchantSubscription(customerId: string, subscriptionId: string): Promise { - await setConfig(CONFIG_KEYS.StripeMerchantCustomerId, customerId); - await setConfig(CONFIG_KEYS.StripeMerchantSubscriptionId, subscriptionId); +/** + * Recent invoices for the tenant. Used by the Admin → Billing page. + * Returns a thin slice — full detail lives in the Stripe Customer Portal. 
+ */ +billingRouter.get('/billing/invoices', requireAuth, requireAdmin, async (req, res) => { + const { db, tenantId } = tenantOf(req); + const state = await getTenantBillingState(db, tenantId); + if (!state.stripeCustomerId) return res.json({ invoices: [] }); + + const stripe = requireStripe(); + const list = await stripe.invoices.list({ customer: state.stripeCustomerId, limit: 12 }); + res.json({ + invoices: list.data.map((inv) => ({ + id: inv.id, + number: inv.number, + status: inv.status, + amountDue: inv.amount_due, + amountPaid: inv.amount_paid, + currency: inv.currency, + created: inv.created, + periodStart: inv.period_start, + periodEnd: inv.period_end, + hostedInvoiceUrl: inv.hosted_invoice_url, + invoicePdf: inv.invoice_pdf, + })), + }); +}); + +/** + * Webhook helper — stamps Stripe state on the Tenant row. Used from + * checkout.session.completed (subscribe), customer.subscription.updated + * (plan switch / trial conversion), and customer.subscription.deleted + * (cancel). + * + * Patch semantics: undefined = "leave field alone", null = "explicitly + * clear" (used on subscription.deleted to drop stripeSubscriptionId). + */ +export interface MerchantSubscriptionPatch { + stripeCustomerId?: string | null; + stripeSubscriptionId?: string | null; + trialEndsAt?: Date | null; + /** Stamp the trial-used marker on this update. Webhook sets it to + * `new Date()` only on the first checkout-with-trial completion; + * never cleared afterward. 
*/ + firstTrialActivatedAt?: Date | null; +} + +export async function persistMerchantSubscription( + db: Knex, + tenantId: string, + patch: MerchantSubscriptionPatch, +): Promise { + const dbPatch: Partial = { updatedAt: new Date() }; + if (patch.stripeCustomerId !== undefined) dbPatch.stripeCustomerId = patch.stripeCustomerId; + if (patch.stripeSubscriptionId !== undefined) dbPatch.stripeSubscriptionId = patch.stripeSubscriptionId; + if (patch.trialEndsAt !== undefined) dbPatch.trialEndsAt = patch.trialEndsAt; + if (patch.firstTrialActivatedAt !== undefined) dbPatch.firstTrialActivatedAt = patch.firstTrialActivatedAt; + await db(TABLES.Tenant).where({ id: tenantId }).update(dbPatch); +} + +/** Update a tenant's plan after a Stripe Customer Portal plan switch. + * Called from the customer.subscription.updated webhook handler when + * the subscription's price IDs change. */ +export async function updateTenantPlanFromStripeSub( + db: Knex, + tenantId: string, + newPlan: 'flex' | 'revshare', +): Promise { + await db(TABLES.Tenant) + .where({ id: tenantId }) + .update({ billingPlan: newPlan as TenantRow['billingPlan'], updatedAt: new Date() }); +} + +/** Detect the openpartner plan from a Stripe subscription's line-item + * price IDs. Returns null when the IDs don't match known prices — + * caller treats that as "don't reclassify the tenant". 
*/ +export function inferPlanFromPriceIds(priceIds: string[]): 'flex' | 'revshare' | null { + const flatBase = process.env.STRIPE_FLAT_PRICE_ID; + const flatUsage = process.env.STRIPE_FLAT_USAGE_PRICE_ID; + const rev = process.env.STRIPE_REVSHARE_USAGE_PRICE_ID; + for (const id of priceIds) { + if (flatBase && id === flatBase) return 'flex'; + if (flatUsage && id === flatUsage) return 'flex'; + if (rev && id === rev) return 'revshare'; + } + return null; } diff --git a/apps/api/src/routes/campaigns.ts b/apps/api/src/routes/campaigns.ts index b3bb914..3ce5605 100644 --- a/apps/api/src/routes/campaigns.ts +++ b/apps/api/src/routes/campaigns.ts @@ -2,8 +2,9 @@ import { Router } from 'express'; import { z } from 'zod'; import { ulid } from 'ulid'; import { TABLES, type CampaignRow } from '@openpartner/db'; -import { db } from '../db.js'; import { requireAdmin, requireAuth } from '../auth.js'; +import { tenantOf } from '../tenancy.js'; +import { campaignAcceptsNewActivity } from '../campaign-lifecycle.js'; const commissionRuleSchema = z.discriminatedUnion('type', [ z.object({ type: z.literal('percent'), value: z.number().positive(), recurring: z.boolean().optional() }), @@ -20,16 +21,97 @@ const createSchema = z.object({ commissionRule: commissionRuleSchema, attributionWindowDays: z.number().int().min(1).max(365).optional(), attributionModel: z.enum(['last_click', 'first_click', 'linear', 'position']).optional(), + destinationUrl: z.string().url(), + /** Comma-separated host allowlist for partner deep-linking. Null/omitted + * means partners can't override the destination. */ + deepLinkAllowedDomains: z.string().max(1000).optional(), + startsAt: z.string().datetime().nullable().optional(), + endsAt: z.string().datetime().nullable().optional(), + /** When true, after creating the campaign, also grant every existing + * non-revoked partner access to it (source='admin'). Defaults to + * false so VIP / scoped campaigns stay private unless the brand opts + * in. 
New partners invited later still need to be granted explicitly + * at invite time — this only covers the existing roster. */ + grantToAllPartners: z.boolean().optional(), +}); + +const updateSchema = z.object({ + name: z.string().min(1).optional(), + commissionRule: commissionRuleSchema.optional(), + attributionWindowDays: z.number().int().min(1).max(365).optional(), + attributionModel: z.enum(['last_click', 'first_click', 'linear', 'position']).optional(), + destinationUrl: z.string().url().optional(), + deepLinkAllowedDomains: z.string().max(1000).nullable().optional(), + startsAt: z.string().datetime().nullable().optional(), + endsAt: z.string().datetime().nullable().optional(), }); export const campaignsRouter = Router(); -campaignsRouter.get('/campaigns', requireAuth, requireAdmin, async (_req, res) => { +campaignsRouter.get('/campaigns', requireAuth, requireAdmin, async (req, res) => { + const { db } = tenantOf(req); const campaigns = await db(TABLES.Campaign).orderBy('createdAt', 'desc'); res.json({ campaigns }); }); +/** + * Partner-facing campaign list — only the Programs the calling partner + * was granted access to (via admin assignment or Network-offering + * approval). Admins see all campaigns in their tenant. + * + * Fields are limited to what a partner needs to create a Link + * (id, name, destinationUrl, deepLinkAllowedDomains, source). + * Commission rules + attribution settings are admin-only and stay out + * of the response. 
+ */ +campaignsRouter.get('/me/campaigns', requireAuth, async (req, res) => { + const p = req.principal; + if (!p) return res.status(401).json({ error: 'unauthorized' }); + if (p.role !== 'partner' && p.role !== 'admin') { + return res.status(403).json({ error: 'forbidden' }); + } + const { db } = tenantOf(req); + + if (p.role === 'admin') { + const campaigns = (await db(TABLES.Campaign) + .select('id', 'name', 'destinationUrl', 'deepLinkAllowedDomains', 'startsAt', 'endsAt') + .orderBy('createdAt', 'desc')) as Array< + Pick + >; + return res.json({ campaigns }); + } + + // Partner: filter through PartnerCampaign join. Hide scheduled (not + // yet started) and ended campaigns — partners shouldn't be picking + // those when they create a Link. Admins see them all so they can + // edit dates. + const rows = (await db(TABLES.Campaign) + .join(TABLES.PartnerCampaign, `${TABLES.PartnerCampaign}.campaignId`, `${TABLES.Campaign}.id`) + .where(`${TABLES.PartnerCampaign}.partnerId`, p.partnerId!) 
+ .select( + `${TABLES.Campaign}.id`, + `${TABLES.Campaign}.name`, + `${TABLES.Campaign}.destinationUrl`, + `${TABLES.Campaign}.deepLinkAllowedDomains`, + `${TABLES.Campaign}.startsAt as startsAt`, + `${TABLES.Campaign}.endsAt as endsAt`, + `${TABLES.PartnerCampaign}.source as source`, + ) + .orderBy(`${TABLES.Campaign}.createdAt`, 'desc')) as Array<{ + id: string; + name: string; + destinationUrl: string; + deepLinkAllowedDomains: string | null; + startsAt: Date | null; + endsAt: Date | null; + source: 'admin' | 'offering'; + }>; + const campaigns = rows.filter((r) => campaignAcceptsNewActivity(r)); + res.json({ campaigns }); +}); + campaignsRouter.post('/campaigns', requireAuth, requireAdmin, async (req, res) => { + const { db, tenantId } = tenantOf(req); const body = createSchema.safeParse(req.body); if (!body.success) return res.status(400).json({ error: 'invalid_body', detail: body.error.flatten() }); @@ -37,12 +119,64 @@ campaignsRouter.post('/campaigns', requireAuth, requireAdmin, async (req, res) = const [campaign] = await db(TABLES.Campaign) .insert({ id, + tenantId, name: body.data.name, commissionRule: body.data.commissionRule, attributionWindowDays: body.data.attributionWindowDays ?? 60, attributionModel: body.data.attributionModel ?? 'last_click', + destinationUrl: body.data.destinationUrl, + deepLinkAllowedDomains: body.data.deepLinkAllowedDomains ?? null, + startsAt: body.data.startsAt ? new Date(body.data.startsAt) : null, + endsAt: body.data.endsAt ? new Date(body.data.endsAt) : null, }) .returning('*'); + // Optional bulk-grant to existing non-revoked partners. Mirrors the + // invite-time snapshot semantics (revokedAt is the only filter — we + // include not-yet-activated invitees so a freshly-sent invite still + // gets the new program). 
+ if (body.data.grantToAllPartners) { + const partners = (await db(TABLES.Partner) + .whereNull('revokedAt') + .select('id')) as Array<{ id: string }>; + if (partners.length > 0) { + await db(TABLES.PartnerCampaign) + .insert( + partners.map((p) => ({ + id: `pc_${ulid()}`, + tenantId, + partnerId: p.id, + campaignId: id, + source: 'admin', + })), + ) + .onConflict(['tenantId', 'partnerId', 'campaignId']) + .ignore(); + } + } + res.status(201).json(campaign); }); + +campaignsRouter.patch('/campaigns/:id', requireAuth, requireAdmin, async (req, res) => { + const { db } = tenantOf(req); + const body = updateSchema.safeParse(req.body); + if (!body.success) return res.status(400).json({ error: 'invalid_body', detail: body.error.flatten() }); + + const existing = await db(TABLES.Campaign).where({ id: req.params.id }).first(); + if (!existing) return res.status(404).json({ error: 'not_found' }); + + const patch: Partial = {}; + if (body.data.name !== undefined) patch.name = body.data.name; + if (body.data.commissionRule !== undefined) patch.commissionRule = body.data.commissionRule; + if (body.data.attributionWindowDays !== undefined) patch.attributionWindowDays = body.data.attributionWindowDays; + if (body.data.attributionModel !== undefined) patch.attributionModel = body.data.attributionModel; + if (body.data.destinationUrl !== undefined) patch.destinationUrl = body.data.destinationUrl; + if (body.data.deepLinkAllowedDomains !== undefined) patch.deepLinkAllowedDomains = body.data.deepLinkAllowedDomains; + if (body.data.startsAt !== undefined) patch.startsAt = body.data.startsAt ? new Date(body.data.startsAt) : null; + if (body.data.endsAt !== undefined) patch.endsAt = body.data.endsAt ? 
new Date(body.data.endsAt) : null; + + await db(TABLES.Campaign).where({ id: req.params.id }).update(patch); + const updated = await db(TABLES.Campaign).where({ id: req.params.id }).first(); + res.json(updated); +}); diff --git a/apps/api/src/routes/clicks.ts b/apps/api/src/routes/clicks.ts new file mode 100644 index 0000000..6b11349 --- /dev/null +++ b/apps/api/src/routes/clicks.ts @@ -0,0 +1,78 @@ +/** + * Click ingestion endpoint for federated clicks. + * + * The OpenPartner Network records clicks on its own DB when traffic + * arrives via a creator's custom share domain (efficient.link/r/), + * then federates each click to the brand's instance so brand-side + * analytics + attribution still see it. This is the receiving side. + * + * Authoritative click record lives here on the vendor's instance — + * Network's NetworkClick is a parallel copy for creator-side analytics. + * The same clickId (ULID) is used in both places so attribution stitches + * across systems via the cref cookie set by the Network router. + * + * Idempotent on (tenantId, id) — replaying the same outbox push (e.g., + * after a transient 5xx) won't double-insert. Network never invents a + * new id on retry, only on first record. + */ + +import { Router } from 'express'; +import { z } from 'zod'; +import { TABLES, type ClickRow, type LinkRow } from '@openpartner/db'; +import { grantScope, requireAuth } from '../auth.js'; +import { tenantOf } from '../tenancy.js'; + +const ingestSchema = z.object({ + /** ULID minted by the federating caller (Network). Replays carry the + * same id so we can dedupe via the unique primary key. */ + id: z.string().min(20).max(40), + /** Resolves to a Link in this tenant. We derive partnerId + campaignId + * from the Link so the caller can't forge attribution. 
*/ + linkKey: z.string().min(3).max(64), + landingUrl: z.string().url().max(2048), + ipHash: z.string().max(64).nullable().optional(), + userAgent: z.string().max(500).nullable().optional(), + referer: z.string().max(500).nullable().optional(), + fraudFlag: z.enum(['velocity', 'manual', 'revoked']).nullable().optional(), + /** Click timestamp from the federating caller — preserves the actual + * click time even if the federation push is delayed. ISO 8601. */ + ts: z.string().datetime(), +}); + +export const clicksRouter = Router(); + +clicksRouter.post('/clicks', requireAuth, grantScope('clicks:write'), async (req, res) => { + const { db, tenantId } = tenantOf(req); + const body = ingestSchema.safeParse(req.body); + if (!body.success) return res.status(400).json({ error: 'invalid_body', detail: body.error.flatten() }); + + // Look up the Link by linkKey within this tenant. Don't trust the + // caller's claimed partnerId/campaignId — derive from the Link row. + const link = await db(TABLES.Link).where({ linkKey: body.data.linkKey }).first(); + if (!link) return res.status(404).json({ error: 'link_not_found' }); + + try { + await db(TABLES.Click).insert({ + id: body.data.id, + tenantId, + linkId: link.id, + partnerId: link.partnerId, + campaignId: link.campaignId, + landingUrl: body.data.landingUrl, + ipHash: body.data.ipHash ?? null, + userAgent: body.data.userAgent ?? null, + referer: body.data.referer ?? null, + fraudFlag: body.data.fraudFlag ?? null, + ts: new Date(body.data.ts), + }); + return res.status(201).json({ ok: true, id: body.data.id }); + } catch (err) { + // Replay protection: same id already inserted → 200 idempotent ok. + // Network's outbox retries on transient failure; the second push + // shouldn't error just because the first eventually wrote. 
+ if (typeof err === 'object' && err !== null && (err as { code?: string }).code === '23505') { + return res.status(200).json({ ok: true, id: body.data.id, replayed: true }); + } + throw err; + } +}); diff --git a/apps/api/src/routes/commissions.ts b/apps/api/src/routes/commissions.ts index ca59152..994dd23 100644 --- a/apps/api/src/routes/commissions.ts +++ b/apps/api/src/routes/commissions.ts @@ -13,9 +13,9 @@ import { Router } from 'express'; import { z } from 'zod'; import { TABLES, type CommissionRow } from '@openpartner/db'; -import { db } from '../db.js'; import { grantScope, requireAdmin, requireAuth, requirePartnerOrAdmin } from '../auth.js'; import { dispatchEvent } from '../webhook-dispatcher.js'; +import { tenantOf } from '../tenancy.js'; const listQuerySchema = z.object({ status: z.enum(['accrued', 'approved', 'paid', 'reversed']).optional(), @@ -30,6 +30,7 @@ commissionsRouter.get( grantScope('commissions:read'), requirePartnerOrAdmin('id'), async (req, res) => { + const { db } = tenantOf(req); const q = listQuerySchema.safeParse(req.query); if (!q.success) return res.status(400).json({ error: 'invalid_query', detail: q.error.flatten() }); @@ -45,6 +46,7 @@ commissionsRouter.get( ); commissionsRouter.get('/commissions', requireAuth, requireAdmin, async (req, res) => { + const { db } = tenantOf(req); const q = listQuerySchema.safeParse(req.query); if (!q.success) return res.status(400).json({ error: 'invalid_query', detail: q.error.flatten() }); @@ -56,6 +58,7 @@ commissionsRouter.get('/commissions', requireAuth, requireAdmin, async (req, res }); commissionsRouter.post('/commissions/:id/approve', requireAuth, requireAdmin, async (req, res) => { + const { db, tenantId } = tenantOf(req); const updated = await db(TABLES.Commission) .where({ id: req.params.id, status: 'accrued' }) .update({ status: 'approved' }) @@ -64,7 +67,7 @@ commissionsRouter.post('/commissions/:id/approve', requireAuth, requireAdmin, as return res.status(409).json({ error: 
'not_approvable', detail: 'must be in accrued state' }); } const c = updated[0]!; - dispatchEvent('commission.approved', { + dispatchEvent(tenantId, 'commission.approved', { commissionId: c.id, partnerId: c.partnerId, amount: c.amount, @@ -75,6 +78,7 @@ commissionsRouter.post('/commissions/:id/approve', requireAuth, requireAdmin, as }); commissionsRouter.post('/commissions/:id/reverse', requireAuth, requireAdmin, async (req, res) => { + const { db, tenantId } = tenantOf(req); const updated = await db(TABLES.Commission) .where({ id: req.params.id }) .whereIn('status', ['accrued', 'approved']) @@ -84,7 +88,7 @@ commissionsRouter.post('/commissions/:id/reverse', requireAuth, requireAdmin, as return res.status(409).json({ error: 'not_reversible', detail: 'only accrued or approved commissions' }); } const c = updated[0]!; - dispatchEvent('commission.reversed', { + dispatchEvent(tenantId, 'commission.reversed', { commissionId: c.id, partnerId: c.partnerId, amount: c.amount, diff --git a/apps/api/src/routes/connect.ts b/apps/api/src/routes/connect.ts index 4901698..e86d4ed 100644 --- a/apps/api/src/routes/connect.ts +++ b/apps/api/src/routes/connect.ts @@ -8,14 +8,17 @@ * * Active/ready status is confirmed via the `account.updated` webhook, not at * the callback — the callback fires before Stripe has finalized verification. + * + * Multi-tenant: Connect accounts are stamped with openpartner_tenant_id so + * the webhook handler can resolve the tenant on inbound account.* events. 
*/ import { Router } from 'express'; import { z } from 'zod'; import { TABLES, type PartnerRow } from '@openpartner/db'; -import { db } from '../db.js'; import { requireAuth, requirePartnerOrAdmin } from '../auth.js'; import { requireStripe } from '../stripe.js'; +import { tenantOf } from '../tenancy.js'; const startSchema = z.object({ returnUrl: z.string().url(), @@ -29,6 +32,7 @@ connectRouter.post( requireAuth, requirePartnerOrAdmin('id'), async (req, res) => { + const { db, tenantId } = tenantOf(req); const body = startSchema.safeParse(req.body); if (!body.success) return res.status(400).json({ error: 'invalid_body', detail: body.error.flatten() }); @@ -41,7 +45,10 @@ connectRouter.post( const account = await stripe.accounts.create({ type: 'standard', email: partner.email, - metadata: { openpartner_partner_id: partner.id }, + metadata: { + openpartner_partner_id: partner.id, + openpartner_tenant_id: tenantId, + }, }); accountId = account.id; await db(TABLES.Partner) @@ -65,6 +72,7 @@ connectRouter.get( requireAuth, requirePartnerOrAdmin('id'), async (req, res) => { + const { db } = tenantOf(req); const partner = await db(TABLES.Partner).where({ id: req.params.id }).first(); if (!partner) return res.status(404).json({ error: 'partner_not_found' }); diff --git a/apps/api/src/routes/coupons.ts b/apps/api/src/routes/coupons.ts new file mode 100644 index 0000000..ddf0127 --- /dev/null +++ b/apps/api/src/routes/coupons.ts @@ -0,0 +1,382 @@ +/** + * Coupon-code attribution. + * + * GET /partners/:id/coupons list a partner's coupons + * POST /partners/:id/coupons mint a coupon (admin only) + * body: { campaignId, code? } + * code defaults to + * POST /coupons/redeem brand-side conversion path + * body: { code, eventType, value?, + * currency?, externalEventId, + * userId, ts? } + * writes Click + Identity + Event so + * the existing attribution engine + * processes the redemption identically + * to a clicked share-link conversion. 
+ * + * Scope: one Coupon per (partner, campaign). The auto-mint on + * PartnerCampaign insert is wired in routes/partners.ts + the + * partner-campaigns add path so coupons appear without admin action. + */ + +import { Router } from 'express'; +import { z } from 'zod'; +import { ulid } from 'ulid'; +import { randomBytes } from 'node:crypto'; +import { + TABLES, + type CampaignRow, + type ClickRow, + type CouponRow, + type EventRow, + type IdentityRow, + type PartnerRow, +} from '@openpartner/db'; +import { grantScope, requireAdmin, requireAuth, requirePartnerOrAdmin } from '../auth.js'; +import { tenantOf } from '../tenancy.js'; +import { attributeEvent } from '../attribution.js'; + +export const couponsRouter = Router(); + +// ---------- List + create per partner ---------- + +couponsRouter.get('/partners/:id/coupons', requireAuth, requirePartnerOrAdmin('id'), async (req, res) => { + const { db } = tenantOf(req); + const rows = await db(TABLES.Coupon) + .where({ partnerId: req.params.id }) + .orderBy('createdAt', 'asc'); + if (rows.length === 0) return res.json({ coupons: [] }); + + // 90-day redemption stats per coupon. Coupon-driven Clicks are + // identifiable by landingUrl='coupon://'. Two batched queries + // (one for click counts, one for revenue) keep this O(1) regardless + // of how many coupons the partner has. 
+ const since = new Date(Date.now() - 90 * 24 * 60 * 60 * 1000); + const landingUrls = rows.map((r) => `coupon://${r.code}`); + + const clickCounts = (await db(TABLES.Click) + .where({ partnerId: req.params.id }) + .whereIn('landingUrl', landingUrls) + .andWhere('ts', '>=', since) + .groupBy('landingUrl') + .select('landingUrl') + .count<{ landingUrl: string; count: string }[]>({ count: '*' })) as Array<{ landingUrl: string; count: string }>; + + const revenue = (await db(TABLES.Click) + .join(TABLES.Attribution, `${TABLES.Attribution}.clickId`, `${TABLES.Click}.id`) + .join(TABLES.Event, `${TABLES.Event}.id`, `${TABLES.Attribution}.eventId`) + .where(`${TABLES.Click}.partnerId`, req.params.id) + .whereIn(`${TABLES.Click}.landingUrl`, landingUrls) + .andWhere(`${TABLES.Attribution}.computedAt`, '>=', since) + .groupBy(`${TABLES.Click}.landingUrl`) + .select(`${TABLES.Click}.landingUrl as landingUrl`) + .select(db.raw(`COALESCE(SUM("Event".value * "Attribution".weight), 0) as revenue`))) as Array<{ landingUrl: string; revenue: string }>; + + const countByUrl = new Map(clickCounts.map((r) => [r.landingUrl, Number(r.count)])); + const revenueByUrl = new Map(revenue.map((r) => [r.landingUrl, Number(r.revenue)])); + + res.json({ + coupons: rows.map((r) => { + const url = `coupon://${r.code}`; + return { + ...r, + redemptions90d: countByUrl.get(url) ?? 0, + revenue90d: revenueByUrl.get(url) ?? 0, + }; + }), + }); +}); + +const createSchema = z.object({ + campaignId: z.string().min(1), + code: z + .string() + .trim() + .min(3) + .max(40) + .regex(/^[A-Z0-9-]+$/, 'code must be uppercase alphanumeric (with optional hyphens)') + .optional(), +}); + +/** Free-trial threshold: a brand can mint up to this many coupons + * manually before we require integration verification. Auto-mint on + * PartnerCampaign grant doesn't trip the gate (system action, not + * admin intent). 
*/ +const COUPON_VERIFICATION_THRESHOLD = 5; + +couponsRouter.post('/partners/:id/coupons', requireAuth, requireAdmin, async (req, res) => { + const { db, tenantId } = tenantOf(req); + const body = createSchema.safeParse(req.body ?? {}); + if (!body.success) return res.status(400).json({ error: 'invalid_body', detail: body.error.flatten() }); + + // Verification gate: past N coupons without a successful redemption + // ever recorded, refuse new manual mints. Forces the brand to + // actually wire up /coupons/redeem (or the Stripe webhook auto- + // redemption path) before they hand more codes to creators that + // won't attribute. + const tenant = (await db('Tenant').where({ id: tenantId }).first(['couponIntegrationVerifiedAt'])) as + | { couponIntegrationVerifiedAt: Date | null } + | undefined; + if (!tenant?.couponIntegrationVerifiedAt) { + const countRow = (await db(TABLES.Coupon).count<{ count: string }[]>({ count: '*' })) as Array<{ count: string }>; + const existing = Number(countRow[0]?.count ?? 0); + if (existing >= COUPON_VERIFICATION_THRESHOLD) { + return res.status(409).json({ + error: 'verification_required', + detail: `You have ${existing} coupons but no successful redemption yet. Wire up POST /coupons/redeem on your checkout — or set up a Stripe webhook pointing at /webhooks/stripe — and process one test redemption to unlock further mints. This protects creators from sharing codes that don't actually attribute.`, + threshold: COUPON_VERIFICATION_THRESHOLD, + existing, + }); + } + } + + const partner = await db(TABLES.Partner).where({ id: req.params.id }).first(); + if (!partner) return res.status(404).json({ error: 'partner_not_found' }); + + const campaign = await db(TABLES.Campaign).where({ id: body.data.campaignId }).first(); + if (!campaign) return res.status(404).json({ error: 'campaign_not_found' }); + + const code = body.data.code ?? 
defaultCode(partner.email); + try { + const id = `cpn_${ulid()}`; + await db(TABLES.Coupon).insert({ + id, + tenantId, + partnerId: partner.id, + campaignId: campaign.id, + code, + }); + const row = await db(TABLES.Coupon).where({ id }).first(); + return res.status(201).json(row); + } catch (err) { + if (typeof err === 'object' && err !== null && (err as { code?: string }).code === '23505') { + return res.status(409).json({ error: 'code_taken_or_partner_already_has_one_for_this_campaign' }); + } + throw err; + } +}); + +// ---------- Redeem ---------- +// +// Brand calls this from checkout when a customer enters a coupon code. +// We resolve the code → (partnerId, campaignId), then synthesize the +// click → identity → event chain so the existing attribution engine +// processes the redemption identically to a real clicked conversion. +// +// The synthetic Click has landingUrl=coupon://, ipHash=null, +// userAgent='OpenPartner-CouponRedeem/1' so coupon-driven attribution +// is identifiable in the Click table for analytics + audit. +// +// Idempotent on Event.externalEventId — re-sending the same redemption +// (e.g. from a webhook retry) doesn't double-attribute. + +const redeemSchema = z.object({ + code: z.string().trim().min(3).max(40), + /** What kind of event the redemption represents. Same enum the + * existing /events route accepts — signup, trial_started, + * subscription_created, invoice_paid, etc. */ + eventType: z.string().min(1).max(80), + value: z.number().nonnegative().optional(), + currency: z.string().length(3).optional(), + /** Required so we can dedup retries. Use the brand's checkout-session + * ID, order ID, or whatever's stable. */ + externalEventId: z.string().min(1).max(120), + /** The brand's internal user ID. Identity row links userId ↔ + * synthetic clickId so subsequent events from the same user + * attribute via the standard path. 
*/ + userId: z.string().min(1).max(120), + ts: z.string().datetime().optional(), +}); + +couponsRouter.post('/coupons/redeem', requireAuth, grantScope('events:write'), async (req, res) => { + const { db, tenantId } = tenantOf(req); + const body = redeemSchema.safeParse(req.body ?? {}); + if (!body.success) return res.status(400).json({ error: 'invalid_body', detail: body.error.flatten() }); + + // Case-insensitive lookup so customers entering keithf15a2x or + // KEITHF15A2X both resolve. Storage stays whatever the partner set. + const coupon = await db(TABLES.Coupon) + .whereRaw('lower("code") = ?', [body.data.code.toLowerCase()]) + .first(); + if (!coupon) return res.status(404).json({ error: 'coupon_not_found' }); + + // Idempotency: same externalEventId already processed → 200 + replayed flag. + const existingEvent = await db(TABLES.Event) + .where({ externalEventId: body.data.externalEventId, type: body.data.eventType }) + .first(); + if (existingEvent) { + return res.status(200).json({ ok: true, replayed: true, eventId: existingEvent.id }); + } + + const ts = body.data.ts ? new Date(body.data.ts) : new Date(); + + // Synthesize Click + Identity + Event in one trx so the attribution + // engine sees a consistent picture. + const result = await db.transaction(async (trx) => { + await ensureCouponClickAndIdentity(trx, tenantId, coupon, body.data.userId, ts); + + const eventId = ulid(); + const [eventRow] = (await trx(TABLES.Event) + .insert({ + id: eventId, + tenantId, + userId: body.data.userId, + type: body.data.eventType, + value: body.data.value != null ? String(body.data.value) : null, + currency: body.data.currency ?? null, + externalEventId: body.data.externalEventId, + metadata: { source: 'coupon', code: coupon.code }, + ts, + }) + .returning('*')) as EventRow[]; + return eventRow!; + }); + + // Attribution + commission run outside the trx — same pattern as the + // /events route. If they fail, the event still exists and a backlog + // run can catch it. 
+ await attributeEvent(db, result); + + res.status(201).json({ ok: true, eventId: result.id, partnerId: coupon.partnerId }); +}); + +/** + * Lookup a Coupon by code (case-insensitive) within the current tenant. + * Used by both the /coupons/redeem route and the Stripe webhook auto- + * redemption path. Returns null when the code doesn't match anything in + * this tenant — letting the caller no-op rather than 404. + */ +export async function findCouponByCode( + db: import('knex').Knex, + code: string, +): Promise { + const row = await db(TABLES.Coupon) + .whereRaw('lower("code") = ?', [code.toLowerCase()]) + .first(); + return row ?? null; +} + +/** + * Ensure a synthetic coupon Click + Identity exists for the given + * (coupon, userId). If the user already has an Identity for any + * Click on this coupon's partner, no-op (the user is already + * attributed). Otherwise insert a fresh Click + Identity so the + * attribution engine credits this partner on the next Event for + * this user. + * + * The check is "any click for this partner" not "this exact coupon" + * — re-redemptions of the same code by the same user shouldn't add + * a new touchpoint, but a click on partner A's link followed by a + * coupon for partner B should add a second touch (multi-touch). + */ +export async function ensureCouponClickAndIdentity( + trx: import('knex').Knex, + tenantId: string, + coupon: CouponRow, + userId: string, + ts: Date, +): Promise<{ clickId: string; reused: boolean }> { + // Has the user already been linked (via any Click) to this partner? + // If so, no-op — they're already attributed and adding another + // synthetic click would shift multi-touch weights unfairly. 
+ const existing = await trx(TABLES.Identity) + .join(TABLES.Click, `${TABLES.Click}.id`, `${TABLES.Identity}.clickId`) + .where(`${TABLES.Identity}.userId`, userId) + .andWhere(`${TABLES.Click}.partnerId`, coupon.partnerId) + .first(`${TABLES.Identity}.clickId as clickId`) as { clickId: string } | undefined; + if (existing) return { clickId: existing.clickId, reused: true }; + + const clickId = ulid(); + await trx(TABLES.Click).insert({ + id: clickId, + tenantId, + linkId: null, // no Link — coupon path doesn't have one + partnerId: coupon.partnerId, + campaignId: coupon.campaignId, + landingUrl: `coupon://${coupon.code}`, + ipHash: null, + userAgent: 'OpenPartner-CouponRedeem/1', + referer: null, + fraudFlag: null, + ts, + }); + await trx(TABLES.Identity) + .insert({ id: ulid(), tenantId, clickId, userId }) + .onConflict(['clickId', 'userId']) + .ignore(); + + // Stamp the tenant's "coupon integration is verified" flag on the + // FIRST successful redemption ever. COALESCE preserves the original + // verification date on subsequent redemptions. Cross-tenant write, + // which means the privileged db (not RLS-scoped trx) — but we + // already know the tenantId came from a verified Coupon lookup. + await trx('Tenant') + .where({ id: tenantId }) + .update({ + couponIntegrationVerifiedAt: trx.raw('COALESCE("couponIntegrationVerifiedAt", NOW())'), + }); + + return { clickId, reused: false }; +} + +// ---------- Helpers ---------- + +function defaultCode(email: string): string { + // + 4 random chars. + // Example: ada.lovelace+test@example.com → ADALOVELACE + 4F2A + const local = email.split('@')[0] ?? 'partner'; + const slug = local.replace(/[^A-Za-z0-9]/g, '').toUpperCase().slice(0, 12) || 'PARTNER'; + const rand = randomBytes(2).toString('hex').toUpperCase(); + return `${slug}${rand}`; +} + +/** + * Best-effort coupon mint after a PartnerCampaign grant. 
Used by: + * - POST /partners (auto-grants all current campaigns by default) + * - PUT /partners/:id/campaigns/:campaignId (admin grant) + * - Federation Network → /partners with campaignIds (offering approval) + * + * Idempotent: skips silently if a coupon already exists for the + * (partnerId, campaignId) pair (admin may have minted one already). + * On code collision (rare, since email-derived suffixes are usually + * unique), retries once with a fresh suffix. + */ +export async function autoMintCouponsForGrants( + db: import('knex').Knex, + tenantId: string, + partner: { id: string; email: string }, + campaignIds: string[], +): Promise { + if (campaignIds.length === 0) return; + for (const campaignId of campaignIds) { + let attempts = 0; + while (attempts < 2) { + attempts += 1; + const code = defaultCode(partner.email); + try { + await db(TABLES.Coupon) + .insert({ + id: `cpn_${ulid()}`, + tenantId, + partnerId: partner.id, + campaignId, + code, + }) + .onConflict(['tenantId', 'partnerId', 'campaignId']) + .ignore(); // already exists for this (partner, campaign) — admin minted manually + break; + } catch (err) { + // Code collision (the (tenantId, code) unique constraint + // fired). Retry once with a fresh random suffix; defaultCode + // re-rolls 16 bits each call. If it still collides, drop on + // the floor — better to skip auto-mint than to fail the + // partner-grant operation. + if (typeof err === 'object' && err !== null && (err as { code?: string }).code === '23505' && attempts < 2) { + continue; + } + console.error('[coupons] auto-mint failed', { partnerId: partner.id, campaignId, err }); + break; + } + } + } +} diff --git a/apps/api/src/routes/creator-portal.ts b/apps/api/src/routes/creator-portal.ts new file mode 100644 index 0000000..b1f8361 --- /dev/null +++ b/apps/api/src/routes/creator-portal.ts @@ -0,0 +1,197 @@ +/** + * Platform-level Creator portal proxy. 
+ * + * The multi-tenant deployment at app.openpartner.dev needs a creator + * surface that lives outside any single vendor tenant. Creators sign up + * here, get a Network-issued session, browse offerings across the + * federation, and apply. When a vendor approves, that vendor's instance + * federation flow creates a Partner row inside their tenant — but the + * creator's "home" stays here. + * + * Implementation: thin reverse proxy from /api/creator/* into the Network + * API. We pass cookies straight through so Network's + * `op_network_creator_session` cookie ends up scoped to app.openpartner.dev + * (Network sets the cookie without a Domain attribute, so the browser + * scopes it to whichever host responded — that's us). The Network already + * has all the Creator/magic-link/discover/apply endpoints we need; we + * don't duplicate any of that here. + * + * Public endpoints (no creator session needed) are also proxied so the + * Discover/Vendor pages render before signup. + */ + +import express, { Router, type Request, type Response } from 'express'; + +export const creatorPortalRouter = Router(); + +// Image-upload bodies arrive as raw binary, not JSON. The global +// express.json() middleware ignores image content-types, so without this +// raw parser req.body would be {} and the proxy would forward an empty +// payload to the Network. Cap matches the Network-side limit. +creatorPortalRouter.use( + express.raw({ + type: ['image/jpeg', 'image/png', 'image/webp'], + limit: 2 * 1024 * 1024, + }), +); + +// Whitelist: exact path match or prefix-with-glob. We only forward paths +// the Network exposes for creator-side flows, never anything else. 
+const ALLOWED_EXACT = new Set([ + '/creators/signup', + '/creators/signin', + '/creators/verify', + '/creators/signout', + '/creators/whoami', + '/creators/me', + '/creators/me/affiliations', + '/creators/me/requests', + '/creators/me/delete', + '/creators/me/restore', + '/creators/handle-available', + '/creators/me/domains', + '/creators/me/partnerships', + '/creators/me/platforms', + '/creators/me/recommendations', + '/creators/me/uploads/avatar', + '/offerings', +]); + +const ALLOWED_PREFIX: Array<{ prefix: string; methods: Set }> = [ + { prefix: '/offerings/', methods: new Set(['GET', 'POST']) }, // /offerings/:id, /offerings/:id/apply + { prefix: '/vendors/', methods: new Set(['GET']) }, // /vendors/:id (public profile) + { prefix: '/creators/me/affiliations/', methods: new Set(['GET']) }, + { prefix: '/creators/me/requests/', methods: new Set(['POST']) }, + // Custom share-domain management — POST /verify, DELETE the domain. + { prefix: '/creators/me/domains/', methods: new Set(['POST', 'DELETE']) }, + // Per-partnership slug edit (creatorSlug — the path under custom domain). + { prefix: '/creators/me/partnerships/', methods: new Set(['PATCH']) }, + // Per-platform handle upsert (PUT) + delete. + { prefix: '/creators/me/platforms/', methods: new Set(['PUT', 'DELETE']) }, + // Public profile lookup — no creator session required, lets brands + // and the open web hit /creators/by-handle/ on the portal. + { prefix: '/creators/by-handle/', methods: new Set(['GET']) }, + // Invitation deeplink resolver (no auth) + consume (creator auth). 
+ // GET /invitations/:token, POST /invitations/:token/consume + { prefix: '/invitations/', methods: new Set(['GET', 'POST']) }, +]; + +function isAllowed(method: string, subpath: string): boolean { + if (ALLOWED_EXACT.has(subpath)) return true; + for (const { prefix, methods } of ALLOWED_PREFIX) { + if (subpath.startsWith(prefix) && methods.has(method)) return true; + } + return false; +} + +creatorPortalRouter.all(/^\/creator-api(\/.*)?$/, async (req: Request, res: Response) => { + const networkUrl = process.env.NETWORK_URL; + if (!networkUrl) { + return res.status(503).json({ error: 'network_not_configured' }); + } + + const subpath = req.path.replace(/^\/creator-api/, '') || '/'; + if (!isAllowed(req.method, subpath)) { + return res.status(404).json({ error: 'not_found' }); + } + + // Outer try/catch returns proxy_error instead of letting the global + // 500 handler swallow the cause as opaque "internal_error". Anything + // that throws past the fetch (header parsing, body decode, cookie + // splitting) lands here with the upstream subpath in the log line. + try { + const qs = req.url.includes('?') ? req.url.slice(req.url.indexOf('?')) : ''; + const upstreamUrl = `${networkUrl.replace(/\/$/, '')}${subpath}${qs}`; + + // Forward only the bits the upstream cares about. We strip Authorization + // (no openpartner bearer should leak to Network) and Host (let fetch set it). + const headers: Record = {}; + const ct = req.header('content-type'); + if (ct) headers['content-type'] = ct; + const cookie = req.header('cookie'); + if (cookie) headers['cookie'] = cookie; + headers['user-agent'] = 'OpenPartner-CreatorPortal/1'; + headers['x-forwarded-for'] = req.ip ?? ''; + + const init: RequestInit = { + method: req.method, + headers, + signal: AbortSignal.timeout(10_000), + }; + if (req.method !== 'GET' && req.method !== 'HEAD') { + // Binary bodies (image uploads, populated by the express.raw + // middleware above) forward as-is. 
Everything else gets JSON + // serialized — the global express.json() middleware put a parsed + // object on req.body for application/json requests. + if (Buffer.isBuffer(req.body)) { + init.body = req.body; + } else if (req.body !== undefined) { + init.body = JSON.stringify(req.body); + } + } + + let upstream: globalThis.Response; + try { + upstream = await fetch(upstreamUrl, init); + } catch (err) { + console.error('[creator-portal] fetch failed', { subpath, err }); + return res.status(502).json({ + error: 'network_unreachable', + detail: err instanceof Error ? err.message : String(err), + }); + } + + // Set-Cookie handling has two gotchas: + // 1. Node fetch returns multiple Set-Cookie headers comma-joined when + // you call .get('set-cookie'); the browser then can't parse them. + // Use getSetCookie() (Node 19.7+) to get each one separately. + // 2. Cloudflare in front of network.openpartner.dev injects its own + // __cf_bm cookie with Domain=network.openpartner.dev — relaying + // that to app.openpartner.dev confuses the browser. Filter it out. + // We also strip any Domain attribute on the cookies we DO forward so + // they scope to app.openpartner.dev (the host that responded). + const cookies = upstream.headers.getSetCookie?.() ?? []; + const forward: string[] = []; + for (const c of cookies) { + const name = c.split('=', 1)[0]?.trim() ?? ''; + if (!name || name.startsWith('__cf') || name.startsWith('_cf')) continue; + forward.push(c.replace(/;\s*Domain=[^;]+/i, '')); + } + if (forward.length > 0) res.setHeader('set-cookie', forward); + + const upstreamCt = upstream.headers.get('content-type') ?? ''; + const body = await upstream.text(); + + // Log non-2xx upstream responses with body so we can debug Network-side + // failures from API logs without needing to wire client-side telemetry. + // Truncate at 500 chars in case Network returns a large HTML error page. 
+ if (upstream.status >= 400) { + console.error('[creator-portal] upstream error', { + method: req.method, + subpath, + status: upstream.status, + body: body.length > 500 ? `${body.slice(0, 500)}…(truncated)` : body, + }); + } + + res.status(upstream.status); + // Force JSON content-type on the response. The upstream is the + // Network — which we control — but pinning the type here means a + // future Network change that returns html/plain (e.g. an error + // page) can't end up XSS'd into a browser-rendered page on + // app.openpartner.dev. All Network endpoints we proxy already + // return JSON; if they didn't, the body string is JSON-serialized + // and rendered as text by the browser regardless. + if (upstreamCt.includes('application/json')) { + res.type('application/json').send(body); + } else { + res.type('application/json').send(JSON.stringify({ raw: body, contentType: upstreamCt || null })); + } + } catch (err) { + console.error('[creator-portal] proxy failed', { subpath, method: req.method, err }); + res.status(502).json({ + error: 'proxy_error', + detail: err instanceof Error ? err.message : String(err), + }); + } +}); diff --git a/apps/api/src/routes/dashboard.ts b/apps/api/src/routes/dashboard.ts index fe61c1e..d28cb32 100644 --- a/apps/api/src/routes/dashboard.ts +++ b/apps/api/src/routes/dashboard.ts @@ -1,17 +1,23 @@ import { Router } from 'express'; -import { TABLES } from '@openpartner/db'; -import { db } from '../db.js'; +import { TABLES, type LinkRow } from '@openpartner/db'; import { grantScope, requireAuth, requirePartnerOrAdmin } from '../auth.js'; +import { tenantOf } from '../tenancy.js'; export const dashboardRouter = Router(); // Partner dashboard — top-line counts, attributed revenue, commission by status. // Read-optimized via denormalized partnerId on Click/Attribution/Commission. 
+// +// Optional `?includeLinks=true` adds a per-Link breakdown so the partner / +// creator portal can show channel-level performance ("newsletter converted +// at 8%, TikTok bio at 0.3% — focus on the newsletter"). dashboardRouter.get('/partners/:id/dashboard', requireAuth, grantScope('partners:read'), requirePartnerOrAdmin('id'), async (req, res) => { + const { db } = tenantOf(req); const partnerId = req.params.id; const since = req.query.since ? new Date(String(req.query.since)) : new Date(Date.now() - 30 * 24 * 60 * 60 * 1000); + const includeLinks = req.query.includeLinks === 'true' || req.query.includeLinks === '1'; const [clicksRow] = await db(TABLES.Click) .where({ partnerId }) @@ -39,6 +45,65 @@ dashboardRouter.get('/partners/:id/dashboard', requireAuth, grantScope('partners .select('status') .sum({ amount: 'amount' })) as Array<{ status: string; amount: string | null }>; + let links: Array<{ + linkKey: string; + clicks: number; + attributedEvents: number; + attributedRevenue: number; + }> | undefined; + + if (includeLinks) { + // Per-Link breakdown: clicks via Click.linkId, conversions via the + // attribution → event join with the same linkId. We aggregate by + // Link (not by linkKey directly) so a deleted-and-recreated link + // with the same key shows as one row tied to the current Link. + // Then surface linkKey for display. + const partnerLinks = (await db(TABLES.Link) + .where({ partnerId }) + .select('id', 'linkKey')) as Array>; + const linkIds = partnerLinks.map((l) => l.id); + + if (linkIds.length > 0) { + const clicksByLink = (await db(TABLES.Click) + .whereIn('linkId', linkIds) + .andWhere('ts', '>=', since) + .groupBy('linkId') + .select('linkId') + .count<{ linkId: string; count: string }[]>({ count: '*' })) as Array<{ linkId: string; count: string }>; + + // Attribution rows don't carry linkId directly — they reference clickId. + // Join through Click + Event to get per-link conversion + revenue. 
+ const eventsByLink = (await db(TABLES.Attribution) + .join(TABLES.Click, `${TABLES.Click}.id`, `${TABLES.Attribution}.clickId`) + .join(TABLES.Event, `${TABLES.Event}.id`, `${TABLES.Attribution}.eventId`) + .where(`${TABLES.Attribution}.partnerId`, partnerId) + .andWhere(`${TABLES.Attribution}.computedAt`, '>=', since) + .whereIn(`${TABLES.Click}.linkId`, linkIds) + .groupBy(`${TABLES.Click}.linkId`) + .select(`${TABLES.Click}.linkId as linkId`) + .select( + db.raw(`COUNT(DISTINCT "Attribution"."eventId") as events`), + db.raw(`COALESCE(SUM("Event".value * "Attribution".weight), 0) as revenue`), + )) as Array<{ linkId: string; events: string; revenue: string }>; + + const clicksMap = new Map(clicksByLink.map((c) => [c.linkId, Number(c.count)])); + const eventsMap = new Map(eventsByLink.map((e) => [e.linkId, { events: Number(e.events), revenue: Number(e.revenue) }])); + + links = partnerLinks + .map((l) => ({ + linkKey: l.linkKey, + clicks: clicksMap.get(l.id) ?? 0, + attributedEvents: eventsMap.get(l.id)?.events ?? 0, + attributedRevenue: eventsMap.get(l.id)?.revenue ?? 0, + })) + // Sort by attributed revenue desc, then clicks desc — most useful + // surface for "what's working" comparisons. + .sort((a, b) => b.attributedRevenue - a.attributedRevenue || b.clicks - a.clicks); + } else { + links = []; + } + } + res.json({ partnerId, since: since.toISOString(), @@ -48,5 +113,6 @@ dashboardRouter.get('/partners/:id/dashboard', requireAuth, grantScope('partners commissionByStatus: Object.fromEntries( commissionByStatus.map((r) => [r.status, Number(r.amount ?? 0)]), ), + ...(links !== undefined ? 
{ links } : {}), }); }); diff --git a/apps/api/src/routes/events.ts b/apps/api/src/routes/events.ts index dd4295c..b416c32 100644 --- a/apps/api/src/routes/events.ts +++ b/apps/api/src/routes/events.ts @@ -2,9 +2,9 @@ import { Router } from 'express'; import { z } from 'zod'; import { ulid } from 'ulid'; import { TABLES, type EventRow } from '@openpartner/db'; -import { db } from '../db.js'; import { attributeEvent } from '../attribution.js'; import { requireAdmin, requireAuth } from '../auth.js'; +import { tenantOf } from '../tenancy.js'; const schema = z.object({ userId: z.string().min(1), @@ -17,10 +17,10 @@ const schema = z.object({ export const eventsRouter = Router(); -// Server-to-server conversion event ingest. // Server-to-server conversion event ingest — requires admin credentials // (this is the merchant's backend speaking, not a browser). eventsRouter.post('/attribution/events', requireAuth, requireAdmin, async (req, res) => { + const { db, tenantId } = tenantOf(req); const body = schema.safeParse(req.body); if (!body.success) return res.status(400).json({ error: 'invalid_body', detail: body.error.flatten() }); @@ -30,6 +30,7 @@ eventsRouter.post('/attribution/events', requireAuth, requireAdmin, async (req, const [event] = await db(TABLES.Event) .insert({ id: eventId, + tenantId, userId, type, value: value != null ? 
value.toFixed(2) : null, diff --git a/apps/api/src/routes/export.ts b/apps/api/src/routes/export.ts index ccb0124..b6262bd 100644 --- a/apps/api/src/routes/export.ts +++ b/apps/api/src/routes/export.ts @@ -1,13 +1,14 @@ import { Router } from 'express'; import { z } from 'zod'; -import { db } from '../db.js'; import { requireAdmin, requireAuth } from '../auth.js'; import { exportAll, exportTable, importBundle, isExportable, rowsToCsv } from '../export.js'; import { getMode } from '../stripe.js'; +import { tenantOf } from '../tenancy.js'; export const exportRouter = Router(); exportRouter.get('/export/:table.:format', requireAuth, requireAdmin, async (req, res) => { + const { db } = tenantOf(req); const table = req.params.table ?? ''; const format = req.params.format ?? ''; if (!isExportable(table)) return res.status(404).json({ error: 'table_not_exportable' }); @@ -27,7 +28,8 @@ exportRouter.get('/export/:table.:format', requireAuth, requireAdmin, async (req res.status(400).json({ error: 'unsupported_format', detail: 'use json or csv' }); }); -exportRouter.get('/export.json', requireAuth, requireAdmin, async (_req, res) => { +exportRouter.get('/export.json', requireAuth, requireAdmin, async (req, res) => { + const { db } = tenantOf(req); const bundle = await exportAll(db); res.setHeader('Content-Type', 'application/json'); res.setHeader('Content-Disposition', 'attachment; filename="openpartner-export.json"'); @@ -44,6 +46,7 @@ const importSchema = z.object({ }); exportRouter.post('/import', requireAuth, requireAdmin, async (req, res) => { + const { db, tenantId } = tenantOf(req); // Safety rail: re-importing someone else's export into a shared hosted DB // would collide primary keys and leak cross-tenant data. Gate it to selfhost. 
if (getMode() !== 'selfhost') { @@ -54,6 +57,6 @@ exportRouter.post('/import', requireAuth, requireAdmin, async (req, res) => { if (body.data.schemaVersion !== 1) { return res.status(400).json({ error: 'unsupported_schema_version' }); } - const report = await importBundle(db, body.data.tables); + const report = await importBundle(db, tenantId, body.data.tables); res.json({ ok: true, report }); }); diff --git a/apps/api/src/routes/fraud-review.ts b/apps/api/src/routes/fraud-review.ts index 13ebac8..29aa2ca 100644 --- a/apps/api/src/routes/fraud-review.ts +++ b/apps/api/src/routes/fraud-review.ts @@ -24,9 +24,9 @@ import { Router } from 'express'; import { z } from 'zod'; import { TABLES, type ClickRow, type IdentityRow } from '@openpartner/db'; -import { db } from '../db.js'; import { requireAdmin, requireAuth } from '../auth.js'; import { attributeBacklogForUser } from '../attribution.js'; +import { tenantOf } from '../tenancy.js'; export const fraudReviewRouter = Router(); @@ -36,6 +36,7 @@ const listQuerySchema = z.object({ }); fraudReviewRouter.get('/admin/clicks/flagged', requireAuth, requireAdmin, async (req, res) => { + const { db } = tenantOf(req); const q = listQuerySchema.safeParse(req.query); if (!q.success) return res.status(400).json({ error: 'invalid_query', detail: q.error.flatten() }); const limit = q.data.limit ?? 
100; @@ -67,6 +68,7 @@ fraudReviewRouter.get('/admin/clicks/flagged', requireAuth, requireAdmin, async }); fraudReviewRouter.post('/clicks/:id/unflag', requireAuth, requireAdmin, async (req, res) => { + const { db } = tenantOf(req); const clickId = req.params.id!; const click = await db(TABLES.Click).where({ id: clickId }).first(); if (!click) return res.status(404).json({ error: 'click_not_found' }); @@ -75,54 +77,55 @@ fraudReviewRouter.post('/clicks/:id/unflag', requireAuth, requireAdmin, async (r const identities = await db(TABLES.Identity).where({ clickId }); let reattributed = 0; - await db.transaction(async (trx) => { - await trx(TABLES.Click).where({ id: clickId }).update({ fraudFlag: null }); - - // Re-run attribution for any user that stitched to this click. If we - // just called the backlog helper directly, events already attributed - // under a different click-set (e.g. linear with two clicks, weight - // 0.5 each) would gain a third row for the newly-unflagged click - // without adjusting the existing weights — the partner would add up - // to > 1x the event's revenue. Before re-running, drop any - // non-finalized attribution rows for this user's events and their - // accrued commissions, so the backlog recomputes clean. We leave - // approved / paid commissions alone — you don't retroactively - // rewrite the ledger. 
- for (const identity of identities) { - const eventIds = ( - await trx(TABLES.Event).where({ userId: identity.userId }).select('id') - ).map((r) => (r as { id: string }).id); - if (eventIds.length === 0) continue; - - const accruedCommissions = await trx(TABLES.Commission) - .whereIn('attributionId', function () { - this.select('id').from(TABLES.Attribution).whereIn('eventId', eventIds); - }) - .where({ status: 'accrued' }) - .select('id', 'attributionId'); - - const attributionsToDelete = accruedCommissions - .map((c) => (c as { attributionId: string }).attributionId) - .filter(Boolean); - - if (accruedCommissions.length > 0) { - await trx(TABLES.Commission) - .whereIn('id', accruedCommissions.map((c) => (c as { id: string }).id)) - .del(); - } - if (attributionsToDelete.length > 0) { - await trx(TABLES.Attribution).whereIn('id', attributionsToDelete).del(); - } - - const n = await attributeBacklogForUser(trx, identity.userId); - reattributed += n; + // The request is already in a transaction (per-request via tenantMiddleware). + // No nested transaction needed — operate on req.db directly. + await db(TABLES.Click).where({ id: clickId }).update({ fraudFlag: null }); + + // Re-run attribution for any user that stitched to this click. If we + // just called the backlog helper directly, events already attributed + // under a different click-set (e.g. linear with two clicks, weight + // 0.5 each) would gain a third row for the newly-unflagged click + // without adjusting the existing weights — the partner would add up + // to > 1x the event's revenue. Before re-running, drop any + // non-finalized attribution rows for this user's events and their + // accrued commissions, so the backlog recomputes clean. We leave + // approved / paid commissions alone — you don't retroactively + // rewrite the ledger. 
+ for (const identity of identities) { + const eventIds = ( + await db(TABLES.Event).where({ userId: identity.userId }).select('id') + ).map((r) => (r as { id: string }).id); + if (eventIds.length === 0) continue; + + const accruedCommissions = await db(TABLES.Commission) + .whereIn('attributionId', function () { + this.select('id').from(TABLES.Attribution).whereIn('eventId', eventIds); + }) + .where({ status: 'accrued' }) + .select('id', 'attributionId'); + + const attributionsToDelete = accruedCommissions + .map((c) => (c as { attributionId: string }).attributionId) + .filter(Boolean); + + if (accruedCommissions.length > 0) { + await db(TABLES.Commission) + .whereIn('id', accruedCommissions.map((c) => (c as { id: string }).id)) + .del(); } - }); + if (attributionsToDelete.length > 0) { + await db(TABLES.Attribution).whereIn('id', attributionsToDelete).del(); + } + + const n = await attributeBacklogForUser(db, identity.userId); + reattributed += n; + } res.json({ ok: true, clickId, reattributedEvents: reattributed }); }); fraudReviewRouter.post('/clicks/:id/flag', requireAuth, requireAdmin, async (req, res) => { + const { db } = tenantOf(req); const clickId = req.params.id!; const click = await db(TABLES.Click).where({ id: clickId }).first(); if (!click) return res.status(404).json({ error: 'click_not_found' }); @@ -131,4 +134,3 @@ fraudReviewRouter.post('/clicks/:id/flag', requireAuth, requireAdmin, async (req await db(TABLES.Click).where({ id: clickId }).update({ fraudFlag: 'manual' }); res.json({ ok: true, clickId }); }); - diff --git a/apps/api/src/routes/funnel.ts b/apps/api/src/routes/funnel.ts index 717828b..ef278ee 100644 --- a/apps/api/src/routes/funnel.ts +++ b/apps/api/src/routes/funnel.ts @@ -23,12 +23,13 @@ import { Router } from 'express'; import { TABLES } from '@openpartner/db'; -import { db } from '../db.js'; import { requireAuth, requirePartnerOrAdmin } from '../auth.js'; +import { tenantOf } from '../tenancy.js'; export const funnelRouter = 
Router(); funnelRouter.get('/partners/:id/funnel', requireAuth, requirePartnerOrAdmin('id'), async (req, res) => { + const { db } = tenantOf(req); const partnerId = req.params.id!; const since = req.query.since ? new Date(String(req.query.since)) diff --git a/apps/api/src/routes/identify.ts b/apps/api/src/routes/identify.ts index f9eb294..0a8d277 100644 --- a/apps/api/src/routes/identify.ts +++ b/apps/api/src/routes/identify.ts @@ -2,9 +2,9 @@ import { Router } from 'express'; import { z } from 'zod'; import { ulid } from 'ulid'; import { TABLES, type ClickRow, type IdentityRow } from '@openpartner/db'; -import { db } from '../db.js'; import { attributeBacklogForUser } from '../attribution.js'; import { ipRateLimit } from '../middleware/rate-limit.js'; +import { tenantOf } from '../tenancy.js'; const schema = z.object({ cref: z.string().min(1), @@ -15,12 +15,19 @@ const schema = z.object({ export const identifyRouter = Router(); // Stitch a click (cref) to an authenticated user. Called by the SDK on -// login/signup, so it's unauth'd and public. 120/min per IP is generous -// for legitimate use (stitches once per login) but cuts off spray. +// login/signup. The endpoint is unauth'd, but it IS tenant-scoped — in +// multi-tenant mode the SDK calls /t//attribution/identify so the +// click lookup is bounded to that tenant. Routing through tenantMiddleware +// gives us a tenant-bound transaction so the Identity insert is scoped +// correctly via the WITH CHECK clause on RLS. +// +// 120/min per IP is generous for legitimate use (stitches once per +// login) but cuts off spray. 
identifyRouter.post( '/attribution/identify', ipRateLimit({ name: 'identify', max: 120, windowMs: 60_000 }), async (req, res) => { + const { db, tenantId } = tenantOf(req); const body = schema.safeParse(req.body); if (!body.success) return res.status(400).json({ error: 'invalid_body', detail: body.error.flatten() }); @@ -33,7 +40,7 @@ identifyRouter.post( // re-identify() calls for the same click a no-op. const identityId = ulid(); const inserted = await db(TABLES.Identity) - .insert({ id: identityId, clickId: cref, userId }) + .insert({ id: identityId, tenantId, clickId: cref, userId }) .onConflict(['clickId', 'userId']) .ignore() .returning('id'); diff --git a/apps/api/src/routes/import-partners.ts b/apps/api/src/routes/import-partners.ts new file mode 100644 index 0000000..e2a49dd --- /dev/null +++ b/apps/api/src/routes/import-partners.ts @@ -0,0 +1,224 @@ +/** + * Partner-roster CSV importer. + * + * POST /import/partners-csv?dryRun=true|false + * Content-Type: text/csv (raw CSV body) + * + * Lets brands migrate from competitors (Impact, Rewardful, Refersion, etc.) + * by adapting their export to a single canonical OpenPartner format and + * uploading. Multi-format auto-detection is deferred — documenting one + * canonical format is simpler and ships now. 
+ * + * Canonical CSV columns (header row required): + * + * email required, becomes Partner.email + * name required, becomes Partner.name + * activatedAt optional ISO datetime, defaults to now (existing + * partners come in already-activated; new invites + * should leave blank + use POST /partners with + * sendInvite=true instead) + * metadata optional JSON object string, merged into Partner.metadata + * + * Behavior: + * - dryRun=true parses + validates + returns preview + would-be results + * without writing anything + * - dryRun=false (or missing) commits the import — creates Partner rows, + * granting access to ALL current campaigns by default + * (matches the per-partner default behavior elsewhere) + * - Per-row email_taken collisions are reported as skipped, not errors — + * re-running the import is idempotent on already-imported emails. + */ + +import { Router } from 'express'; +import { ulid } from 'ulid'; +import { TABLES, type PartnerRow } from '@openpartner/db'; +import { grantScope, requireAdmin, requireAuth } from '../auth.js'; +import { tenantOf } from '../tenancy.js'; + +export const importPartnersRouter = Router(); + +interface RowReport { + row: number; + email: string; + status: 'created' | 'skipped_email_taken' | 'invalid'; + reason?: string; + partnerId?: string; +} + +importPartnersRouter.post( + '/import/partners-csv', + requireAuth, + grantScope('partners:write'), + requireAdmin, + // Read raw text body — we don't want express.json() to interpret it. + // The /import route already uses 256mb json limit; partner rosters are + // small enough that the global 1mb limit suits. + (req, res, next) => { + if (req.is('text/*') || req.is('application/csv') || req.is('text/csv')) { + let raw = ''; + req.setEncoding('utf8'); + req.on('data', (chunk) => { raw += chunk; }); + req.on('end', () => { + (req as { rawCsv?: string }).rawCsv = raw; + next(); + }); + } else { + // Allow JSON shape `{ csv: "...." 
}` as a fallback for tools that + // can't set the content-type. + const csv = typeof req.body?.csv === 'string' ? req.body.csv : ''; + (req as { rawCsv?: string }).rawCsv = csv; + next(); + } + }, + async (req, res) => { + const { db, tenantId } = tenantOf(req); + const csv = ((req as { rawCsv?: string }).rawCsv ?? '').trim(); + if (!csv) return res.status(400).json({ error: 'empty_body', detail: 'POST the CSV as text/csv body' }); + + const dryRun = req.query.dryRun === 'true' || req.query.dryRun === '1'; + + let rows: Array>; + try { + rows = parseCsv(csv); + } catch (err) { + return res.status(400).json({ error: 'invalid_csv', detail: err instanceof Error ? err.message : String(err) }); + } + if (rows.length === 0) return res.status(400).json({ error: 'no_rows', detail: 'CSV had a header but no data rows' }); + + // Pull the campaign list once — every newly-created partner gets + // access to all current campaigns (the same default as POST /partners + // when campaignIds is omitted). Done outside the loop so we don't + // re-query per row. + const allCampaigns = (await db(TABLES.Campaign).select('id')) as Array<{ id: string }>; + const allCampaignIds = allCampaigns.map((c) => c.id); + + const report: RowReport[] = []; + + for (let i = 0; i < rows.length; i += 1) { + const r = rows[i]!; + const rowNum = i + 2; // +1 for header, +1 to match human row counting + + const email = (r.email ?? '').trim().toLowerCase(); + const name = (r.name ?? 
'').trim(); + if (!email || !email.includes('@')) { + report.push({ row: rowNum, email, status: 'invalid', reason: 'invalid_email' }); + continue; + } + if (!name) { + report.push({ row: rowNum, email, status: 'invalid', reason: 'missing_name' }); + continue; + } + + const existing = await db(TABLES.Partner).where({ email }).first(); + if (existing) { + report.push({ row: rowNum, email, status: 'skipped_email_taken', partnerId: existing.id }); + continue; + } + + let metadata: Record = { importedFromCsv: true }; + if (r.metadata) { + try { + const parsed = JSON.parse(r.metadata); + if (parsed && typeof parsed === 'object') metadata = { ...metadata, ...parsed }; + } catch { + report.push({ row: rowNum, email, status: 'invalid', reason: 'metadata_not_valid_json' }); + continue; + } + } + + const activatedAt = r.activatedAt ? new Date(r.activatedAt) : new Date(); + if (Number.isNaN(activatedAt.getTime())) { + report.push({ row: rowNum, email, status: 'invalid', reason: 'invalid_activatedAt' }); + continue; + } + + const id = ulid(); + if (!dryRun) { + await db(TABLES.Partner).insert({ + id, + tenantId, + email, + name, + metadata, + activatedAt, + }); + if (allCampaignIds.length > 0) { + await db(TABLES.PartnerCampaign) + .insert( + allCampaignIds.map((cid) => ({ + id: `pc_${ulid()}`, + tenantId, + partnerId: id, + campaignId: cid, + source: 'admin', + })), + ) + .onConflict(['tenantId', 'partnerId', 'campaignId']) + .ignore(); + } + } + report.push({ row: rowNum, email, status: 'created', partnerId: id }); + } + + const summary = { + total: rows.length, + created: report.filter((r) => r.status === 'created').length, + skipped: report.filter((r) => r.status === 'skipped_email_taken').length, + invalid: report.filter((r) => r.status === 'invalid').length, + }; + res.json({ dryRun, summary, report }); + }, +); + +/** + * Minimal CSV parser. Handles the canonical case (no quoted fields with + * embedded commas/newlines). 
Rejects anything weird so users notice + * the format mismatch instead of silently importing garbage. + */ +function parseCsv(input: string): Array> { + const lines = input.split(/\r?\n/).filter((l) => l.trim().length > 0); + if (lines.length < 2) throw new Error('expected a header row plus at least one data row'); + const header = splitCsvLine(lines[0]!); + const rows: Array> = []; + for (let i = 1; i < lines.length; i += 1) { + const cells = splitCsvLine(lines[i]!); + if (cells.length !== header.length) { + throw new Error(`row ${i + 1} has ${cells.length} columns, expected ${header.length} from header`); + } + const row: Record = {}; + for (let j = 0; j < header.length; j += 1) { + row[header[j]!] = cells[j]!; + } + rows.push(row); + } + return rows; +} + +/** + * Splits a CSV line on commas, respecting double-quoted fields with + * doubled-quote escaping ("Smith, John" or "she said ""hi"""). Doesn't + * handle multi-line quoted fields — those need a real parser, and they + * don't show up in partner-roster imports in practice. + */ +function splitCsvLine(line: string): string[] { + const out: string[] = []; + let cur = ''; + let inQuotes = false; + for (let i = 0; i < line.length; i += 1) { + const ch = line[i]!; + if (inQuotes) { + if (ch === '"') { + if (line[i + 1] === '"') { cur += '"'; i += 1; } + else { inQuotes = false; } + } else { + cur += ch; + } + } else { + if (ch === ',') { out.push(cur); cur = ''; } + else if (ch === '"' && cur.length === 0) { inQuotes = true; } + else { cur += ch; } + } + } + out.push(cur); + return out.map((s) => s.trim()); +} diff --git a/apps/api/src/routes/install.ts b/apps/api/src/routes/install.ts new file mode 100644 index 0000000..aaa9b62 --- /dev/null +++ b/apps/api/src/routes/install.ts @@ -0,0 +1,212 @@ +/** + * First-run install endpoint — WordPress-style. Only usable while zero + * admins are activated. Once the first admin exists, the endpoint 409s + * so a second "installer" can't take over. 
+ * + * Creates the first admin in a single round-trip along with the program + * settings (name + support email) and emails them a magic-link to + * activate. After they verify, they're the first admin and can invite + * others + rotate ADMIN_API_KEY. + * + * Multi-tenant: install is the self-host bootstrap. In multi-tenant mode + * it rejects — hosted operators provision tenants via /signup, and the + * platform is never "uninstalled". The endpoint stays mounted as a + * public route (no tenantMiddleware) and uses the privileged `db` to + * stamp rows with the seeded default tenant. + */ + +import { Router } from 'express'; +import { z } from 'zod'; +import { ulid } from 'ulid'; +import { DEFAULT_TENANT_ID, TABLES, type AdminRow, type ConfigRow } from '@openpartner/db'; +import { db } from '../db.js'; +import { ipRateLimit } from '../middleware/rate-limit.js'; +import { issueMagicLink } from '../auth-sessions.js'; +import { getMailer } from '../mailer.js'; +import { adminInviteEmail, buildMagicLinkUrl } from '../email-templates.js'; +import { saveMailSettings } from '../mail-settings.js'; +import { getTenancyMode } from '../tenancy.js'; + +export const installRouter = Router(); + +class AlreadyInstalledError extends Error { + readonly name = 'AlreadyInstalledError'; +} + +const installLimit = ipRateLimit({ name: 'install', max: 5, windowMs: 60_000 }); + +const installSchema = z + .object({ + adminName: z.string().trim().min(1).max(120), + adminEmail: z.string().trim().email().max(254), + programName: z.string().trim().min(1).max(120), + supportEmail: z.string().trim().email().max(254).optional().or(z.literal('')), + mail: z + .object({ + kind: z.enum(['smtp', 'postmark', 'none']), + from: z.string().trim().max(254).optional(), + smtp: z + .object({ + host: z.string().trim().min(1).max(253), + port: z.number().int().min(1).max(65535).default(587), + secure: z.boolean().default(false), + user: z.string().trim().max(320).optional(), + password: 
z.string().max(500).optional(), + }) + .optional(), + postmark: z + .object({ + serverToken: z.string().min(1).max(500), + messageStream: z.string().trim().max(120).default('outbound'), + }) + .optional(), + }) + .optional(), + }) + // Cross-field invariants: picking smtp/postmark without the minimum + // required fields would let the install "succeed" only to then + // silently drop the activation email (wrong From header, no host, no + // token). Reject at the boundary instead. + .superRefine((val, ctx) => { + const m = val.mail; + if (!m || m.kind === 'none') return; + if (!m.from || !m.from.trim()) { + ctx.addIssue({ code: z.ZodIssueCode.custom, path: ['mail', 'from'], message: 'required when kind is smtp or postmark' }); + } + if (m.kind === 'smtp' && !m.smtp?.host) { + ctx.addIssue({ code: z.ZodIssueCode.custom, path: ['mail', 'smtp', 'host'], message: 'required when kind is smtp' }); + } + if (m.kind === 'postmark' && !m.postmark?.serverToken) { + ctx.addIssue({ code: z.ZodIssueCode.custom, path: ['mail', 'postmark', 'serverToken'], message: 'required when kind is postmark' }); + } + }); + +/** + * Public status probe used by the portal to decide whether to route to + * /install on mount. Always reachable (it's what tells the portal the + * system is uninitialized). + */ +installRouter.get('/install/status', async (_req, res) => { + if (getTenancyMode() === 'multi') { + return res.json({ needsSetup: false, reason: 'multi_tenant' }); + } + const [row] = await db(TABLES.Admin) + .where({ tenantId: DEFAULT_TENANT_ID }) + .whereNotNull('activatedAt') + .whereNull('revokedAt') + .count<{ count: string }[]>({ count: '*' }); + res.json({ needsSetup: Number(row?.count ?? 0) === 0 }); +}); + +/** Deterministic pg advisory-lock key — any 64-bit int, as long as this + * is the only caller. Different from 0 to avoid collision with the + * naive "no lock" sentinel some libraries use. 
*/ +const INSTALL_ADVISORY_LOCK = 49_1092_4719; + +installRouter.post('/install', installLimit, async (req, res) => { + if (getTenancyMode() === 'multi') { + return res.status(400).json({ error: 'install_not_available_in_multi_tenant' }); + } + + const body = installSchema.safeParse(req.body); + if (!body.success) return res.status(400).json({ error: 'invalid_body', detail: body.error.flatten() }); + + const adminEmail = body.data.adminEmail.toLowerCase(); + const programName = body.data.programName.trim(); + const supportEmail = body.data.supportEmail?.trim() || null; + const now = new Date(); + + let adminId: string | null = null; + try { + await db.transaction(async (trx) => { + // Serialize concurrent installers. The advisory lock is auto- + // released when the transaction commits / rolls back. Inside + // the lock, re-check the "no admin exists yet" invariant — a + // check outside the lock (TOCTOU) would let two concurrent + // installers both think they're first. + await trx.raw('SELECT pg_advisory_xact_lock(?)', [INSTALL_ADVISORY_LOCK]); + + // Block on ANY admin row (activated or not) in the default tenant: + // while a first-run magic-link is outstanding, a second installer + // shouldn't be able to sneak their own pending admin in. + const [existing] = await trx(TABLES.Admin) + .where({ tenantId: DEFAULT_TENANT_ID }) + .count<{ count: string }[]>({ count: '*' }); + if (Number(existing?.count ?? 
0) > 0) { + throw new AlreadyInstalledError(); + } + + await trx(TABLES.Config) + .insert({ + tenantId: DEFAULT_TENANT_ID, + key: 'program_settings', + value: { programName, supportEmail } as unknown as never, + updatedAt: now, + }) + .onConflict(['tenantId', 'key']) + .merge({ value: { programName, supportEmail } as unknown as never, updatedAt: now }); + + const id = ulid(); + await trx(TABLES.Admin).insert({ + id, + tenantId: DEFAULT_TENANT_ID, + email: adminEmail, + name: body.data.adminName.trim(), + activatedAt: null, + }); + adminId = id; + }); + } catch (err) { + if (err instanceof AlreadyInstalledError) { + return res.status(409).json({ error: 'already_installed' }); + } + throw err; + } + if (!adminId) return res.status(500).json({ error: 'install_failed' }); + + // Finish the install in a compensating try/catch: if mail config OR + // the invite send fails, roll back the Admin row we just created so + // /install can be retried. Without this, a partial failure here + // would leave an unactivated admin in the table and /install's + // "any admin exists" guard would 409 every subsequent attempt — the + // only escape being manual DB surgery or env-key intervention. 
+ try { + if (body.data.mail) { + await saveMailSettings(db, DEFAULT_TENANT_ID, { + kind: body.data.mail.kind, + from: body.data.mail.from, + smtp: body.data.mail.smtp, + postmark: body.data.mail.postmark, + }); + } + + const issued = await issueMagicLink(db, { + tenantId: DEFAULT_TENANT_ID, + email: adminEmail, + purpose: 'admin_invite', + principalKind: 'admin', + principalId: adminId, + }); + const tmpl = adminInviteEmail(body.data.adminName.trim(), buildMagicLinkUrl(issued.plaintext), programName); + await getMailer().send({ db, tenantId: DEFAULT_TENANT_ID }, { + to: adminEmail, + subject: tmpl.subject, + text: tmpl.text, + html: tmpl.html, + tag: 'admin_invite', + metadata: { purpose: 'admin_invite', adminId, firstRun: true }, + }); + } catch (err) { + // Remove the admin row + any magic-link token we issued before the + // failure. MagicLinkToken has no FK to Admin since the schema was + // generalized, so clean it up explicitly. + await db.transaction(async (trx) => { + await trx('MagicLinkToken').where({ principalKind: 'admin', principalId: adminId }).del(); + await trx(TABLES.Admin).where({ id: adminId }).del(); + }); + const message = err instanceof Error ? 
err.message : 'install failed'; + return res.status(502).json({ error: 'install_mail_failed', detail: message }); + } + + res.json({ ok: true }); +}); diff --git a/apps/api/src/routes/links.ts b/apps/api/src/routes/links.ts index e4f8926..0194377 100644 --- a/apps/api/src/routes/links.ts +++ b/apps/api/src/routes/links.ts @@ -1,9 +1,9 @@ import { Router } from 'express'; import { z } from 'zod'; import { ulid } from 'ulid'; -import { TABLES, type LinkRow } from '@openpartner/db'; -import { db } from '../db.js'; +import { TABLES, type CampaignRow, type LinkRow } from '@openpartner/db'; import { grantScope, requireAuth, requirePartnerOrAdmin } from '../auth.js'; +import { tenantOf } from '../tenancy.js'; const createSchema = z.object({ linkKey: z @@ -12,12 +12,17 @@ const createSchema = z.object({ .max(64) .regex(/^[a-zA-Z0-9_-]+$/, 'linkKey must be url-safe'), campaignId: z.string().min(1), - destinationUrl: z.string().url(), + /** Optional override of Campaign.destinationUrl. Allowed only when + * the Campaign has deepLinkAllowedDomains set AND the override host + * matches one of those domains. Otherwise rejected with + * destination_override_not_allowed. 
*/ + destinationUrl: z.string().url().optional(), }); export const linksRouter = Router(); linksRouter.get('/partners/:id/links', requireAuth, requirePartnerOrAdmin('id'), async (req, res) => { + const { db } = tenantOf(req); const links = await db(TABLES.Link) .where({ partnerId: req.params.id }) .orderBy('createdAt', 'desc'); @@ -25,24 +30,71 @@ linksRouter.get('/partners/:id/links', requireAuth, requirePartnerOrAdmin('id'), }); linksRouter.post('/partners/:id/links', requireAuth, grantScope('links:write'), requirePartnerOrAdmin('id'), async (req, res) => { + const { db, tenantId } = tenantOf(req); const body = createSchema.safeParse(req.body); if (!body.success) return res.status(400).json({ error: 'invalid_body', detail: body.error.flatten() }); const partner = await db(TABLES.Partner).where({ id: req.params.id }).first(); if (!partner) return res.status(404).json({ error: 'partner_not_found' }); - const campaign = await db(TABLES.Campaign).where({ id: body.data.campaignId }).first(); + const campaign = await db(TABLES.Campaign).where({ id: body.data.campaignId }).first(); if (!campaign) return res.status(404).json({ error: 'campaign_not_found' }); + // Lifecycle gate: scheduled or ended campaigns refuse new Link + // creation. Existing Links keep redirecting (URLs intact); only the + // create path is gated. Admins are subject to the same rule — if + // they want to make Links for an ended campaign they extend the + // endsAt first. + const { campaignAcceptsNewActivity, campaignStatus } = await import('../campaign-lifecycle.js'); + if (!campaignAcceptsNewActivity(campaign)) { + return res.status(409).json({ + error: 'campaign_not_active', + detail: `Campaign is ${campaignStatus(campaign)} — Links can be created only while the campaign is active.`, + }); + } + + // Partner-side: must be granted access to this Campaign via + // PartnerCampaign. Admins acting on a partner's behalf bypass this + // check (they're the ones who'd grant it anyway). 
+ if (req.principal?.role === 'partner') { + const grant = await db(TABLES.PartnerCampaign) + .where({ partnerId: req.params.id, campaignId: body.data.campaignId }) + .first(); + if (!grant) { + return res.status(403).json({ + error: 'campaign_not_granted', + detail: 'You don’t have access to this program. Apply through the Network or ask the brand admin to add you.', + }); + } + } + + // Destination resolution: if the partner supplied an override, the + // Campaign must allow deep-linking AND the override host must match + // the allowlist. Otherwise the Link inherits Campaign.destinationUrl + // (stored as null on the row → router resolves at click time). + let destinationOverride: string | null = null; + if (body.data.destinationUrl) { + if (!isDeepLinkAllowed(campaign, body.data.destinationUrl)) { + return res.status(400).json({ + error: 'destination_override_not_allowed', + detail: campaign.deepLinkAllowedDomains + ? `Override must be on one of: ${campaign.deepLinkAllowedDomains}` + : 'This program does not allow custom destinations.', + }); + } + destinationOverride = body.data.destinationUrl; + } + const id = ulid(); try { const [link] = await db(TABLES.Link) .insert({ id, + tenantId, linkKey: body.data.linkKey, partnerId: req.params.id, campaignId: body.data.campaignId, - destinationUrl: body.data.destinationUrl, + destinationUrl: destinationOverride, }) .returning('*'); res.status(201).json(link); @@ -54,6 +106,21 @@ linksRouter.post('/partners/:id/links', requireAuth, grantScope('links:write'), } }); +function isDeepLinkAllowed(campaign: CampaignRow, url: string): boolean { + if (!campaign.deepLinkAllowedDomains) return false; + let host: string; + try { + host = new URL(url).hostname.toLowerCase(); + } catch { + return false; + } + return campaign.deepLinkAllowedDomains + .split(',') + .map((s) => s.trim().toLowerCase()) + .filter(Boolean) + .some((allowed) => host === allowed || host.endsWith(`.${allowed}`)); +} + function isUniqueViolation(err: 
unknown): boolean { return typeof err === 'object' && err !== null && (err as { code?: string }).code === '23505'; } diff --git a/apps/api/src/routes/magic-link.ts b/apps/api/src/routes/magic-link.ts deleted file mode 100644 index f167b55..0000000 --- a/apps/api/src/routes/magic-link.ts +++ /dev/null @@ -1,468 +0,0 @@ -/** - * Human-auth endpoints — magic-link signup/signin for creators AND - * vendors, plus dev mailbox. - * - * Purpose strings encode BOTH the role (creator / vendor) and the - * lifecycle stage (signup / signin), giving us four values: - * creator_signup — claim carries handle + name → creates active NetworkCreator - * creator_signin — returning active creator → new session - * vendor_signup — claim carries full vendor profile → creates pending NetworkVendor - * vendor_signin — returning active vendor → new session - * - * We deliberately use 'pending' status for vendor signup so an admin - * still reviews the federation credentials before activating — unlike - * creator signup where magic-link email verification is enough. - * - * Token consumption is single-use and atomic (conditional update on - * consumedAt IS NULL). Tokens expire after 15 minutes. 
- */ - -import { Router } from 'express'; -import { z } from 'zod'; -import { ulid } from 'ulid'; -import { - TABLES, - type DevMessageRow, - type MagicLinkCreatorClaim, - type MagicLinkTokenRow, - type MagicLinkVendorClaim, - type NetworkCreatorRow, - type NetworkVendorRow, -} from '@openpartner/db'; -import { db } from '../db.js'; -import { requireAdmin, requireAuth } from '../auth.js'; -import { getMailer } from '../mailer.js'; -import { - SESSION_COOKIE_NAME, - consumeMagicLink, - createSession, - issueMagicLink, - revokeSession, - sessionCookieOptions, -} from '../auth-sessions.js'; -import { encryptKey } from '../network/crypto.js'; -import { safeFetch } from '../network/safe-fetch.js'; -import { - creatorSigninEmail, - creatorSignupEmail, - vendorSigninEmail, - vendorSignupEmail, -} from '../email-templates.js'; -import { NETWORK_FEDERATION_SCOPES } from './api-keys.js'; -import { ipRateLimit } from '../middleware/rate-limit.js'; - -export const magicLinkRouter = Router(); - -// Shared bucket across every email-triggering auth endpoint — stops an -// attacker from rotating across /creator/signin, /vendor/signin, etc. to -// multiply the cap. 10/min per IP is loose for one real user, tight for -// a bot. -const mailAuthLimit = ipRateLimit({ name: 'magic-link-mail', max: 10, windowMs: 60_000 }); - -// Token verification is single-use already, but brute-forcing /verify -// across many IPs is still a theoretical risk. Modest cap — a legit -// user verifies once. 
-const verifyLimit = ipRateLimit({ name: 'magic-link-verify', max: 30, windowMs: 60_000 }); - -const creatorSignupSchema = z.object({ - email: z.string().email(), - handle: z - .string() - .min(2) - .max(40) - .regex(/^[a-z0-9_]+$/, 'handle must be lowercase letters, digits, or _'), - name: z.string().min(2).max(80), -}); - -const vendorSignupSchema = z.object({ - email: z.string().email(), - name: z.string().min(2).max(120), - slug: z - .string() - .min(2) - .max(40) - .regex(/^[a-z0-9][a-z0-9-]*$/, 'slug must be lowercase letters, digits, or -'), - instanceUrl: z.string().url(), - instanceKey: z.string().min(8), - routerUrl: z.string().url().optional(), - description: z.string().max(1000).optional(), - websiteUrl: z.string().url().optional(), - logoUrl: z.string().url().optional(), -}); - -const signinSchema = z.object({ email: z.string().email() }); -const verifySchema = z.object({ token: z.string().min(8) }); - -function portalOrigin(): string { - return (process.env.PORTAL_URL ?? 'http://localhost:5673').replace(/\/$/, ''); -} - -function magicUrl(token: string, purpose: string): string { - return `${portalOrigin()}/auth/magic?token=${encodeURIComponent(token)}&purpose=${purpose}`; -} - -// -------- Creator signup -------- - -magicLinkRouter.post('/auth/creator/signup', mailAuthLimit, async (req, res) => { - const body = creatorSignupSchema.safeParse(req.body); - if (!body.success) return res.status(400).json({ error: 'invalid_body', detail: body.error.flatten() }); - - const email = body.data.email.toLowerCase(); - const handle = body.data.handle.toLowerCase(); - - const existing = await db(TABLES.NetworkCreator) - .where({ email }) - .orWhere({ handle }) - .first(); - if (existing) return res.status(409).json({ error: 'email_or_handle_taken' }); - - const claim: MagicLinkCreatorClaim = { kind: 'creator', handle, name: body.data.name }; - const issued = await issueMagicLink({ email, purpose: 'creator_signup', claim }); - - const tmpl = 
creatorSignupEmail(body.data.name, magicUrl(issued.plaintext, 'creator_signup')); - await getMailer().send({ - to: email, - subject: tmpl.subject, - text: tmpl.text, - html: tmpl.html, - tag: tmpl.tag, - metadata: { purpose: 'creator_signup', handle }, - }); - - res.json({ ok: true }); -}); - -// -------- Vendor signup -------- -// -// We verify the vendor's scoped API key against their own instance BEFORE -// issuing the magic link — no point emailing them a verification link -// only to fail at admin-approval time because the key doesn't work. - -magicLinkRouter.post('/auth/vendor/signup', mailAuthLimit, async (req, res) => { - const body = vendorSignupSchema.safeParse(req.body); - if (!body.success) return res.status(400).json({ error: 'invalid_body', detail: body.error.flatten() }); - - const email = body.data.email.toLowerCase(); - - const existing = await db(TABLES.NetworkVendor).where({ slug: body.data.slug }).first(); - if (existing) return res.status(409).json({ error: 'slug_taken' }); - - // Probe the instance's /auth/introspect with the pasted key. Reject if - // the key can't reach the instance or doesn't have the federation - // scopes (unrestricted admin keys are accepted but flagged in the UI). - const introspectUrl = `${body.data.instanceUrl.replace(/\/$/, '')}/auth/introspect`; - try { - const response = await safeFetch(introspectUrl, { - headers: { authorization: `Bearer ${body.data.instanceKey}` }, - }); - if (!response.ok) { - const text = await response.text(); - return res.status(400).json({ - error: 'instance_rejected_key', - status: response.status, - detail: text.slice(0, 300), - }); - } - const intro = (await response.json()) as Record; - const scopes = Array.isArray(intro.scopes) ? (intro.scopes as string[]) : null; - const unrestricted = intro.role === 'admin' && intro.unrestricted === true; - const missing = - scopes != null - ? 
(NETWORK_FEDERATION_SCOPES as readonly string[]).filter((s) => !scopes.includes(s)) - : []; - if (!unrestricted && (scopes == null || missing.length > 0)) { - return res.status(400).json({ - error: 'missing_scopes', - missing, - have: scopes ?? [], - }); - } - } catch (err: unknown) { - return res.status(400).json({ - error: 'instance_unreachable', - detail: err instanceof Error ? err.message : String(err), - }); - } - - const claim: MagicLinkVendorClaim = { - kind: 'vendor', - name: body.data.name, - slug: body.data.slug, - instanceUrl: body.data.instanceUrl.replace(/\/$/, ''), - instanceKeyCiphertext: encryptKey(body.data.instanceKey), - instanceKeyPrefix: body.data.instanceKey.slice(0, 8), - ...(body.data.routerUrl ? { routerUrl: body.data.routerUrl } : {}), - ...(body.data.description ? { description: body.data.description } : {}), - ...(body.data.websiteUrl ? { websiteUrl: body.data.websiteUrl } : {}), - ...(body.data.logoUrl ? { logoUrl: body.data.logoUrl } : {}), - }; - const issued = await issueMagicLink({ email, purpose: 'vendor_signup', claim }); - - const tmpl = vendorSignupEmail(body.data.name, magicUrl(issued.plaintext, 'vendor_signup')); - await getMailer().send({ - to: email, - subject: tmpl.subject, - text: tmpl.text, - html: tmpl.html, - tag: tmpl.tag, - metadata: { purpose: 'vendor_signup', slug: body.data.slug }, - }); - - res.json({ ok: true }); -}); - -// -------- Unified signin -------- -// -// One endpoint for humans. Looks up creator first, then vendor; issues a -// link for whichever role matches. Response is identical regardless of -// which (or neither) matches, so the endpoint doesn't leak whether an -// email is registered on the Network. 
- -magicLinkRouter.post('/auth/signin', mailAuthLimit, async (req, res) => { - const body = signinSchema.safeParse(req.body); - if (!body.success) return res.status(400).json({ error: 'invalid_body', detail: body.error.flatten() }); - const email = body.data.email.toLowerCase(); - - const creator = await db(TABLES.NetworkCreator).where({ email }).first(); - if (creator && creator.status === 'active') { - const issued = await issueMagicLink({ email, purpose: 'creator_signin' }); - const tmpl = creatorSigninEmail(magicUrl(issued.plaintext, 'creator_signin')); - await getMailer().send({ - to: email, - subject: tmpl.subject, - text: tmpl.text, - html: tmpl.html, - tag: tmpl.tag, - metadata: { purpose: 'creator_signin' }, - }); - return res.json({ ok: true }); - } - - // Vendors use email too; we need a way to tie vendors to an email. For - // now we assume vendor.description or a dedicated column — but we don't - // have vendor.email yet. We infer via MagicLinkToken history: find the - // most recent consumed vendor_signup token for this email and look up - // the vendor created from it. That keeps migrations light for MVP. - const vendor = await findVendorByEmail(email); - if (vendor && vendor.status === 'active') { - const issued = await issueMagicLink({ email, purpose: 'vendor_signin' }); - const tmpl = vendorSigninEmail(magicUrl(issued.plaintext, 'vendor_signin')); - await getMailer().send({ - to: email, - subject: tmpl.subject, - text: tmpl.text, - html: tmpl.html, - tag: tmpl.tag, - metadata: { purpose: 'vendor_signin', vendorId: vendor.id }, - }); - } - - // No-op on unknown / inactive — don't reveal which. - res.json({ ok: true }); -}); - -// Deprecated alias for older clients still calling /auth/creator/signin. -// Scoped to creators only — matches the pre-unified contract. 
-magicLinkRouter.post('/auth/creator/signin', mailAuthLimit, async (req, res) => { - const body = signinSchema.safeParse(req.body); - if (!body.success) return res.status(400).json({ error: 'invalid_body', detail: body.error.flatten() }); - const email = body.data.email.toLowerCase(); - - const creator = await db(TABLES.NetworkCreator).where({ email }).first(); - if (creator && creator.status === 'active') { - const issued = await issueMagicLink({ email, purpose: 'creator_signin' }); - const tmpl = creatorSigninEmail(magicUrl(issued.plaintext, 'creator_signin')); - await getMailer().send({ - to: email, - subject: tmpl.subject, - text: tmpl.text, - html: tmpl.html, - tag: tmpl.tag, - metadata: { purpose: 'creator_signin' }, - }); - } - res.json({ ok: true }); -}); - -// -------- Verify -------- - -magicLinkRouter.post('/auth/magic/verify', verifyLimit, async (req, res) => { - const body = verifySchema.safeParse(req.body); - if (!body.success) return res.status(400).json({ error: 'invalid_body', detail: body.error.flatten() }); - - const result = await consumeMagicLink(body.data.token); - if (!result.ok) return res.status(400).json({ error: result.error }); - - const token: MagicLinkTokenRow = result.token; - - if (token.purpose === 'creator_signup') { - return verifyCreatorSignup(token, res); - } - if (token.purpose === 'creator_signin') { - return verifyCreatorSignin(token, res); - } - if (token.purpose === 'vendor_signup') { - return verifyVendorSignup(token, res); - } - if (token.purpose === 'vendor_signin') { - return verifyVendorSignin(token, res); - } - res.status(400).json({ error: 'unknown_purpose' }); -}); - -async function verifyCreatorSignup(token: MagicLinkTokenRow, res: Parameters[1]>[1]) { - const claim = token.claim; - if (!claim || claim.kind !== 'creator') { - return res.status(400).json({ error: 'invalid_signup_claim' }); - } - - const collision = await db(TABLES.NetworkCreator) - .where({ email: token.email }) - .orWhere({ handle: claim.handle }) 
- .first(); - if (collision) return res.status(409).json({ error: 'email_or_handle_taken' }); - - const id = ulid(); - await db(TABLES.NetworkCreator).insert({ - id, - name: claim.name, - handle: claim.handle, - email: token.email, - bio: null, - avatarUrl: null, - platforms: JSON.stringify([]) as unknown as never, - defaultPromoCode: null, - status: 'active', - activatedAt: new Date(), - }); - const creator = (await db(TABLES.NetworkCreator).where({ id }).first())!; - - const session = await createSession({ principalKind: 'network_creator', principalId: creator.id }); - res.cookie(SESSION_COOKIE_NAME, session.plaintext, sessionCookieOptions()); - res.json({ - ok: true, - role: 'network_creator', - creator: { - id: creator.id, - name: creator.name, - handle: creator.handle, - email: creator.email, - avatarUrl: creator.avatarUrl, - defaultPromoCode: creator.defaultPromoCode, - status: creator.status, - }, - }); -} - -async function verifyCreatorSignin(token: MagicLinkTokenRow, res: Parameters[1]>[1]) { - const creator = await db(TABLES.NetworkCreator).where({ email: token.email }).first(); - if (!creator) return res.status(404).json({ error: 'creator_not_found' }); - if (creator.status !== 'active') return res.status(403).json({ error: 'creator_not_active' }); - - const session = await createSession({ principalKind: 'network_creator', principalId: creator.id }); - res.cookie(SESSION_COOKIE_NAME, session.plaintext, sessionCookieOptions()); - res.json({ - ok: true, - role: 'network_creator', - creator: { - id: creator.id, - name: creator.name, - handle: creator.handle, - email: creator.email, - avatarUrl: creator.avatarUrl, - defaultPromoCode: creator.defaultPromoCode, - status: creator.status, - }, - }); -} - -async function verifyVendorSignup(token: MagicLinkTokenRow, res: Parameters[1]>[1]) { - const claim = token.claim; - if (!claim || claim.kind !== 'vendor') { - return res.status(400).json({ error: 'invalid_signup_claim' }); - } - - const collision = await 
db(TABLES.NetworkVendor).where({ slug: claim.slug }).first(); - if (collision) return res.status(409).json({ error: 'slug_taken' }); - - const id = ulid(); - await db(TABLES.NetworkVendor).insert({ - id, - name: claim.name, - slug: claim.slug, - email: token.email, - websiteUrl: claim.websiteUrl ?? null, - logoUrl: claim.logoUrl ?? null, - description: claim.description ?? null, - instanceUrl: claim.instanceUrl, - // claim carries the ciphertext already — no round-trip through plaintext - instanceKeyCiphertext: claim.instanceKeyCiphertext, - instanceKeyPrefix: claim.instanceKeyPrefix, - routerUrl: claim.routerUrl ?? null, - status: 'pending', // admin still reviews the federation relationship - }); - - // No session yet — the vendor is pending. Returning a helpful message - // so the portal can show "admin is reviewing your application." - res.json({ - ok: true, - role: 'network_vendor', - status: 'pending', - vendor: { id, name: claim.name, slug: claim.slug }, - }); -} - -async function verifyVendorSignin(token: MagicLinkTokenRow, res: Parameters[1]>[1]) { - const vendor = await findVendorByEmail(token.email); - if (!vendor) return res.status(404).json({ error: 'vendor_not_found' }); - if (vendor.status !== 'active') return res.status(403).json({ error: 'vendor_not_active' }); - - const session = await createSession({ principalKind: 'network_vendor', principalId: vendor.id }); - res.cookie(SESSION_COOKIE_NAME, session.plaintext, sessionCookieOptions()); - res.json({ - ok: true, - role: 'network_vendor', - vendor: { - id: vendor.id, - name: vendor.name, - slug: vendor.slug, - logoUrl: vendor.logoUrl, - websiteUrl: vendor.websiteUrl, - status: vendor.status, - }, - }); -} - -/** - * Look up the vendor tied to a signup email. One email can theoretically - * own multiple vendors — we pick the most recently created active one - * and fall through to `vendor_not_active` if nothing is active. 
Callers - * that need to disambiguate between several active vendors should - * collect the slug from the user first. - */ -async function findVendorByEmail(email: string): Promise { - const vendors = await db(TABLES.NetworkVendor) - .where({ email: email.toLowerCase() }) - .orderBy('createdAt', 'desc'); - return vendors.find((v) => v.status === 'active') ?? vendors[0]; -} - -// -------- Signout -------- - -magicLinkRouter.post('/auth/signout', async (req, res) => { - const plaintext = req.cookies?.[SESSION_COOKIE_NAME]; - if (plaintext) { - const { resolveSession } = await import('../auth-sessions.js'); - const session = await resolveSession(plaintext); - if (session) await revokeSession(session.id); - } - res.clearCookie(SESSION_COOKIE_NAME, { path: '/' }); - res.json({ ok: true }); -}); - -// -------- Dev mailbox -------- - -magicLinkRouter.get('/dev/mailbox', requireAuth, requireAdmin, async (_req, res) => { - const messages = await db(TABLES.DevMessage).orderBy('createdAt', 'desc').limit(100); - res.json({ messages }); -}); diff --git a/apps/api/src/routes/network-creators.ts b/apps/api/src/routes/network-creators.ts deleted file mode 100644 index ff487e9..0000000 --- a/apps/api/src/routes/network-creators.ts +++ /dev/null @@ -1,102 +0,0 @@ -import { Router } from 'express'; -import { ulid } from 'ulid'; -import { TABLES, type NetworkCreatorRow } from '@openpartner/db'; -import { db } from '../db.js'; -import { createApiKeyRow, requireAdmin, requireAuth, requireNetworkCreator } from '../auth.js'; -import { creatorCreateSchema, creatorUpdateSchema } from '../network/validation.js'; - -export const networkCreatorsRouter = Router(); - -// Admin: list + activate. In a real production world this would be -// self-serve with email verification; MVP keeps a moderation queue. 
-networkCreatorsRouter.get('/network/creators', requireAuth, requireAdmin, async (_req, res) => { - const creators = await db(TABLES.NetworkCreator).orderBy('createdAt', 'desc'); - res.json({ creators }); -}); - -networkCreatorsRouter.post('/network/creators', requireAuth, requireAdmin, async (req, res) => { - const body = creatorCreateSchema.safeParse(req.body); - if (!body.success) return res.status(400).json({ error: 'invalid_body', detail: body.error.flatten() }); - - const id = ulid(); - try { - await db(TABLES.NetworkCreator).insert({ - id, - name: body.data.name, - handle: body.data.handle, - email: body.data.email, - bio: body.data.bio ?? null, - avatarUrl: body.data.avatarUrl ?? null, - platforms: JSON.stringify(body.data.platforms ?? []) as unknown as never, // jsonb - defaultPromoCode: body.data.defaultPromoCode ?? null, - status: 'pending', - }); - } catch (err: unknown) { - if (typeof err === 'object' && err !== null && (err as { code?: string }).code === '23505') { - return res.status(409).json({ error: 'handle_or_email_taken' }); - } - throw err; - } - - const key = await createApiKeyRow({ networkCreatorId: id, label: 'creator portal' }); - const creator = await db(TABLES.NetworkCreator).where({ id }).first(); - res.status(201).json({ creator, apiKey: key.plaintext }); -}); - -networkCreatorsRouter.post('/network/creators/:id/activate', requireAuth, requireAdmin, async (req, res) => { - const updated = await db(TABLES.NetworkCreator) - .where({ id: req.params.id }) - .update({ status: 'active', activatedAt: new Date() }) - .returning('*'); - if (updated.length === 0) return res.status(404).json({ error: 'creator_not_found' }); - res.json({ creator: updated[0] }); -}); - -// Creator self-view (own profile) — already in /auth/whoami but this is -// the canonical profile endpoint. 
-networkCreatorsRouter.get('/network/creators/me', requireAuth, requireNetworkCreator, async (req, res) => { - const p = req.principal!; - if (p.role !== 'network_creator') return res.status(403).json({ error: 'forbidden' }); - const creator = await db(TABLES.NetworkCreator).where({ id: p.networkCreatorId }).first(); - if (!creator) return res.status(404).json({ error: 'creator_not_found' }); - res.json({ creator }); -}); - -// Creator self-edit. Handle + email are intentionally NOT patchable: -// changing handle breaks share-URL references on vendor instances, and -// email is the magic-link identity. -networkCreatorsRouter.patch('/network/creators/me', requireAuth, requireNetworkCreator, async (req, res) => { - const p = req.principal!; - if (p.role !== 'network_creator') return res.status(403).json({ error: 'forbidden' }); - - const body = creatorUpdateSchema.safeParse(req.body); - if (!body.success) return res.status(400).json({ error: 'invalid_body', detail: body.error.flatten() }); - - const patch: Record = {}; - if (body.data.name !== undefined) patch.name = body.data.name; - if (body.data.bio !== undefined) patch.bio = body.data.bio; - if (body.data.avatarUrl !== undefined) patch.avatarUrl = body.data.avatarUrl; - if (body.data.defaultPromoCode !== undefined) patch.defaultPromoCode = body.data.defaultPromoCode; - if (body.data.platforms !== undefined) patch.platforms = JSON.stringify(body.data.platforms); - - if (Object.keys(patch).length === 0) { - const current = await db(TABLES.NetworkCreator).where({ id: p.networkCreatorId }).first(); - return res.json({ creator: current }); - } - - const [updated] = await db(TABLES.NetworkCreator) - .where({ id: p.networkCreatorId }) - .update(patch) - .returning('*'); - res.json({ creator: updated }); -}); - -// -------- Public directory: active creators (for vendors to browse) -------- - -networkCreatorsRouter.get('/network/directory/creators', async (_req, res) => { - const creators = await db(TABLES.NetworkCreator) 
- .where({ status: 'active' }) - .orderBy('createdAt', 'desc') - .select('id', 'name', 'handle', 'bio', 'avatarUrl', 'platforms', 'createdAt'); - res.json({ creators }); -}); diff --git a/apps/api/src/routes/network-earnings.ts b/apps/api/src/routes/network-earnings.ts deleted file mode 100644 index c3ab208..0000000 --- a/apps/api/src/routes/network-earnings.ts +++ /dev/null @@ -1,165 +0,0 @@ -/** - * Federated earnings view. - * - * For each active Partnership visible to the principal, we call the vendor's - * /partners/:id/dashboard (via stored admin key) and project the stats back - * into the Network UI. Attribution data stays on the vendor's instance — - * this is a read-only projection. - * - * Fan-out uses Promise.allSettled so a single unreachable vendor doesn't - * black out the whole page. Each partnership ships back with a status: - * ok — stats populated - * error — stats zeroed, `error` message set - * - * We group by vendorId first so we only decrypt each vendor's key once per - * request even if the creator has multiple partnerships with the same vendor. 
- */ - -import { Router } from 'express'; -import { - TABLES, - type NetworkVendorRow, - type OfferingRow, - type PartnershipRow, -} from '@openpartner/db'; -import { db } from '../db.js'; -import { requireAuth } from '../auth.js'; -import { fetchPartnerCommissions, fetchPartnerDashboard, type PartnerDashboardStats } from '../network/federation.js'; - -export const networkEarningsRouter = Router(); - -interface PartnershipEarning { - partnership: { - id: string; - vendorId: string; - vendorName: string; - offeringTitle: string; - vendorLinkKey: string; - publicShareUrl: string; - createdAt: string; - }; - status: 'ok' | 'error'; - error?: string; - stats: PartnerDashboardStats | null; -} - -networkEarningsRouter.get('/network/partnerships/earnings', requireAuth, async (req, res) => { - const p = req.principal!; - - const partnershipQuery = db(TABLES.Partnership).where({ status: 'active' }); - if (p.role === 'network_creator') partnershipQuery.andWhere({ creatorId: p.networkCreatorId }); - else if (p.role === 'network_vendor') partnershipQuery.andWhere({ vendorId: p.networkVendorId }); - else if (p.role !== 'admin') return res.status(403).json({ error: 'forbidden' }); - - const partnerships = await partnershipQuery.orderBy('createdAt', 'desc'); - if (partnerships.length === 0) { - return res.json({ partnerships: [], totals: emptyTotals() }); - } - - const vendorIds = Array.from(new Set(partnerships.map((p) => p.vendorId))); - const offeringIds = Array.from(new Set(partnerships.map((p) => p.offeringId))); - const [vendors, offerings] = await Promise.all([ - db(TABLES.NetworkVendor).whereIn('id', vendorIds), - db(TABLES.Offering).whereIn('id', offeringIds), - ]); - const vendorById = new Map(vendors.map((v) => [v.id, v])); - const offeringById = new Map(offerings.map((o) => [o.id, o])); - - const results = await Promise.all( - partnerships.map(async (pRow): Promise => { - const vendor = vendorById.get(pRow.vendorId); - const offering = 
offeringById.get(pRow.offeringId); - const base = { - id: pRow.id, - vendorId: pRow.vendorId, - vendorName: vendor?.name ?? 'Unknown vendor', - offeringTitle: offering?.title ?? 'Unknown offering', - vendorLinkKey: pRow.vendorLinkKey, - publicShareUrl: pRow.publicShareUrl, - createdAt: pRow.createdAt instanceof Date ? pRow.createdAt.toISOString() : String(pRow.createdAt), - }; - - if (!vendor) { - return { partnership: base, status: 'error', error: 'vendor_missing', stats: null }; - } - - try { - const stats = await fetchPartnerDashboard(vendor, pRow.vendorPartnerId); - return { partnership: base, status: 'ok', stats }; - } catch (err: unknown) { - return { - partnership: base, - status: 'error', - error: err instanceof Error ? err.message : String(err), - stats: null, - }; - } - }), - ); - - res.json({ - partnerships: results, - totals: computeTotals(results), - }); -}); - -// -------- Per-partnership commission drilldown -------- - -networkEarningsRouter.get('/network/partnerships/:id/commissions', requireAuth, async (req, res) => { - const p = req.principal!; - const partnership = await db(TABLES.Partnership).where({ id: req.params.id }).first(); - if (!partnership) return res.status(404).json({ error: 'not_found' }); - - const allowed = - p.role === 'admin' || - (p.role === 'network_creator' && partnership.creatorId === p.networkCreatorId) || - (p.role === 'network_vendor' && partnership.vendorId === p.networkVendorId); - if (!allowed) return res.status(403).json({ error: 'forbidden' }); - - const vendor = await db(TABLES.NetworkVendor).where({ id: partnership.vendorId }).first(); - if (!vendor) return res.status(404).json({ error: 'vendor_missing' }); - - try { - const commissions = await fetchPartnerCommissions(vendor, partnership.vendorPartnerId); - res.json({ commissions }); - } catch (err: unknown) { - res.status(502).json({ - error: 'vendor_unreachable', - detail: err instanceof Error ? 
err.message : String(err), - }); - } -}); - -function emptyTotals() { - return { - clicks: 0, - attributedEvents: 0, - attributedRevenue: 0, - commission: { accrued: 0, approved: 0, paid: 0, reversed: 0 }, - vendorCount: 0, - healthy: 0, - unreachable: 0, - }; -} - -function computeTotals(rows: PartnershipEarning[]) { - const totals = emptyTotals(); - const vendors = new Set(); - for (const r of rows) { - vendors.add(r.partnership.vendorId); - if (r.status === 'ok' && r.stats) { - totals.clicks += r.stats.clicks; - totals.attributedEvents += r.stats.attributedEvents; - totals.attributedRevenue += r.stats.attributedRevenue; - for (const [status, amount] of Object.entries(r.stats.commissionByStatus ?? {})) { - const bucket = totals.commission as Record; - bucket[status] = (bucket[status] ?? 0) + Number(amount ?? 0); - } - totals.healthy += 1; - } else { - totals.unreachable += 1; - } - } - totals.vendorCount = vendors.size; - return totals; -} diff --git a/apps/api/src/routes/network-offerings.ts b/apps/api/src/routes/network-offerings.ts deleted file mode 100644 index 7e0646d..0000000 --- a/apps/api/src/routes/network-offerings.ts +++ /dev/null @@ -1,128 +0,0 @@ -import { Router } from 'express'; -import { ulid } from 'ulid'; -import { TABLES, type NetworkVendorRow, type OfferingRow } from '@openpartner/db'; -import { db } from '../db.js'; -import { requireAuth, requireNetworkVendor } from '../auth.js'; -import { offeringCreateSchema, offeringUpdateSchema } from '../network/validation.js'; - -export const networkOfferingsRouter = Router(); - -// -------- Vendor: manage own offerings -------- - -networkOfferingsRouter.post('/network/offerings', requireAuth, requireNetworkVendor, async (req, res) => { - const p = req.principal!; - if (p.role !== 'network_vendor') return res.status(403).json({ error: 'forbidden' }); - - const body = offeringCreateSchema.safeParse(req.body); - if (!body.success) return res.status(400).json({ error: 'invalid_body', detail: 
body.error.flatten() }); - - const vendor = await db(TABLES.NetworkVendor).where({ id: p.networkVendorId }).first(); - if (!vendor || vendor.status !== 'active') { - return res.status(403).json({ error: 'vendor_not_active' }); - } - - const id = ulid(); - await db(TABLES.Offering).insert({ - id, - vendorId: vendor.id, - title: body.data.title, - productUrl: body.data.productUrl, - description: body.data.description ?? null, - heroImageUrl: body.data.heroImageUrl ?? null, - vendorCampaignId: body.data.vendorCampaignId, - terms: body.data.terms as never, // jsonb - published: body.data.published ?? false, - }); - - const offering = await db(TABLES.Offering).where({ id }).first(); - res.status(201).json({ offering }); -}); - -networkOfferingsRouter.patch('/network/offerings/:id', requireAuth, requireNetworkVendor, async (req, res) => { - const p = req.principal!; - if (p.role !== 'network_vendor') return res.status(403).json({ error: 'forbidden' }); - - const body = offeringUpdateSchema.safeParse(req.body); - if (!body.success) return res.status(400).json({ error: 'invalid_body', detail: body.error.flatten() }); - - const existing = await db(TABLES.Offering).where({ id: req.params.id }).first(); - if (!existing) return res.status(404).json({ error: 'offering_not_found' }); - if (existing.vendorId !== p.networkVendorId) return res.status(403).json({ error: 'not_yours' }); - - const patch: Partial = { updatedAt: new Date() }; - if (body.data.title !== undefined) patch.title = body.data.title; - if (body.data.productUrl !== undefined) patch.productUrl = body.data.productUrl; - if (body.data.description !== undefined) patch.description = body.data.description ?? null; - if (body.data.heroImageUrl !== undefined) patch.heroImageUrl = body.data.heroImageUrl ?? 
null; - if (body.data.vendorCampaignId !== undefined) patch.vendorCampaignId = body.data.vendorCampaignId; - if (body.data.terms !== undefined) patch.terms = body.data.terms as never; - if (body.data.published !== undefined) patch.published = body.data.published; - - await db(TABLES.Offering).where({ id: existing.id }).update(patch); - const fresh = await db(TABLES.Offering).where({ id: existing.id }).first(); - res.json({ offering: fresh }); -}); - -networkOfferingsRouter.get('/network/offerings/mine', requireAuth, requireNetworkVendor, async (req, res) => { - const p = req.principal!; - if (p.role !== 'network_vendor') return res.status(403).json({ error: 'forbidden' }); - const offerings = await db(TABLES.Offering) - .where({ vendorId: p.networkVendorId }) - .orderBy('createdAt', 'desc'); - res.json({ offerings }); -}); - -// -------- Public: browse the directory -------- - -networkOfferingsRouter.get('/network/directory/offerings', async (_req, res) => { - const rows = (await db(TABLES.Offering) - .join(TABLES.NetworkVendor, `${TABLES.NetworkVendor}.id`, `${TABLES.Offering}.vendorId`) - .where(`${TABLES.Offering}.published`, true) - .andWhere(`${TABLES.NetworkVendor}.status`, 'active') - .orderBy(`${TABLES.Offering}.createdAt`, 'desc') - .select( - `${TABLES.Offering}.id as id`, - `${TABLES.Offering}.title as title`, - `${TABLES.Offering}.description as description`, - `${TABLES.Offering}.heroImageUrl as heroImageUrl`, - `${TABLES.Offering}.productUrl as productUrl`, - `${TABLES.Offering}.terms as terms`, - `${TABLES.Offering}.createdAt as createdAt`, - `${TABLES.NetworkVendor}.id as vendorId`, - `${TABLES.NetworkVendor}.name as vendorName`, - `${TABLES.NetworkVendor}.slug as vendorSlug`, - `${TABLES.NetworkVendor}.logoUrl as vendorLogoUrl`, - `${TABLES.NetworkVendor}.routerUrl as vendorRouterUrl`, - `${TABLES.NetworkVendor}.instanceUrl as vendorInstanceUrl`, - )) as Array>; - - res.json({ offerings: rows }); -}); - 
-networkOfferingsRouter.get('/network/directory/offerings/:id', async (req, res) => { - const row = (await db(TABLES.Offering) - .join(TABLES.NetworkVendor, `${TABLES.NetworkVendor}.id`, `${TABLES.Offering}.vendorId`) - .where(`${TABLES.Offering}.id`, req.params.id) - .andWhere(`${TABLES.Offering}.published`, true) - .andWhere(`${TABLES.NetworkVendor}.status`, 'active') - .first( - `${TABLES.Offering}.id as id`, - `${TABLES.Offering}.title as title`, - `${TABLES.Offering}.description as description`, - `${TABLES.Offering}.heroImageUrl as heroImageUrl`, - `${TABLES.Offering}.productUrl as productUrl`, - `${TABLES.Offering}.terms as terms`, - `${TABLES.Offering}.createdAt as createdAt`, - `${TABLES.NetworkVendor}.id as vendorId`, - `${TABLES.NetworkVendor}.name as vendorName`, - `${TABLES.NetworkVendor}.slug as vendorSlug`, - `${TABLES.NetworkVendor}.logoUrl as vendorLogoUrl`, - `${TABLES.NetworkVendor}.description as vendorDescription`, - `${TABLES.NetworkVendor}.websiteUrl as vendorWebsiteUrl`, - `${TABLES.NetworkVendor}.routerUrl as vendorRouterUrl`, - `${TABLES.NetworkVendor}.instanceUrl as vendorInstanceUrl`, - )) as Record | undefined; - - if (!row) return res.status(404).json({ error: 'not_found' }); - res.json({ offering: row }); -}); diff --git a/apps/api/src/routes/network-partner.ts b/apps/api/src/routes/network-partner.ts new file mode 100644 index 0000000..1895a8e --- /dev/null +++ b/apps/api/src/routes/network-partner.ts @@ -0,0 +1,190 @@ +/** + * Partner-role proxy onto the OpenPartner Network. + * + * The Network used to host its own creator-facing SPA. We collapsed that + * surface into the openpartner portal so creators have a single home (the + * vendor instance they signed up at). 
These routes are how that portal + * reaches Network APIs without ever leaking the vendor's bearer token to + * the browser — the partner's session principal authenticates locally, + * and we proxy server-side using the encrypted vendorToken in + * network_membership Config plus an x-act-as-vendor-partner header. + * + * Network's requireCreator middleware accepts that header (when paired + * with an active vendor bearer) and resolves the Creator via + * VendorAffiliation(vendorId, vendorPartnerId), so the same handler code + * serves both browser-creator and proxied-from-vendor cases. + */ + +import { Router, type Request, type Response } from 'express'; +import { z } from 'zod'; +import type { Knex } from 'knex'; +import { requireAuth } from '../auth.js'; +import { tenantOf } from '../tenancy.js'; +import { partnerProxy, NetworkProxyError } from '../network-client.js'; + +export const networkPartnerRouter = Router(); + +function requirePartnerPrincipal(req: Request, res: Response): string | null { + const p = req.principal; + if (!p) { + res.status(401).json({ error: 'unauthorized' }); + return null; + } + if (p.role !== 'partner' || !p.partnerId) { + res.status(403).json({ error: 'partner_only' }); + return null; + } + return p.partnerId; +} + +async function proxy( + req: Request, + res: Response, + fn: (db: Knex, tenantId: string) => Promise, + successStatus = 200, + /** Shape returned when the partner has no Network identity yet + * (Network 403 invalid_acting_context). Lets the SPA show an empty + * state instead of an error banner for the common case where the + * brand exists on the Network but auto-enroll never pushed this + * particular partner. 
*/ + notFederatedFallback?: () => unknown, +): Promise { + const { db, tenantId } = tenantOf(req); + try { + const out = await fn(db, tenantId); + res.status(successStatus).json(out); + } catch (err) { + if (err instanceof NetworkProxyError) { + if (notFederatedFallback && (err.status === 403 || err.status === 404) && err.message.includes('invalid_acting_context')) { + res.json(notFederatedFallback()); + return; + } + res.status(err.status).json({ error: 'network_call_failed', detail: err.message }); + return; + } + throw err; + } +} + +// ---------- Public discovery (no acting partner needed) ---------- +// +// These routes don't require partner role — anyone authenticated on the +// vendor instance can browse. We still gate with requireAuth so we have a +// tenant context for the vendorToken. + +networkPartnerRouter.get('/network/discover', requireAuth, async (req, res) => { + const qs = new URLSearchParams(); + if (typeof req.query.q === 'string') qs.set('q', req.query.q); + if (typeof req.query.sort === 'string') qs.set('sort', req.query.sort); + return proxy(req, res, (db, tenantId) => partnerProxy.listOfferings(db, tenantId, qs.toString())); +}); + +networkPartnerRouter.get('/network/offerings/:id', requireAuth, async (req, res) => + proxy(req, res, (db, tenantId) => partnerProxy.getOffering(db, tenantId, req.params.id!)), +); + +networkPartnerRouter.get('/network/vendors/:id', requireAuth, async (req, res) => + proxy(req, res, (db, tenantId) => partnerProxy.getVendor(db, tenantId, req.params.id!)), +); + +// ---------- Acting-as-creator (partner-only) ---------- + +const applySchema = z.object({ + message: z.string().max(2000).optional(), + preferredSlug: z + .string() + .trim() + .min(2) + .max(40) + .regex(/^[a-zA-Z0-9_-]+$/) + .optional(), +}); + +networkPartnerRouter.post('/network/offerings/:id/apply', requireAuth, async (req, res) => { + const partnerId = requirePartnerPrincipal(req, res); + if (!partnerId) return; + const body = 
applySchema.safeParse(req.body ?? {}); + if (!body.success) return res.status(400).json({ error: 'invalid_body', detail: body.error.flatten() }); + return proxy( + req, + res, + (db, tenantId) => partnerProxy.applyToOffering(db, tenantId, partnerId, req.params.id!, body.data), + 201, + ); +}); + +networkPartnerRouter.get('/network/me/affiliations', requireAuth, async (req, res) => { + const partnerId = requirePartnerPrincipal(req, res); + if (!partnerId) return; + return proxy( + req, + res, + (db, tenantId) => partnerProxy.listMyAffiliations(db, tenantId, partnerId), + 200, + () => ({ affiliations: [] }), + ); +}); + +networkPartnerRouter.get('/network/me/affiliations/:id/earnings', requireAuth, async (req, res) => { + const partnerId = requirePartnerPrincipal(req, res); + if (!partnerId) return; + return proxy(req, res, (db, tenantId) => + partnerProxy.getAffiliationEarnings(db, tenantId, partnerId, req.params.id!), + ); +}); + +networkPartnerRouter.get('/network/me/requests', requireAuth, async (req, res) => { + const partnerId = requirePartnerPrincipal(req, res); + if (!partnerId) return; + return proxy( + req, + res, + (db, tenantId) => partnerProxy.listMyRequests(db, tenantId, partnerId), + 200, + () => ({ requests: [] }), + ); +}); + +networkPartnerRouter.post('/network/me/requests/:id/cancel', requireAuth, async (req, res) => { + const partnerId = requirePartnerPrincipal(req, res); + if (!partnerId) return; + return proxy(req, res, (db, tenantId) => partnerProxy.cancelRequest(db, tenantId, partnerId, req.params.id!)); +}); + +networkPartnerRouter.get('/network/me/profile', requireAuth, async (req, res) => { + const partnerId = requirePartnerPrincipal(req, res); + if (!partnerId) return; + // Profile fallback returns null instead of an empty object so the + // SPA can show a "not on the Network yet" message rather than a + // half-rendered editable form. 
+ return proxy( + req, + res, + (db, tenantId) => partnerProxy.getMyProfile(db, tenantId, partnerId), + 200, + () => ({ notFederated: true }), + ); +}); + +const updateProfileSchema = z.object({ + name: z.string().trim().min(1).max(120).optional(), + handle: z + .string() + .trim() + .min(2) + .max(40) + .regex(/^[a-zA-Z0-9_-]+$/) + .nullable() + .optional(), + avatarUrl: z.string().url().nullable().optional(), + bio: z.string().max(2000).nullable().optional(), + profile: z.record(z.unknown()).optional(), +}); + +networkPartnerRouter.patch('/network/me/profile', requireAuth, async (req, res) => { + const partnerId = requirePartnerPrincipal(req, res); + if (!partnerId) return; + const body = updateProfileSchema.safeParse(req.body ?? {}); + if (!body.success) return res.status(400).json({ error: 'invalid_body', detail: body.error.flatten() }); + return proxy(req, res, (db, tenantId) => partnerProxy.updateMyProfile(db, tenantId, partnerId, body.data)); +}); diff --git a/apps/api/src/routes/network-requests.ts b/apps/api/src/routes/network-requests.ts deleted file mode 100644 index c104630..0000000 --- a/apps/api/src/routes/network-requests.ts +++ /dev/null @@ -1,246 +0,0 @@ -import { Router } from 'express'; -import { ulid } from 'ulid'; -import { - TABLES, - type NetworkCreatorRow, - type NetworkVendorRow, - type OfferingRow, - type PartnershipRequestRow, - type PartnershipRow, -} from '@openpartner/db'; -import { db } from '../db.js'; -import { requireAuth, requireNetworkCreator, requireNetworkVendor } from '../auth.js'; -import { dispatchEvent } from '../webhook-dispatcher.js'; -import { z } from 'zod'; -import { promoCodeSchema, requestCreateSchema, requestDecideSchema } from '../network/validation.js'; - -const inviteSchema = z.object({ - offeringId: z.string().min(1), - creatorId: z.string().min(1), - message: z.string().max(2000).optional(), - promoCode: promoCodeSchema.optional(), -}); -import { provisionPartnerOnVendor } from '../network/federation.js'; - 
-export const networkRequestsRouter = Router(); - -// -------- Creator: apply to an offering -------- - -networkRequestsRouter.post('/network/requests', requireAuth, requireNetworkCreator, async (req, res) => { - const p = req.principal!; - if (p.role !== 'network_creator') return res.status(403).json({ error: 'forbidden' }); - - const body = requestCreateSchema.safeParse(req.body); - if (!body.success) return res.status(400).json({ error: 'invalid_body', detail: body.error.flatten() }); - - const creator = await db(TABLES.NetworkCreator).where({ id: p.networkCreatorId }).first(); - if (!creator || creator.status !== 'active') return res.status(403).json({ error: 'creator_not_active' }); - - const offering = await db(TABLES.Offering).where({ id: body.data.offeringId, published: true }).first(); - if (!offering) return res.status(404).json({ error: 'offering_not_found' }); - - // Fall back chain: request override → creator default → handle. - const promoCode = body.data.promoCode ?? creator.defaultPromoCode ?? creator.handle; - - const id = ulid(); - try { - await db(TABLES.PartnershipRequest).insert({ - id, - offeringId: offering.id, - vendorId: offering.vendorId, - creatorId: creator.id, - direction: 'creator_to_vendor', - message: body.data.message ?? 
null, - promoCode, - status: 'pending', - }); - } catch (err: unknown) { - if (typeof err === 'object' && err !== null && (err as { code?: string }).code === '23505') { - return res.status(409).json({ error: 'already_requested' }); - } - throw err; - } - const request = await db(TABLES.PartnershipRequest).where({ id }).first(); - res.status(201).json({ request }); -}); - -// -------- Vendor: invite a creator -------- - -networkRequestsRouter.post('/network/invites', requireAuth, requireNetworkVendor, async (req, res) => { - const p = req.principal!; - if (p.role !== 'network_vendor') return res.status(403).json({ error: 'forbidden' }); - - const body = inviteSchema.safeParse(req.body); - if (!body.success) return res.status(400).json({ error: 'invalid_body', detail: body.error.flatten() }); - - const offering = await db(TABLES.Offering).where({ id: body.data.offeringId }).first(); - if (!offering) return res.status(404).json({ error: 'offering_not_found' }); - if (offering.vendorId !== p.networkVendorId) return res.status(403).json({ error: 'not_yours' }); - - const creator = await db(TABLES.NetworkCreator).where({ id: body.data.creatorId }).first(); - if (!creator) return res.status(404).json({ error: 'creator_not_found' }); - - const promoCode = body.data.promoCode ?? creator.defaultPromoCode ?? creator.handle; - - const id = ulid(); - try { - await db(TABLES.PartnershipRequest).insert({ - id, - offeringId: offering.id, - vendorId: offering.vendorId, - creatorId: creator.id, - direction: 'vendor_to_creator', - message: body.data.message ?? 
null, - promoCode, - status: 'pending', - }); - } catch (err: unknown) { - if (typeof err === 'object' && err !== null && (err as { code?: string }).code === '23505') { - return res.status(409).json({ error: 'already_invited' }); - } - throw err; - } - const request = await db(TABLES.PartnershipRequest).where({ id }).first(); - res.status(201).json({ request }); -}); - -// -------- Lists -------- - -networkRequestsRouter.get('/network/requests/mine', requireAuth, async (req, res) => { - const p = req.principal!; - const q = db(TABLES.PartnershipRequest).orderBy('createdAt', 'desc'); - if (p.role === 'network_vendor') q.where({ vendorId: p.networkVendorId }); - else if (p.role === 'network_creator') q.where({ creatorId: p.networkCreatorId }); - else if (p.role !== 'admin') return res.status(403).json({ error: 'forbidden' }); - const requests = await q; - res.json({ requests }); -}); - -// -------- Vendor: approve (federates) or reject -------- - -networkRequestsRouter.post('/network/requests/:id/approve', requireAuth, requireNetworkVendor, async (req, res) => { - const p = req.principal!; - if (p.role !== 'network_vendor') return res.status(403).json({ error: 'forbidden' }); - - const body = requestDecideSchema.safeParse(req.body ?? {}); - if (!body.success) return res.status(400).json({ error: 'invalid_body', detail: body.error.flatten() }); - - const reqRow = await db(TABLES.PartnershipRequest).where({ id: req.params.id }).first(); - if (!reqRow) return res.status(404).json({ error: 'request_not_found' }); - if (reqRow.vendorId !== p.networkVendorId) return res.status(403).json({ error: 'not_yours' }); - if (reqRow.status !== 'pending') return res.status(409).json({ error: 'not_pending' }); - - // Claim the request atomically before federating. Two concurrent - // approves would both see status='pending' above; the conditional - // update below only succeeds for the first — the loser returns 409. 
- // The intermediate 'approving' status is never returned from vendor - // APIs (the loser never sees it), but it keeps the ledger honest. - const claimed = await db(TABLES.PartnershipRequest) - .where({ id: reqRow.id, status: 'pending' }) - .update({ status: 'approving' }); - if (claimed === 0) { - return res.status(409).json({ error: 'not_pending' }); - } - - const [vendor, creator, offering] = await Promise.all([ - db(TABLES.NetworkVendor).where({ id: reqRow.vendorId }).first(), - db(TABLES.NetworkCreator).where({ id: reqRow.creatorId }).first(), - db(TABLES.Offering).where({ id: reqRow.offeringId }).first(), - ]); - if (!vendor || !creator || !offering) { - // Release the claim so a fix-up can retry. - await db(TABLES.PartnershipRequest) - .where({ id: reqRow.id, status: 'approving' }) - .update({ status: 'pending' }); - return res.status(500).json({ error: 'missing_related_rows' }); - } - - let federated; - try { - federated = await provisionPartnerOnVendor({ - vendor, - offering, - creator: { - name: creator.name, - email: creator.email, - handle: creator.handle, - promoCode: reqRow.promoCode, - }, - }); - } catch (err: unknown) { - // Federation failed — release the claim so the vendor can retry. - await db(TABLES.PartnershipRequest) - .where({ id: reqRow.id, status: 'approving' }) - .update({ status: 'pending' }); - const msg = err instanceof Error ? err.message : String(err); - return res.status(502).json({ error: 'federation_failed', detail: msg }); - } - - const partnershipId = ulid(); - await db.transaction(async (trx) => { - await trx(TABLES.PartnershipRequest) - .where({ id: reqRow.id }) - .update({ - status: 'approved', - decidedAt: new Date(), - decisionNote: body.data.decisionNote ?? 
null, - }); - await trx(TABLES.Partnership).insert({ - id: partnershipId, - requestId: reqRow.id, - offeringId: offering.id, - vendorId: vendor.id, - creatorId: creator.id, - vendorPartnerId: federated.partnerId, - vendorLinkKey: federated.linkKey, - publicShareUrl: federated.publicShareUrl, - status: 'active', - }); - }); - - const partnership = await db(TABLES.Partnership).where({ id: partnershipId }).first(); - if (partnership) { - dispatchEvent('partnership.approved', { - partnershipId: partnership.id, - requestId: reqRow.id, - offeringId: partnership.offeringId, - vendorId: partnership.vendorId, - creatorId: partnership.creatorId, - vendorPartnerId: partnership.vendorPartnerId, - vendorLinkKey: partnership.vendorLinkKey, - publicShareUrl: partnership.publicShareUrl, - }); - } - res.json({ partnership, federated }); -}); - -networkRequestsRouter.post('/network/requests/:id/reject', requireAuth, requireNetworkVendor, async (req, res) => { - const p = req.principal!; - if (p.role !== 'network_vendor') return res.status(403).json({ error: 'forbidden' }); - - const body = requestDecideSchema.safeParse(req.body ?? {}); - if (!body.success) return res.status(400).json({ error: 'invalid_body', detail: body.error.flatten() }); - - const reqRow = await db(TABLES.PartnershipRequest).where({ id: req.params.id }).first(); - if (!reqRow) return res.status(404).json({ error: 'request_not_found' }); - if (reqRow.vendorId !== p.networkVendorId) return res.status(403).json({ error: 'not_yours' }); - if (reqRow.status !== 'pending') return res.status(409).json({ error: 'not_pending' }); - - const updated = await db(TABLES.PartnershipRequest) - .where({ id: reqRow.id }) - .update({ status: 'rejected', decidedAt: new Date(), decisionNote: body.data.decisionNote ?? 
null }) - .returning('*'); - res.json({ request: updated[0] }); -}); - -// -------- Partnerships list -------- - -networkRequestsRouter.get('/network/partnerships/mine', requireAuth, async (req, res) => { - const p = req.principal!; - const q = db(TABLES.Partnership).orderBy('createdAt', 'desc'); - if (p.role === 'network_vendor') q.where({ vendorId: p.networkVendorId }); - else if (p.role === 'network_creator') q.where({ creatorId: p.networkCreatorId }); - else if (p.role !== 'admin') return res.status(403).json({ error: 'forbidden' }); - const partnerships = await q; - res.json({ partnerships }); -}); diff --git a/apps/api/src/routes/network-vendors.ts b/apps/api/src/routes/network-vendors.ts deleted file mode 100644 index f424b7a..0000000 --- a/apps/api/src/routes/network-vendors.ts +++ /dev/null @@ -1,168 +0,0 @@ -import { Router } from 'express'; -import { z } from 'zod'; -import { ulid } from 'ulid'; -import { TABLES, type NetworkVendorRow } from '@openpartner/db'; -import { db } from '../db.js'; -import { createApiKeyRow, requireAdmin, requireAuth, requireNetworkVendor } from '../auth.js'; -import { encryptKey } from '../network/crypto.js'; -import { safeFetch } from '../network/safe-fetch.js'; -import { vendorCreateSchema } from '../network/validation.js'; -import { NETWORK_FEDERATION_SCOPES } from './api-keys.js'; - -export const networkVendorsRouter = Router(); - -const verifyKeySchema = z.object({ - instanceUrl: z.string().url(), - instanceKey: z.string().min(8), -}); - -/** - * Server-side introspection helper. Before an admin registers a vendor, - * the onboarding UI calls this to check the key the vendor pasted. We - * hit the vendor's own /auth/introspect with that key and report back. - * - * Why this exists: we strongly prefer vendors hand us a SCOPED key with - * only the federation permissions (partners:write, links:write, - * partners:read, commissions:read). 
If they paste a full admin key we - * want to warn them so they can go mint a scoped one instead. - */ -// Intentionally open (no auth) — the vendor is pre-signup and doesn't -// have an account yet. SSRF surface is closed via safeFetch: URL must -// be http(s), hostname must resolve to a public IP (unless the operator -// opts in with NETWORK_ALLOW_PRIVATE_HOSTS=1). We only proxy GET -// /auth/introspect (narrow path), not arbitrary URLs. -networkVendorsRouter.post('/network/vendors/verify-key', async (req, res) => { - const body = verifyKeySchema.safeParse(req.body); - if (!body.success) return res.status(400).json({ error: 'invalid_body', detail: body.error.flatten() }); - - const { instanceUrl, instanceKey } = body.data; - const url = `${instanceUrl.replace(/\/$/, '')}/auth/introspect`; - try { - const response = await safeFetch(url, { headers: { authorization: `Bearer ${instanceKey}` } }); - const text = await response.text(); - if (!response.ok) { - return res.status(502).json({ - error: 'instance_rejected_key', - status: response.status, - detail: text.slice(0, 300), - }); - } - const introspect = text ? (JSON.parse(text) as Record) : {}; - - const required = [...NETWORK_FEDERATION_SCOPES] as string[]; - const scopes = Array.isArray(introspect.scopes) ? (introspect.scopes as string[]) : null; - const missing = scopes ? required.filter((s) => !scopes.includes(s)) : []; - const unrestricted = introspect.role === 'admin' && introspect.unrestricted === true; - - res.json({ - ok: true, - instanceUrl, - introspect, - recommended: required, - missing, - unrestricted, - // Green — "good to register": - acceptable: (scopes != null && missing.length === 0) || unrestricted, - }); - } catch (err: unknown) { - res.status(502).json({ - error: 'instance_unreachable', - detail: err instanceof Error ? 
err.message : String(err), - }); - } -}); - -// -------- Admin: list + create + activate -------- - -networkVendorsRouter.get('/network/vendors', requireAuth, requireAdmin, async (_req, res) => { - const vendors = await db(TABLES.NetworkVendor).orderBy('createdAt', 'desc'); - res.json({ vendors: vendors.map(stripKey) }); -}); - -// Vendor self-registration is admin-gated for MVP — keeps quality high -// before we have Stripe-based paid tiers on the Network. -networkVendorsRouter.post('/network/vendors', requireAuth, requireAdmin, async (req, res) => { - const body = vendorCreateSchema.safeParse(req.body); - if (!body.success) return res.status(400).json({ error: 'invalid_body', detail: body.error.flatten() }); - - const id = ulid(); - const prefix = body.data.instanceKey.slice(0, 8); - const ciphertext = encryptKey(body.data.instanceKey); - - try { - await db(TABLES.NetworkVendor).insert({ - id, - name: body.data.name, - slug: body.data.slug, - // Admin-created vendors are pre-verified, so email isn't load-bearing; - // the magic-link signin path still works once the admin sets one. - email: (body.data.email ?? `admin+${body.data.slug}@${new URL(body.data.instanceUrl).hostname}`).toLowerCase(), - websiteUrl: body.data.websiteUrl ?? null, - logoUrl: body.data.logoUrl ?? null, - description: body.data.description ?? null, - instanceUrl: body.data.instanceUrl.replace(/\/$/, ''), - instanceKeyCiphertext: ciphertext, - instanceKeyPrefix: prefix, - routerUrl: body.data.routerUrl ? body.data.routerUrl.replace(/\/$/, '') : null, - status: 'pending', - }); - } catch (err: unknown) { - if (typeof err === 'object' && err !== null && (err as { code?: string }).code === '23505') { - return res.status(409).json({ error: 'slug_taken' }); - } - throw err; - } - - // Issue a vendor-scoped API key so the merchant can sign in to the - // Network-side UI without needing admin rights. 
- const key = await createApiKeyRow({ networkVendorId: id, label: 'vendor portal' }); - - const vendor = await db(TABLES.NetworkVendor).where({ id }).first(); - res.status(201).json({ - vendor: stripKey(vendor!), - apiKey: key.plaintext, // shown once - }); -}); - -networkVendorsRouter.post('/network/vendors/:id/activate', requireAuth, requireAdmin, async (req, res) => { - const updated = await db(TABLES.NetworkVendor) - .where({ id: req.params.id }) - .update({ status: 'active', activatedAt: new Date() }) - .returning('*'); - if (updated.length === 0) return res.status(404).json({ error: 'vendor_not_found' }); - res.json({ vendor: stripKey(updated[0]!) }); -}); - -networkVendorsRouter.post('/network/vendors/:id/suspend', requireAuth, requireAdmin, async (req, res) => { - const updated = await db(TABLES.NetworkVendor) - .where({ id: req.params.id }) - .update({ status: 'suspended' }) - .returning('*'); - if (updated.length === 0) return res.status(404).json({ error: 'vendor_not_found' }); - res.json({ vendor: stripKey(updated[0]!) 
}); -}); - -// -------- Vendor: view self -------- - -networkVendorsRouter.get('/network/vendors/me', requireAuth, requireNetworkVendor, async (req, res) => { - const p = req.principal!; - if (p.role !== 'network_vendor') return res.status(403).json({ error: 'forbidden' }); - const vendor = await db(TABLES.NetworkVendor).where({ id: p.networkVendorId }).first(); - if (!vendor) return res.status(404).json({ error: 'vendor_not_found' }); - res.json({ vendor: stripKey(vendor) }); -}); - -// -------- Public-ish: browse active vendors -------- - -networkVendorsRouter.get('/network/directory/vendors', async (_req, res) => { - const vendors = await db(TABLES.NetworkVendor) - .where({ status: 'active' }) - .orderBy('createdAt', 'desc') - .select('id', 'name', 'slug', 'websiteUrl', 'logoUrl', 'description'); - res.json({ vendors }); -}); - -function stripKey(v: NetworkVendorRow): Omit & { instanceKeyPrefix: string } { - const { instanceKeyCiphertext: _omit, ...rest } = v; - return rest; -} diff --git a/apps/api/src/routes/onboarding.ts b/apps/api/src/routes/onboarding.ts new file mode 100644 index 0000000..0c0a8b0 --- /dev/null +++ b/apps/api/src/routes/onboarding.ts @@ -0,0 +1,102 @@ +/** + * Brand admin onboarding state. + * + * Single GET that the Dashboard's "Getting started" card consumes. + * We aggregate counts + a Network-connected flag here so the card + * doesn't have to fan out four separate queries on every render. + * + * Response is intentionally booleans + small ints — every item the + * UI shows is something the admin can check off without the response + * giving them lookup IDs to chase. 
+ */ + +import { Router } from 'express'; +import { TABLES, type CampaignRow, type PartnerRow } from '@openpartner/db'; +import { requireAdmin, requireAuth } from '../auth.js'; +import { tenantOf } from '../tenancy.js'; +import { getNetworkMembership, networkProxy, NetworkProxyError } from '../network-client.js'; +import { getTenantBillingState } from '../billing-plan.js'; + +export const onboardingRouter = Router(); + +interface BrandOnboardingStatus { + brandInfoComplete: boolean; + campaignCount: number; + networkConnected: boolean; + offeringPublishedCount: number; + partnerCount: number; + /** True when billing is set up OR not required (selfhost / enterprise). */ + billingReady: boolean; + /** True once everything actionable is done — card hides. */ + complete: boolean; +} + +onboardingRouter.get('/admin/onboarding-status', requireAuth, requireAdmin, async (req, res) => { + const { db, tenantId } = tenantOf(req); + + // Brand info is "complete" if either program_settings.programName is + // explicitly set OR the Tenant has a displayName from signup. We + // accept the signup value because re-typing it in Settings just to + // satisfy a checkbox is dumb. + const programRow = await db(TABLES.Config).where({ key: 'program_settings' }).first(); + const programName = ((programRow?.value as { programName?: string })?.programName ?? '').trim(); + const tenant = await db('Tenant').where({ id: tenantId }).first(['displayName']); + const displayName = ((tenant?.displayName as string | undefined) ?? '').trim(); + const brandInfoComplete = programName.length > 0 || displayName.length > 0; + + const campaignRows = await db(TABLES.Campaign).count>({ count: '*' }); + const campaignCount = Number(campaignRows[0]?.count ?? 0); + + const partnerRows = await db(TABLES.Partner) + .whereNull('revokedAt') + .count>({ count: '*' }); + const partnerCount = Number(partnerRows[0]?.count ?? 
0); + + const membership = await getNetworkMembership(db, tenantId); + const networkConnected = !!(membership?.enabled && membership.vendorTokenCiphertext); + + // Offering count: only when connected. Network would 503 otherwise + // and we don't want a transient Network outage to block onboarding. + let offeringPublishedCount = 0; + if (networkConnected) { + try { + const r = await networkProxy.listOfferings(db, tenantId); + offeringPublishedCount = Array.isArray(r.offerings) + ? r.offerings.filter((o: unknown) => (o as { published?: boolean }).published).length + : 0; + } catch (err) { + if (!(err instanceof NetworkProxyError)) throw err; + // Leave as 0; surface as "not done yet" rather than failing the + // whole probe because the Network can't be reached. + } + } + + // Billing readiness: selfhost has no billing, so always "ready". + // Enterprise tenants are billed out of band — we treat as "ready" + // since there's no Checkout flow for them. Everyone else needs an + // active Stripe subscription (trial counts). + const billingState = await getTenantBillingState(db, tenantId); + const billingReady = + billingState.mode === 'selfhost' || + billingState.plan === 'enterprise' || + !!billingState.stripeSubscriptionId; + + const complete = + brandInfoComplete && + campaignCount > 0 && + networkConnected && + offeringPublishedCount > 0 && + partnerCount > 0 && + billingReady; + + const out: BrandOnboardingStatus = { + brandInfoComplete, + campaignCount, + networkConnected, + offeringPublishedCount, + partnerCount, + billingReady, + complete, + }; + res.json(out); +}); diff --git a/apps/api/src/routes/partner-auth.ts b/apps/api/src/routes/partner-auth.ts new file mode 100644 index 0000000..20c3aa7 --- /dev/null +++ b/apps/api/src/routes/partner-auth.ts @@ -0,0 +1,169 @@ +/** + * Human authentication — covers both admin and partner personas. + * + * POST /auth/signin email → magic-link email (whichever table + * the address lives in). 
200 always so + * email existence can't be enumerated. + * POST /auth/magic/verify token → session cookie + whoami. Branches + * on the token's principalKind. + * POST /auth/signout revokes the session cookie. + * + * Invite-on-create sides live in /partners and /admins. + */ + +import { Router } from 'express'; +import { z } from 'zod'; +import { TABLES, type AdminRow, type PartnerRow } from '@openpartner/db'; +import { + SESSION_COOKIE_NAME, + consumeMagicLink, + createSession, + issueMagicLink, + resolveSession, + revokeSession, + sessionCookieOptions, +} from '../auth-sessions.js'; +import { getMailer } from '../mailer.js'; +import { ipRateLimit } from '../middleware/rate-limit.js'; +import { adminSigninEmail, buildMagicLinkUrl, partnerSigninEmail } from '../email-templates.js'; +import { tenantOf } from '../tenancy.js'; + +export const partnerAuthRouter = Router(); + +const mailAuthLimit = ipRateLimit({ name: 'partner-auth-mail', max: 10, windowMs: 60_000 }); +const verifyLimit = ipRateLimit({ name: 'partner-auth-verify', max: 30, windowMs: 60_000 }); + +const signinSchema = z.object({ email: z.string().email() }); +const verifySchema = z.object({ token: z.string().min(8) }); + +// -------- Signin -------- + +partnerAuthRouter.post('/auth/signin', mailAuthLimit, async (req, res) => { + const { db, tenantId } = tenantOf(req); + const body = signinSchema.safeParse(req.body); + if (!body.success) return res.status(400).json({ error: 'invalid_body', detail: body.error.flatten() }); + + const email = body.data.email.toLowerCase(); + + // Admin first — if the same email is registered as both an admin and a + // partner (unusual but possible on single-operator setups), admin wins. 
+ const admin = await db(TABLES.Admin).where({ email }).first(); + if (admin?.activatedAt && !admin.revokedAt) { + const issued = await issueMagicLink(db, { + tenantId, + email, + purpose: 'admin_signin', + principalKind: 'admin', + principalId: admin.id, + }); + const tmpl = adminSigninEmail(admin.name, buildMagicLinkUrl(issued.plaintext, req.tenantSlug)); + await getMailer().send({ db, tenantId }, { + to: email, + subject: tmpl.subject, + text: tmpl.text, + html: tmpl.html, + tag: 'admin_signin', + metadata: { purpose: 'admin_signin', adminId: admin.id }, + }); + return res.json({ ok: true }); + } + + const partner = await db(TABLES.Partner).where({ email }).first(); + // Revoked partners fall through silently — they were notified at + // revoke time (if admin opted in) and emailing on every signin + // attempt turns /auth/signin into a harassment vector: anyone can + // cause arbitrary emails to the victim by POSTing their address + // here repeatedly. + if (partner?.activatedAt && !partner.revokedAt) { + const issued = await issueMagicLink(db, { + tenantId, + email, + purpose: 'partner_signin', + principalKind: 'partner', + principalId: partner.id, + }); + const { resolveBrandName } = await import('../brand-name.js'); + const brandName = await resolveBrandName(db, tenantId); + const tmpl = partnerSigninEmail(partner.name, buildMagicLinkUrl(issued.plaintext, req.tenantSlug), brandName); + await getMailer().send({ db, tenantId }, { + to: email, + subject: tmpl.subject, + text: tmpl.text, + html: tmpl.html, + tag: 'partner_signin', + metadata: { purpose: 'partner_signin', partnerId: partner.id }, + }); + } + // Unknown / pending / revoked → silent 200. No email sent. 
+ res.json({ ok: true }); +}); + +// -------- Verify -------- + +partnerAuthRouter.post('/auth/magic/verify', verifyLimit, async (req, res) => { + const { db, tenantId } = tenantOf(req); + const body = verifySchema.safeParse(req.body); + if (!body.success) return res.status(400).json({ error: 'invalid_body', detail: body.error.flatten() }); + + const consumed = await consumeMagicLink(db, body.data.token); + if (!consumed) return res.status(400).json({ error: 'invalid_or_expired_token' }); + const token = consumed.token; + + if (token.principalKind === 'admin') { + const admin = await db(TABLES.Admin).where({ id: token.principalId }).first(); + if (!admin) return res.status(404).json({ error: 'admin_not_found' }); + if (admin.revokedAt) return res.status(403).json({ error: 'admin_revoked' }); + + if (token.purpose === 'admin_invite' && !admin.activatedAt) { + await db(TABLES.Admin) + .where({ id: admin.id }) + .update({ activatedAt: new Date(), updatedAt: new Date() }); + } + await db(TABLES.Admin).where({ id: admin.id }).update({ lastSignInAt: new Date() }); + + const session = await createSession(db, { tenantId, principalKind: 'admin', principalId: admin.id }); + res.cookie(SESSION_COOKIE_NAME, session.plaintext, sessionCookieOptions()); + return res.json({ + ok: true, + role: 'admin', + admin: { id: admin.id, email: admin.email, name: admin.name }, + }); + } + + // partner + const partner = await db(TABLES.Partner).where({ id: token.principalId }).first(); + if (!partner) return res.status(404).json({ error: 'partner_not_found' }); + if (partner.revokedAt) return res.status(403).json({ error: 'partner_revoked' }); + + if (token.purpose === 'partner_invite' && !partner.activatedAt) { + await db(TABLES.Partner) + .where({ id: partner.id }) + .update({ activatedAt: new Date(), updatedAt: new Date() }); + } + + const session = await createSession(db, { tenantId, principalKind: 'partner', principalId: partner.id }); + res.cookie(SESSION_COOKIE_NAME, session.plaintext, 
sessionCookieOptions()); + res.json({ + ok: true, + role: 'partner', + partner: { + id: partner.id, + name: partner.name, + email: partner.email, + stripeConnected: !!partner.stripeConnectAccountId, + }, + }); +}); + +// -------- Signout -------- + +partnerAuthRouter.post('/auth/signout', async (req, res) => { + const { db } = tenantOf(req); + const cookie = (req as unknown as { cookies?: Record }).cookies?.[SESSION_COOKIE_NAME]; + if (cookie) { + const session = await resolveSession(db, cookie); + if (session) await revokeSession(db, session.id); + } + res.clearCookie(SESSION_COOKIE_NAME, { path: '/' }); + res.json({ ok: true }); +}); diff --git a/apps/api/src/routes/partner-campaigns.ts b/apps/api/src/routes/partner-campaigns.ts new file mode 100644 index 0000000..437eed1 --- /dev/null +++ b/apps/api/src/routes/partner-campaigns.ts @@ -0,0 +1,104 @@ +/** + * Brand-admin management of partner ↔ campaign access. + * + * GET /partners/:id/campaigns list current grants + every campaign in the tenant + * POST /partners/:id/campaigns add grants — body: { campaignIds: string[] } + * DELETE /partners/:id/campaigns/:campaignId revoke a single grant + * + * Guard rails: + * - source='offering' grants are still revokable, but the response + * surface flags them so the admin sees they're undoing a + * Network-driven grant. (We don't outright block — operators + * should be able to clean up.) + * - Revoking a grant doesn't delete existing Links the partner + * created against that campaign. They keep working until the + * Link itself is deleted; new Links are blocked by the + * /partners/:id/links route's grant check. 
+ */ + +import { Router } from 'express'; +import { z } from 'zod'; +import { ulid } from 'ulid'; +import { TABLES, type CampaignRow, type PartnerCampaignRow, type PartnerRow } from '@openpartner/db'; +import { requireAdmin, requireAuth } from '../auth.js'; +import { tenantOf } from '../tenancy.js'; +import { autoMintCouponsForGrants } from './coupons.js'; + +export const partnerCampaignsRouter = Router(); + +partnerCampaignsRouter.get('/partners/:id/campaigns', requireAuth, requireAdmin, async (req, res) => { + const { db } = tenantOf(req); + const partner = await db(TABLES.Partner).where({ id: req.params.id }).first(); + if (!partner) return res.status(404).json({ error: 'partner_not_found' }); + + const campaigns = (await db(TABLES.Campaign) + .select('id', 'name', 'destinationUrl', 'deepLinkAllowedDomains') + .orderBy('createdAt', 'desc')) as Array>; + const grants = (await db(TABLES.PartnerCampaign) + .where({ partnerId: req.params.id }) + .select('campaignId', 'source', 'createdAt')) as Array>; + const grantByCampaign = new Map(grants.map((g) => [g.campaignId, g])); + + // Each campaign carries its grant state so the UI can render a single + // checklist instead of hand-merging. + const result = campaigns.map((c) => ({ + ...c, + granted: grantByCampaign.has(c.id), + grantSource: grantByCampaign.get(c.id)?.source ?? null, + grantedAt: grantByCampaign.get(c.id)?.createdAt ?? 
null, + })); + + res.json({ campaigns: result }); +}); + +const addSchema = z.object({ + campaignIds: z.array(z.string().min(1)).min(1), +}); + +partnerCampaignsRouter.post('/partners/:id/campaigns', requireAuth, requireAdmin, async (req, res) => { + const { db, tenantId } = tenantOf(req); + const partner = await db(TABLES.Partner).where({ id: req.params.id }).first(); + if (!partner) return res.status(404).json({ error: 'partner_not_found' }); + const body = addSchema.safeParse(req.body); + if (!body.success) return res.status(400).json({ error: 'invalid_body', detail: body.error.flatten() }); + + // Filter to campaignIds that actually exist in the tenant — otherwise + // a typo would create an orphan grant. + const valid = (await db(TABLES.Campaign) + .whereIn('id', body.data.campaignIds) + .select('id')) as Array<{ id: string }>; + const validIds = new Set(valid.map((c) => c.id)); + const toInsert = body.data.campaignIds.filter((id) => validIds.has(id)); + if (toInsert.length === 0) { + return res.status(404).json({ error: 'no_valid_campaigns' }); + } + + await db(TABLES.PartnerCampaign) + .insert( + toInsert.map((cid) => ({ + id: `pc_${ulid()}`, + tenantId, + partnerId: req.params.id, + campaignId: cid, + source: 'admin', + })), + ) + .onConflict(['tenantId', 'partnerId', 'campaignId']) + .ignore(); + + // Mint default coupons alongside the grant so creators have both + // attribution paths (link + code) without admin doing two steps. + // Idempotent — pre-existing coupons aren't disturbed. 
+ await autoMintCouponsForGrants(db, tenantId, { id: partner.id, email: partner.email }, toInsert); + + res.status(201).json({ added: toInsert }); +}); + +partnerCampaignsRouter.delete('/partners/:id/campaigns/:campaignId', requireAuth, requireAdmin, async (req, res) => { + const { db } = tenantOf(req); + const deleted = await db(TABLES.PartnerCampaign) + .where({ partnerId: req.params.id, campaignId: req.params.campaignId }) + .del(); + if (deleted === 0) return res.status(404).json({ error: 'grant_not_found' }); + res.json({ ok: true }); +}); diff --git a/apps/api/src/routes/partner-signup.ts b/apps/api/src/routes/partner-signup.ts new file mode 100644 index 0000000..61f431b --- /dev/null +++ b/apps/api/src/routes/partner-signup.ts @@ -0,0 +1,192 @@ +/** + * Public creator-facing partner signup. + * + * Lets a creator self-serve onto a vendor's program without admin + * pre-invite. The vendor's `Settings → Partners` page chooses the + * post-signup state: + * + * - `auto_approve` (default): activatedAt set immediately, magic-link + * issued, partner can sign in straight away. + * - `require_review`: activatedAt stays null until an admin approves + * via /partners/:id/invite (which also sends the magic-link). + * + * The endpoint is mounted INSIDE tenantMiddleware — in multi-tenant + * mode the URL is /t//partner-signup; in single-tenant mode + * (self-host) it's /partner-signup. Tenant context comes from req.db + * either way. + * + * Network behavior: if the vendor has Network membership enabled with + * autoEnroll on, the new partner is pushed to /partners/upsert and the + * returned networkCreatorId is stamped on Partner.metadata.network. + * Network failures are queued in NetworkOutbox and don't fail the + * signup. 
+ */ + +import { Router } from 'express'; +import { z } from 'zod'; +import { ulid } from 'ulid'; +import { TABLES, type ConfigRow, type PartnerRow } from '@openpartner/db'; +import { issueMagicLink } from '../auth-sessions.js'; +import { getMailer } from '../mailer.js'; +import { buildMagicLinkUrl, partnerInviteEmail } from '../email-templates.js'; +import { ipRateLimit } from '../middleware/rate-limit.js'; +import { tenantOf } from '../tenancy.js'; +import { getNetworkMembership, pushPartnerUpsert } from '../network-client.js'; + +export const partnerSignupRouter = Router(); + +const signupLimit = ipRateLimit({ name: 'partner-signup', max: 10, windowMs: 60_000 }); + +const schema = z.object({ + email: z.string().trim().email().max(254), + name: z.string().trim().min(1).max(120), + profile: z.record(z.unknown()).optional(), +}); + +export type PartnerSignupPolicy = 'auto_approve' | 'require_review'; + +interface PartnerSignupSettings { + policy: PartnerSignupPolicy; + /** Disable signup entirely. Defaults false (signup open). */ + disabled?: boolean; +} + +const PARTNER_SIGNUP_CONFIG_KEY = 'partner_signup'; + +async function readSignupSettings( + db: import('knex').Knex, + tenantId: string, +): Promise { + const row = await db(TABLES.Config).where({ tenantId, key: PARTNER_SIGNUP_CONFIG_KEY }).first(); + const value = (row?.value ?? {}) as Partial; + return { + policy: value.policy === 'require_review' ? 
'require_review' : 'auto_approve', + disabled: value.disabled === true, + }; +} + +partnerSignupRouter.post('/partner-signup', signupLimit, async (req, res) => { + const { db, tenantId } = tenantOf(req); + const body = schema.safeParse(req.body); + if (!body.success) return res.status(400).json({ error: 'invalid_body', detail: body.error.flatten() }); + + const settings = await readSignupSettings(db, tenantId); + if (settings.disabled) { + return res.status(403).json({ error: 'signup_disabled' }); + } + + const email = body.data.email.toLowerCase(); + + // Existence check is intentionally generic — same response shape on + // taken vs not. We don't want this endpoint to leak whether an email + // is registered with this vendor (creator-facing privacy + a minor + // hardening against credential-stuffing reconnaissance). + const existing = await db(TABLES.Partner).where({ email }).first(); + if (existing) { + return res.json({ ok: true, status: 'already_registered' }); + } + + const id = ulid(); + const now = new Date(); + const activatedAt = settings.policy === 'auto_approve' ? now : null; + let partner: PartnerRow; + try { + const inserted = (await db(TABLES.Partner) + .insert({ + id, + tenantId, + email, + name: body.data.name, + metadata: body.data.profile ?? {}, + activatedAt, + }) + .returning('*')) as PartnerRow[]; + if (inserted.length === 0) { + // Should be impossible — INSERT without ON CONFLICT either succeeds + // or throws — but typing-wise returning('*') is PartnerRow[]. + throw new Error('partner_insert_returned_no_rows'); + } + partner = inserted[0]!; + } catch (err) { + if (typeof err === 'object' && err !== null && (err as { code?: string }).code === '23505') { + // Race against the existence check — same generic response. + return res.json({ ok: true, status: 'already_registered' }); + } + throw err; + } + + // Push to Network if configured + autoEnroll. Fire-and-forget on the + // request hot path: a Network outage must not fail the signup. 
The + // pushPartnerUpsert helper handles outbox queueing internally. + const network = await getNetworkMembership(db, tenantId); + if (network?.enabled && network.autoEnroll) { + const result = await pushPartnerUpsert(db, tenantId, { + vendorPartnerId: partner.id, + email: partner.email, + name: partner.name, + profile: body.data.profile, + joinedVendorAt: partner.createdAt.toISOString(), + status: activatedAt ? 'active' : 'pending', + metadata: { source: 'self_signup' }, + }); + if (result) { + // Stamp the canonical Network identity onto the Partner so the + // admin UI can show "this creator was already on the Network". + await db(TABLES.Partner) + .where({ id: partner.id }) + .update({ + metadata: db.raw( + `jsonb_set(coalesce("metadata", '{}'::jsonb), '{network}', ?::jsonb, true)`, + [ + JSON.stringify({ + creatorId: result.networkCreatorId, + preExisting: result.alreadyExisted, + affiliations: result.affiliations.length, + syncedAt: new Date().toISOString(), + }), + ], + ), + updatedAt: new Date(), + }); + } + } + + // Magic link goes out for both auto-approve and require-review paths. + // require-review's magic link confirms email ownership; admin still + // has to flip activatedAt before the partner can do anything. + const issued = await issueMagicLink(db, { + tenantId, + email, + purpose: 'partner_invite', + principalKind: 'partner', + principalId: partner.id, + }); + const { resolveBrandName } = await import('../brand-name.js'); + const brandName = await resolveBrandName(db, tenantId); + const tmpl = partnerInviteEmail(partner.name, buildMagicLinkUrl(issued.plaintext, req.tenantSlug), brandName); + try { + await getMailer().send({ db, tenantId }, { + to: email, + subject: tmpl.subject, + text: tmpl.text, + html: tmpl.html, + tag: 'partner_invite', + metadata: { purpose: 'partner_invite', partnerId: partner.id, source: 'self_signup' }, + }); + } catch (err) { + // Mail failure on signup is recoverable — the row exists, the admin + // can resend. 
Surface a 202 so the client knows partial success. + return res.status(202).json({ + ok: true, + status: activatedAt ? 'active_pending_email' : 'pending_review_pending_email', + partnerId: partner.id, + mailError: err instanceof Error ? err.message : String(err), + }); + } + + res.status(201).json({ + ok: true, + status: activatedAt ? 'active' : 'pending_review', + partnerId: partner.id, + }); +}); diff --git a/apps/api/src/routes/partners.ts b/apps/api/src/routes/partners.ts index 47f13f7..0e8e5dd 100644 --- a/apps/api/src/routes/partners.ts +++ b/apps/api/src/routes/partners.ts @@ -1,41 +1,305 @@ import { Router } from 'express'; import { z } from 'zod'; import { ulid } from 'ulid'; -import { TABLES, type PartnerRow } from '@openpartner/db'; -import { db } from '../db.js'; +import { TABLES, type ApiKeyRow, type PartnerRow, type SessionRow } from '@openpartner/db'; import { grantScope, requireAdmin, requireAuth, requirePartnerOrAdmin } from '../auth.js'; +import { issueMagicLink } from '../auth-sessions.js'; +import { getMailer } from '../mailer.js'; +import { buildMagicLinkUrl, partnerInviteEmail, partnerRevokedEmail } from '../email-templates.js'; +import { tenantOf } from '../tenancy.js'; +import { getNetworkMembership, pushPartnerRevoke, pushPartnerUpsert } from '../network-client.js'; +import { autoMintCouponsForGrants } from './coupons.js'; const createSchema = z.object({ email: z.string().email(), name: z.string().min(1), metadata: z.record(z.unknown()).optional(), + // Admin can opt out of the invite email (e.g. federation creating a + // Partner row on behalf of an external creator network). Default is + // "invite them" since that's the intent of the admin UI. + sendInvite: z.boolean().optional(), + /** Programs (campaigns) this partner can create share-links for. + * Omitted = grant access to ALL current campaigns (preserves the + * pre-PartnerCampaign behavior). Empty array = grant nothing + * (rare; admin can add later). 
*/ + campaignIds: z.array(z.string()).optional(), + /** Where the grants come from. 'offering' is what the Network + * federation sends so we can tell admin assignments apart from + * Network-driven ones in the audit log + UI. */ + campaignGrantSource: z.enum(['admin', 'offering']).optional(), }); export const partnersRouter = Router(); +/** + * Create a partner and, by default, send them an invite magic link. The + * partner starts with `activatedAt = null`; when they click the link, + * verify flips that to now() and issues a session. Admin never sees a + * partner-facing credential. + */ partnersRouter.post('/partners', requireAuth, grantScope('partners:write'), requireAdmin, async (req, res) => { + const { db, tenantId } = tenantOf(req); const body = createSchema.safeParse(req.body); if (!body.success) return res.status(400).json({ error: 'invalid_body', detail: body.error.flatten() }); + const sendInvite = body.data.sendInvite !== false; + const email = body.data.email.toLowerCase(); const id = ulid(); - const [partner] = await db(TABLES.Partner) - .insert({ - id, - email: body.data.email, + + // Duplicate-email pre-check — we'd rather 409 with a clean error than + // let Postgres throw a unique-constraint violation that would surface + // to the admin as a generic 500. Race between the check + insert is + // caught below via the unique-violation path. + const existing = await db(TABLES.Partner).where({ email }).first(); + if (existing) return res.status(409).json({ error: 'email_taken' }); + + let partner; + try { + [partner] = await db(TABLES.Partner) + .insert({ + id, + tenantId, + email, + name: body.data.name, + metadata: body.data.metadata ?? {}, + // sendInvite=false means the caller is responsible for activating + // (federation client, admin seeding, etc); skip the pending state. + activatedAt: sendInvite ? 
null : new Date(), + }) + .returning('*'); + } catch (err) { + // Race: two concurrent POSTs with the same email both pass the + // pre-check and try to insert. The second one hits the unique + // constraint on Partner.email. + if (typeof err === 'object' && err !== null && (err as { code?: string }).code === '23505') { + return res.status(409).json({ error: 'email_taken' }); + } + throw err; + } + + // Grant program (campaign) access. Default = all current campaigns + // when caller didn't specify, matching the pre-PartnerCampaign + // behavior so existing flows don't change shape. + let campaignIds: string[]; + if (body.data.campaignIds) { + // Explicit list — validate that each ID exists in this tenant. + // Without this, a stale ID (e.g. Network Offering still references + // a Campaign deleted on the vendor side) hits the FK constraint + // and surfaces to the caller as an opaque 500. Federation flows + // need a clean error message back instead. + const existing = (await db(TABLES.Campaign) + .whereIn('id', body.data.campaignIds) + .select('id')) as Array<{ id: string }>; + const existingIds = new Set(existing.map((c) => c.id)); + const missing = body.data.campaignIds.filter((cid) => !existingIds.has(cid)); + if (missing.length > 0) { + return res.status(400).json({ error: 'unknown_campaign_ids', missing }); + } + campaignIds = body.data.campaignIds; + } else { + campaignIds = ((await db(TABLES.Campaign).select('id')) as Array<{ id: string }>).map((c) => c.id); + } + if (campaignIds.length > 0) { + const grantSource = body.data.campaignGrantSource ?? 'admin'; + await db(TABLES.PartnerCampaign) + .insert( + campaignIds.map((cid) => ({ + id: `pc_${ulid()}`, + tenantId, + partnerId: id, + campaignId: cid, + source: grantSource, + })), + ) + .onConflict(['tenantId', 'partnerId', 'campaignId']) + .ignore(); + // Auto-mint a default coupon per granted campaign so creators have + // both attribution paths (link + code) available without admin + // explicit action. 
ON CONFLICT IGNORE inside the helper means + // re-grants don't error. + await autoMintCouponsForGrants(db, tenantId, { id, email }, campaignIds); + } + + if (sendInvite) { + const issued = await issueMagicLink(db, { + tenantId, + email, + purpose: 'partner_invite', + principalKind: 'partner', + principalId: id, + }); + const { resolveBrandName } = await import('../brand-name.js'); + const brandName = await resolveBrandName(db, tenantId); + const tmpl = partnerInviteEmail(body.data.name, buildMagicLinkUrl(issued.plaintext, req.tenantSlug), brandName); + await getMailer().send({ db, tenantId }, { + to: email, + subject: tmpl.subject, + text: tmpl.text, + html: tmpl.html, + tag: 'partner_invite', + metadata: { purpose: 'partner_invite', partnerId: id }, + }); + } + + // Push to Network if configured + autoEnroll. Same fire-and-forget + // semantics as /partner-signup: a Network outage doesn't fail this + // request; failures land in NetworkOutbox for the scheduler to retry. + const network = await getNetworkMembership(db, tenantId); + if (network?.enabled && network.autoEnroll) { + const partnerRow = partner as PartnerRow; + const result = await pushPartnerUpsert(db, tenantId, { + vendorPartnerId: id, + email, name: body.data.name, - metadata: body.data.metadata ?? {}, - }) - .returning('*'); + profile: body.data.metadata, + joinedVendorAt: partnerRow.createdAt.toISOString(), + status: partnerRow.activatedAt ? 
'active' : 'pending', + metadata: { source: 'admin_invite' }, + }); + if (result) { + await db(TABLES.Partner) + .where({ id }) + .update({ + metadata: db.raw( + `jsonb_set(coalesce("metadata", '{}'::jsonb), '{network}', ?::jsonb, true)`, + [ + JSON.stringify({ + creatorId: result.networkCreatorId, + preExisting: result.alreadyExisted, + affiliations: result.affiliations.length, + syncedAt: new Date().toISOString(), + }), + ], + ), + updatedAt: new Date(), + }); + } + } + + res.status(201).json({ ...partner, invited: sendInvite }); +}); + +/** + * Re-send an invite for a partner who hasn't accepted yet. Admin only. + * Idempotent: multiple sends are fine; the partner can click any one + * (they all expire in 15 minutes). + */ +partnersRouter.post('/partners/:id/invite', requireAuth, requireAdmin, async (req, res) => { + const { db, tenantId } = tenantOf(req); + const partner = await db(TABLES.Partner).where({ id: req.params.id }).first(); + if (!partner) return res.status(404).json({ error: 'not_found' }); + if (partner.activatedAt) return res.status(409).json({ error: 'already_activated' }); + + const issued = await issueMagicLink(db, { + tenantId, + email: partner.email, + purpose: 'partner_invite', + principalKind: 'partner', + principalId: partner.id, + }); + const { resolveBrandName } = await import('../brand-name.js'); + const brandName = await resolveBrandName(db, tenantId); + const tmpl = partnerInviteEmail(partner.name, buildMagicLinkUrl(issued.plaintext, req.tenantSlug), brandName); + await getMailer().send({ db, tenantId }, { + to: partner.email, + subject: tmpl.subject, + text: tmpl.text, + html: tmpl.html, + tag: 'partner_invite', + metadata: { purpose: 'partner_invite', partnerId: partner.id, resend: true }, + }); + res.json({ ok: true }); +}); + +const revokeSchema = z.object({ + reason: z.string().max(500).optional(), + // Default true: industry-standard partner-program norm is to notify. 
+ // Admin unchecks for fraud cases where tipping the partner off is + // counterproductive. + notify: z.boolean().optional().default(true), +}); + +/** + * Suspend a partner. Flips revokedAt, revokes all of their sessions so + * they're kicked out mid-request, leaves historical commissions + * untouched. Future attribution skips them; router flags clicks on their + * links as 'revoked'. Sends a notification email unless notify=false. + */ +partnersRouter.post('/partners/:id/revoke', requireAuth, grantScope('partners:write'), requireAdmin, async (req, res) => { + const { db, tenantId } = tenantOf(req); + const body = revokeSchema.safeParse(req.body ?? {}); + if (!body.success) return res.status(400).json({ error: 'invalid_body', detail: body.error.flatten() }); + + const partner = await db(TABLES.Partner).where({ id: req.params.id }).first(); + if (!partner) return res.status(404).json({ error: 'not_found' }); + if (partner.revokedAt) return res.status(409).json({ error: 'already_revoked' }); + + const now = new Date(); + const reason = body.data.reason ?? null; + // The request is already in a transaction (per-request via tenantMiddleware); + // operate on req.db directly without a nested trx. + await db(TABLES.Partner) + .where({ id: partner.id }) + .update({ revokedAt: now, revokeReason: reason, updatedAt: now }); + // Kill every authentication channel they hold: web sessions AND + // their partner-scoped API keys. Leaving ApiKey rows live meant a + // revoked partner retained programmatic access even though their + // dashboard cookie was killed. 
+ await db(TABLES.Session) + .where({ principalKind: 'partner', principalId: partner.id }) + .whereNull('revokedAt') + .update({ revokedAt: now }); + await db(TABLES.ApiKey) + .where({ partnerId: partner.id }) + .whereNull('revokedAt') + .update({ revokedAt: now }); + + if (body.data.notify) { + const tmpl = partnerRevokedEmail(partner.name, reason); + await getMailer().send({ db, tenantId }, { + to: partner.email, + subject: tmpl.subject, + text: tmpl.text, + html: tmpl.html, + tag: 'partner_revoked', + metadata: { purpose: 'partner_revoked', partnerId: partner.id }, + }); + } + + // Network gets the revoke too — but unconditionally if Network is + // enabled (not gated on autoEnroll). The reasoning: autoEnroll + // controls whether NEW partners flow into the Network; revokes need + // to mirror regardless so a creator the Network thinks is active + // doesn't keep getting matched after the vendor cuts them off. + const network = await getNetworkMembership(db, tenantId); + if (network?.enabled) { + await pushPartnerRevoke(db, tenantId, partner.id); + } + + res.json({ ok: true, revokedAt: now, notified: body.data.notify }); +}); + +/** Undo revoke — partner regains dashboard access and future attribution. 
*/ +partnersRouter.post('/partners/:id/reinstate', requireAuth, requireAdmin, async (req, res) => { + const { db } = tenantOf(req); + const partner = await db(TABLES.Partner).where({ id: req.params.id }).first(); + if (!partner) return res.status(404).json({ error: 'not_found' }); + if (!partner.revokedAt) return res.status(409).json({ error: 'not_revoked' }); - res.status(201).json(partner); + await db(TABLES.Partner) + .where({ id: partner.id }) + .update({ revokedAt: null, revokeReason: null, updatedAt: new Date() }); + res.json({ ok: true }); }); -partnersRouter.get('/partners', requireAuth, requireAdmin, async (_req, res) => { +partnersRouter.get('/partners', requireAuth, requireAdmin, async (req, res) => { + const { db } = tenantOf(req); const partners = await db(TABLES.Partner).orderBy('createdAt', 'desc').limit(500); res.json({ partners }); }); partnersRouter.get('/partners/:id', requireAuth, requirePartnerOrAdmin('id'), async (req, res) => { + const { db } = tenantOf(req); const partner = await db(TABLES.Partner).where({ id: req.params.id }).first(); if (!partner) return res.status(404).json({ error: 'not_found' }); res.json(partner); diff --git a/apps/api/src/routes/payouts.ts b/apps/api/src/routes/payouts.ts index dd15077..a597c26 100644 --- a/apps/api/src/routes/payouts.ts +++ b/apps/api/src/routes/payouts.ts @@ -1,13 +1,14 @@ import { Router } from 'express'; import { TABLES, type PayoutRow } from '@openpartner/db'; -import { db } from '../db.js'; import { requireAdmin, requireAuth, requirePartnerOrAdmin } from '../auth.js'; import { runPayouts } from '../payouts.js'; +import { tenantOf } from '../tenancy.js'; export const payoutsRouter = Router(); -payoutsRouter.post('/payouts/run', requireAuth, requireAdmin, async (_req, res) => { - const result = await runPayouts(); +payoutsRouter.post('/payouts/run', requireAuth, requireAdmin, async (req, res) => { + const { db, tenantId } = tenantOf(req); + const result = await runPayouts(db, tenantId); 
res.json(result); }); @@ -16,6 +17,7 @@ payoutsRouter.get( requireAuth, requirePartnerOrAdmin('id'), async (req, res) => { + const { db } = tenantOf(req); const payouts = await db(TABLES.Payout) .where({ partnerId: req.params.id }) .orderBy('createdAt', 'desc') diff --git a/apps/api/src/routes/platform-auth.ts b/apps/api/src/routes/platform-auth.ts new file mode 100644 index 0000000..cf2944b --- /dev/null +++ b/apps/api/src/routes/platform-auth.ts @@ -0,0 +1,152 @@ +/** + * Platform-identity auth: verify the unified-signin magic link, list + * the user's workspaces, and exchange the platform session for a + * tenant-scoped one. + * + * Routes here all run BEFORE tenantMiddleware (no /t// prefix in + * URLs), so they reach for the privileged `db` directly. Tenant-scoped + * follow-on writes (creating the per-workspace Session) open their own + * transaction with `app.tenant_id` set. + */ + +import { Router, type Request } from 'express'; +import { z } from 'zod'; +import { TABLES, type AdminRow, type TenantRow } from '@openpartner/db'; +import { db, appDb } from '../db.js'; +import { consumeMagicLink, createSession, SESSION_COOKIE_NAME, sessionCookieOptions } from '../auth-sessions.js'; +import { + createPlatformSession, + PLATFORM_SESSION_COOKIE, + platformSessionCookieOptions, + resolvePlatformSession, + revokePlatformSession, +} from '../platform-sessions.js'; + +export const platformAuthRouter = Router(); + +const verifySchema = z.object({ token: z.string().min(8) }); + +// -------- Verify the platform magic-link token -------- + +platformAuthRouter.post('/auth/platform-verify', async (req, res) => { + const body = verifySchema.safeParse(req.body); + if (!body.success) return res.status(400).json({ error: 'invalid_body', detail: body.error.flatten() }); + + const consumed = await consumeMagicLink(db, body.data.token); + if (!consumed) return res.status(400).json({ error: 'invalid_or_expired_token' }); + const token = consumed.token; + + if (token.purpose !== 
'platform_signin' || token.principalKind !== 'platform') { + return res.status(400).json({ error: 'wrong_token_kind' }); + } + + // The token's principalId is the email; use it as the canonical identity. + const email = (token.email || token.principalId).toLowerCase(); + + const session = await createPlatformSession(db, email); + res.cookie(PLATFORM_SESSION_COOKIE, session.plaintext, platformSessionCookieOptions()); + + res.json({ ok: true, kind: 'platform', email }); +}); + +// -------- List workspaces the platform-identity owns -------- + +interface Workspace { + tenantSlug: string; + tenantDisplayName: string; + adminId: string; + activated: boolean; +} + +function readPlatformCookie(req: Request): string | null { + const cookie = (req as unknown as { cookies?: Record }).cookies?.[PLATFORM_SESSION_COOKIE]; + return cookie ?? null; +} + +platformAuthRouter.get('/me/workspaces', async (req, res) => { + const cookie = readPlatformCookie(req); + if (!cookie) return res.status(401).json({ error: 'no_platform_session' }); + + const session = await resolvePlatformSession(db, cookie); + if (!session) return res.status(401).json({ error: 'invalid_or_expired_session' }); + + const rows = (await db(TABLES.Admin) + .join(TABLES.Tenant, `${TABLES.Tenant}.id`, `${TABLES.Admin}.tenantId`) + .where(`${TABLES.Admin}.email`, session.email) + .whereNull(`${TABLES.Admin}.revokedAt`) + .andWhere(`${TABLES.Tenant}.status`, 'active') + .select( + `${TABLES.Tenant}.slug as tenantSlug`, + `${TABLES.Tenant}.displayName as tenantDisplayName`, + `${TABLES.Admin}.id as adminId`, + `${TABLES.Admin}.activatedAt as activatedAt`, + )) as Array<{ tenantSlug: string; tenantDisplayName: string; adminId: string; activatedAt: Date | null }>; + + const workspaces: Workspace[] = rows.map((r) => ({ + tenantSlug: r.tenantSlug, + tenantDisplayName: r.tenantDisplayName, + adminId: r.adminId, + activated: !!r.activatedAt, + })); + + res.json({ email: session.email, workspaces }); +}); + +// -------- Enter 
a workspace: trade platform session for a tenant Session -------- + +const enterSchema = z.object({ slug: z.string().trim().min(1) }); + +platformAuthRouter.post('/workspaces/enter', async (req, res) => { + const body = enterSchema.safeParse(req.body); + if (!body.success) return res.status(400).json({ error: 'invalid_body' }); + + const cookie = readPlatformCookie(req); + if (!cookie) return res.status(401).json({ error: 'no_platform_session' }); + const platform = await resolvePlatformSession(db, cookie); + if (!platform) return res.status(401).json({ error: 'invalid_or_expired_session' }); + + const tenant = await db(TABLES.Tenant).where({ slug: body.data.slug, status: 'active' }).first(); + if (!tenant) return res.status(404).json({ error: 'tenant_not_found' }); + + const admin = await db(TABLES.Admin) + .where({ tenantId: tenant.id, email: platform.email }) + .whereNull('revokedAt') + .first(); + if (!admin) return res.status(403).json({ error: 'not_a_member' }); + + // Activate on first entry — the platform-signin link doubles as + // activation if signup's invite email never landed. Aligns with the + // recovery path in /signin. + if (!admin.activatedAt) { + await db(TABLES.Admin) + .where({ id: admin.id }) + .update({ activatedAt: new Date(), updatedAt: new Date() }); + } + await db(TABLES.Admin).where({ id: admin.id }).update({ lastSignInAt: new Date() }); + + // Per-tenant Session insert needs to run with app.tenant_id set so RLS + // (when enabled) accepts the write. Use the appDb pool. 
+ const trx = await appDb.transaction(); + let sessionPlaintext: string; + try { + await trx.raw(`set local app.tenant_id = '${tenant.id.replace(/'/g, "''")}'`); + const created = await createSession(trx, { tenantId: tenant.id, principalKind: 'admin', principalId: admin.id }); + sessionPlaintext = created.plaintext; + await trx.commit(); + } catch (err) { + await trx.rollback(); + throw err; + } + + res.cookie(SESSION_COOKIE_NAME, sessionPlaintext, sessionCookieOptions()); + res.json({ ok: true, tenantSlug: tenant.slug, home: `/t/${tenant.slug}/` }); +}); + +// -------- Sign out the platform identity -------- + +platformAuthRouter.post('/auth/platform-signout', async (req, res) => { + const cookie = readPlatformCookie(req); + if (cookie) await revokePlatformSession(db, cookie); + res.clearCookie(PLATFORM_SESSION_COOKIE, { path: '/' }); + res.json({ ok: true }); +}); diff --git a/apps/api/src/routes/session-home.ts b/apps/api/src/routes/session-home.ts new file mode 100644 index 0000000..f4dfab3 --- /dev/null +++ b/apps/api/src/routes/session-home.ts @@ -0,0 +1,52 @@ +/** + * Where does the current session belong? + * + * Used by the public Landing on app.openpartner.dev to auto-redirect + * already-signed-in visitors into their portal instead of dumping them + * on the marketing page. Reads the op_session cookie, resolves the + * session through the privileged db (no tenant context needed since + * sessions table carries tenantId on the row), and returns the home + * path the SPA should navigate to. + * + * Public — no auth gate. Returns 200 + `{ home: null }` for anonymous + * visitors so the Landing can render normally without a 401 in devtools. 
+ */
+
+import { Router } from 'express';
+import { TABLES, type TenantRow } from '@openpartner/db';
+import { db } from '../db.js';
+import { resolveSession, SESSION_COOKIE_NAME } from '../auth-sessions.js';
+import { PLATFORM_SESSION_COOKIE, resolvePlatformSession } from '../platform-sessions.js';
+
+export const sessionHomeRouter = Router();
+
+sessionHomeRouter.get('/session/home', async (req, res) => {
+ const cookies = (req as unknown as { cookies?: Record<string, string> }).cookies ?? {};
+
+ // Tenant session takes precedence — they've already picked a workspace.
+ const tenantCookie = cookies[SESSION_COOKIE_NAME];
+ if (tenantCookie) {
+ const session = await resolveSession(db, tenantCookie);
+ if (session) {
+ const tenant = await db(TABLES.Tenant).where({ id: session.tenantId, status: 'active' }).first();
+ if (tenant) {
+ return res.json({
+ home: `/t/${tenant.slug}/`,
+ kind: session.principalKind,
+ tenantSlug: tenant.slug,
+ });
+ }
+ }
+ }
+
+ // Platform session = identity verified, no workspace picked yet.
+ const platformCookie = cookies[PLATFORM_SESSION_COOKIE];
+ if (platformCookie) {
+ const platform = await resolvePlatformSession(db, platformCookie);
+ if (platform) {
+ return res.json({ home: '/workspaces', kind: 'platform', email: platform.email });
+ }
+ }
+
+ res.json({ home: null });
+}); diff --git a/apps/api/src/routes/settings.ts b/apps/api/src/routes/settings.ts new file mode 100644 index 0000000..4acffee --- /dev/null +++ b/apps/api/src/routes/settings.ts @@ -0,0 +1,548 @@ +/** + * Program-wide settings stored in the Config table. Keyed by + * `program_settings`, the value is a JSON blob of: + * + * programName?: string — how to brand the portal (admin + partner) + * supportEmail?: string — shown in the partner footer as contact + * + * Env is reserved for secrets + build-time; runtime content like this + * lives here so admins can update it without a redeploy. 
+ */ + +import { Router } from 'express'; +import type { Knex } from 'knex'; +import { z } from 'zod'; +import { TABLES, type AdminRow, type ConfigRow, type TenantRow } from '@openpartner/db'; +import { requireAdmin, requireAuth } from '../auth.js'; +import { + MailSettingsValidationError, + getPublicMailSettings, + saveMailSettings, + type MailTransportKind, +} from '../mail-settings.js'; +import { + backfillPartners, + completeNetworkConnect, + getPublicNetworkMembership, + NetworkProxyError, + networkProxy, + saveNetworkMembership, + signupWithNetwork, +} from '../network-client.js'; +import { createApiKeyRow } from '../auth.js'; +import { tenantOf } from '../tenancy.js'; +import { NETWORK_FEDERATION_SCOPES } from './api-keys.js'; + +export const settingsRouter = Router(); + +const CONFIG_KEY = 'program_settings'; + +const settingsSchema = z.object({ + programName: z.string().trim().max(120).optional(), + supportEmail: z.string().trim().email().max(254).optional().or(z.literal('')), +}); + +/** What we persist into the Config row. Logo lives on Tenant; everything + * else is per-tenant program settings. */ +interface PersistedProgramSettings { + programName: string | null; + supportEmail: string | null; +} + +export interface ProgramSettings extends PersistedProgramSettings { + logoUrl: string | null; +} + +async function readSettings(db: Knex, tenantId: string): Promise { + const row = await db(TABLES.Config).where({ tenantId, key: CONFIG_KEY }).first(); + const value = (row?.value ?? {}) as Partial; + // Fall back to Tenant.displayName so brand admins don't have to re-type + // the brand name they set at signup. Saving this page promotes the + // value into program_settings; until then we just surface the signup + // value so the UI is pre-populated and the onboarding checklist + // doesn't keep nagging about a thing that's effectively already set. + let programName = value.programName ?? null; + // Logo lives on Tenant (set by /uploads/logo). 
Always read from there + // so the cached Config row never lags the actual upload. + const tenant = await db('Tenant').where({ id: tenantId }).first(['displayName', 'logoUrl']); + if (!programName && tenant?.displayName) programName = tenant.displayName as string; + return { + programName, + supportEmail: value.supportEmail ?? null, + logoUrl: (tenant?.logoUrl as string | null | undefined) ?? null, + }; +} + +/** Any authenticated caller (admin OR partner) can read — not secret. */ +settingsRouter.get('/config/program', requireAuth, async (req, res) => { + const { db, tenantId } = tenantOf(req); + res.json(await readSettings(db, tenantId)); +}); + +/** Only admins write. Empty strings clear fields. */ +settingsRouter.post('/config/program', requireAuth, requireAdmin, async (req, res) => { + const { db, tenantId } = tenantOf(req); + const body = settingsSchema.safeParse(req.body ?? {}); + if (!body.success) return res.status(400).json({ error: 'invalid_body', detail: body.error.flatten() }); + + const next: PersistedProgramSettings = { + programName: body.data.programName?.trim() || null, + supportEmail: body.data.supportEmail?.trim() || null, + }; + const now = new Date(); + // Upsert: preserves updatedAt semantics without a separate read. + await db(TABLES.Config) + .insert({ tenantId, key: CONFIG_KEY, value: next as unknown as never, updatedAt: now }) + .onConflict(['tenantId', 'key']) + .merge({ value: next as unknown as never, updatedAt: now }); + // Re-hydrate so the returned shape matches GET (includes logoUrl). 
+ res.json(await readSettings(db, tenantId)); +}); + +// ---------- Mail settings ---------- + +const mailSettingsSchema = z.object({ + kind: z.enum(['smtp', 'postmark', 'none']), + from: z.string().trim().max(254).optional().or(z.literal('')), + smtp: z + .object({ + host: z.string().trim().max(253).optional(), + port: z.number().int().min(1).max(65535).optional(), + secure: z.boolean().optional(), + user: z.string().trim().max(320).optional(), + // Password / token are write-only from the client. Undefined = + // "keep existing"; empty string = "clear"; set = rotate. + password: z.string().max(500).optional(), + }) + .optional(), + postmark: z + .object({ + serverToken: z.string().max(500).optional(), + messageStream: z.string().trim().max(120).optional(), + }) + .optional(), +}); + +settingsRouter.get('/config/mail', requireAuth, requireAdmin, async (req, res) => { + const { db, tenantId } = tenantOf(req); + res.json(await getPublicMailSettings(db, tenantId)); +}); + +settingsRouter.post('/config/mail', requireAuth, requireAdmin, async (req, res) => { + const { db, tenantId } = tenantOf(req); + const body = mailSettingsSchema.safeParse(req.body ?? {}); + if (!body.success) return res.status(400).json({ error: 'invalid_body', detail: body.error.flatten() }); + + try { + await saveMailSettings(db, tenantId, { + kind: body.data.kind as MailTransportKind, + from: body.data.from === '' ? null : body.data.from ?? 
undefined, + smtp: body.data.smtp, + postmark: body.data.postmark, + }); + } catch (err) { + if (err instanceof MailSettingsValidationError) { + return res.status(400).json({ error: err.code, field: err.field }); + } + throw err; + } + res.json(await getPublicMailSettings(db, tenantId)); +}); + +// ---------- Partner signup policy ---------- + +const partnerSignupSchema = z.object({ + policy: z.enum(['auto_approve', 'require_review']).optional(), + disabled: z.boolean().optional(), +}); + +settingsRouter.get('/config/partner-signup', requireAuth, requireAdmin, async (req, res) => { + const { db, tenantId } = tenantOf(req); + const row = await db(TABLES.Config) + .where({ tenantId, key: 'partner_signup' }) + .first(); + const value = (row?.value ?? {}) as { policy?: string; disabled?: boolean }; + res.json({ + policy: value.policy === 'require_review' ? 'require_review' : 'auto_approve', + disabled: value.disabled === true, + }); +}); + +settingsRouter.post('/config/partner-signup', requireAuth, requireAdmin, async (req, res) => { + const { db, tenantId } = tenantOf(req); + const body = partnerSignupSchema.safeParse(req.body ?? {}); + if (!body.success) return res.status(400).json({ error: 'invalid_body', detail: body.error.flatten() }); + + const current = await db(TABLES.Config).where({ tenantId, key: 'partner_signup' }).first(); + const currentValue = (current?.value ?? {}) as { policy?: string; disabled?: boolean }; + const next = { + policy: body.data.policy ?? currentValue.policy ?? 'auto_approve', + disabled: body.data.disabled ?? currentValue.disabled ?? 
false, + }; + const now = new Date(); + await db(TABLES.Config) + .insert({ tenantId, key: 'partner_signup', value: next as unknown as never, updatedAt: now }) + .onConflict(['tenantId', 'key']) + .merge({ value: next as unknown as never, updatedAt: now }); + res.json(next); +}); + +// ---------- Network membership ---------- + +const networkMembershipSchema = z.object({ + enabled: z.boolean().optional(), + networkUrl: z.string().url().optional().or(z.literal('')), + /** Plaintext bearer issued by the Network on /vendors/register. Undefined keeps existing. */ + vendorToken: z.string().max(500).optional(), + /** ApiKey.id of the scoped key the Network should call back with. */ + scopedKeyId: z.string().nullable().optional(), + autoEnroll: z.boolean().optional(), +}); + +settingsRouter.get('/config/network', requireAuth, requireAdmin, async (req, res) => { + const { db, tenantId } = tenantOf(req); + res.json(await getPublicNetworkMembership(db, tenantId)); +}); + +settingsRouter.post('/config/network', requireAuth, requireAdmin, async (req, res) => { + const { db, tenantId } = tenantOf(req); + const body = networkMembershipSchema.safeParse(req.body ?? {}); + if (!body.success) return res.status(400).json({ error: 'invalid_body', detail: body.error.flatten() }); + + await saveNetworkMembership(db, tenantId, { + enabled: body.data.enabled, + networkUrl: body.data.networkUrl === '' ? '' : body.data.networkUrl, + vendorToken: body.data.vendorToken, + scopedKeyId: body.data.scopedKeyId, + autoEnroll: body.data.autoEnroll, + }); + res.json(await getPublicNetworkMembership(db, tenantId)); +}); + +/** + * Reconcile existing partners with the Network. Called when an admin + * enables Network membership after already having a partner roster. + * + * Pushes every Partner row through /partners/upsert. 
The Network dedups + * on email — so a creator who's already on the Network from another + * vendor returns the existing networkCreatorId, and we stamp + * Partner.metadata.network.preExisting=true so the admin sees who was + * already known. + * + * Synchronous (returns counts when done). For very large rosters the + * outbox + scheduler-drained retries handle Network-side timeouts so a + * single backfill failure doesn't lose the work. + */ +settingsRouter.post('/config/network/backfill', requireAuth, requireAdmin, async (req, res) => { + const { db, tenantId } = tenantOf(req); + const partners = await db('Partner') + .select>( + 'id', + 'email', + 'name', + 'createdAt', + 'activatedAt', + 'revokedAt', + ); + const result = await backfillPartners(db, tenantId, partners); + res.json(result); +}); + +// ---------- Network connect (self-serve onboarding) ---------- + +const startConnectSchema = z.object({ + /** Where the magic-link should send the admin back to. Defaults to + * the request's origin + tenant path + /admin/network/complete. */ + portalCallbackUrl: z.string().url().optional(), + /** Email to receive the confirmation link. Defaults to the calling + * admin's email if a session-derived admin is calling; required for + * env-bearer admins. */ + contactEmail: z.string().email().optional(), + contactName: z.string().max(120).optional(), + /** Display name shown to creators. Defaults to tenantId on multi + * hosted; on self-host the admin should pass their brand. */ + displayName: z.string().min(1).max(120).optional(), + /** Where the Network can reach this instance's API. Defaults to + * inferring from the request hostname + /api. */ + instanceUrl: z.string().url().optional(), +}); + +settingsRouter.post('/config/network/start-connect', requireAuth, requireAdmin, async (req, res) => { + const { db, tenantId, tenantSlug } = (() => { + const t = tenantOf(req); + return { ...t, tenantSlug: req.tenantSlug ?? 
null }; + })(); + const body = startConnectSchema.safeParse(req.body ?? {}); + if (!body.success) return res.status(400).json({ error: 'invalid_body', detail: body.error.flatten() }); + + const networkUrl = process.env.NETWORK_URL; + if (!networkUrl) { + return res.status(503).json({ error: 'network_url_not_configured', detail: 'set NETWORK_URL env to your network endpoint, e.g. https://network.openpartner.dev' }); + } + + // Resolve defaults from the request + tenant context. + const protoHost = `${req.protocol}://${req.get('host') ?? ''}`; + const tenantPath = tenantSlug ? `/t/${tenantSlug}` : ''; + // /api goes BEFORE /t/ so the DO App Platform ingress + // (/api → api component) actually routes to us. The tenant middleware + // accepts both /t//* and /api/t//* in its regex, so the + // api still resolves the tenant correctly. + const inferredInstanceUrl = `${protoHost}${tenantSlug ? `/api/t/${tenantSlug}` : '/api'}`; + const inferredCallback = `${protoHost}${tenantPath}/admin/network/complete`; + + // Mint a fresh scoped key with the federation scope set; store the + // ApiKey id in network_membership so we can identify which key the + // Network is using and rotate it later via /vendors/me/rotate-callback-key. + const scoped = await createApiKeyRow(db, { + tenantId, + scopes: [...NETWORK_FEDERATION_SCOPES], + label: 'network_federation', + }); + + let signup; + try { + signup = await signupWithNetwork({ + networkUrl, + instanceUrl: body.data.instanceUrl ?? inferredInstanceUrl, + scopedKey: scoped.plaintext, + displayName: body.data.displayName ?? (tenantSlug ? tenantSlug : 'OpenPartner instance'), + contactEmail: body.data.contactEmail ?? '', + contactName: body.data.contactName, + tier: process.env.OPENPARTNER_TENANCY === 'multi' ? 'hosted' : 'self_hosted', + portalCallbackUrl: body.data.portalCallbackUrl ?? inferredCallback, + }); + } catch (err) { + return res.status(502).json({ error: 'network_signup_failed', detail: err instanceof Error ? 
err.message : String(err) }); + } + + // Stash the partial state — networkUrl + scopedKeyId + autoEnroll + // default true. The vendorToken comes back from completeConnect. + await saveNetworkMembership(db, tenantId, { + enabled: false, // not active until verify lands + networkUrl, + scopedKeyId: scoped.id, + autoEnroll: true, + }); + + // This is the manual "Connect to Network" button on the Settings page — + // never uses adminAuthToken, so the result is always the email-verify + // pending shape. + const emailSent = signup.status === 'pending' ? signup.emailSent : true; + res.status(202).json({ vendorId: signup.vendorId, status: 'pending', emailSent }); +}); + +// One-click enroll for tenants that pre-date the auto-enroll-on-signup +// flow. Uses NETWORK_ADMIN_API_KEY to skip the email-verify round trip +// — the brand admin is already signed in here, no point re-confirming. +// Self-host deployments without NETWORK_ADMIN_API_KEY still use the +// /start-connect email-verify path. +settingsRouter.post('/config/network/auto-enroll', requireAuth, requireAdmin, async (req, res) => { + const { db, tenantId, tenantSlug } = (() => { + const t = tenantOf(req); + return { ...t, tenantSlug: req.tenantSlug ?? null }; + })(); + + const networkUrl = process.env.NETWORK_URL; + const adminToken = process.env.NETWORK_ADMIN_API_KEY; + if (!networkUrl) return res.status(503).json({ error: 'network_url_not_configured' }); + if (!adminToken) return res.status(503).json({ error: 'admin_token_not_configured', detail: 'NETWORK_ADMIN_API_KEY required for auto-enroll' }); + + const tenantRow = await db(TABLES.Tenant).where({ id: tenantId }).first(); + if (!tenantRow) return res.status(404).json({ error: 'tenant_not_found' }); + + // Need a session-derived admin so we have a real Admin row + email. + // Env-bearer / scoped-key admins lack that, so they should use the + // manual /start-connect flow which takes contactEmail explicitly. 
+ const principal = req.principal;
+ const adminId = principal?.role === 'admin' && principal.source === 'session' ? principal.adminId : null;
+ if (!adminId) return res.status(400).json({ error: 'session_admin_required', detail: 'Auto-enroll needs a logged-in admin; env/scoped admins should use start-connect with contactEmail.' });
+ const adminRow = await db(TABLES.Admin).where({ id: adminId }).first();
+ const adminEmail = adminRow?.email;
+ if (!adminEmail) return res.status(400).json({ error: 'admin_email_unresolvable' });
+
+ const protoHost = `${req.protocol}://${req.get('host') ?? ''}`;
+ const tenantPath = tenantSlug ? `/t/${tenantSlug}` : '';
+ // /api goes BEFORE /t/ so the DO App Platform ingress
+ // (/api → api component) actually routes to us. The tenant middleware
+ // accepts both /t/<slug>/* and /api/t/<slug>/* in its regex, so the
+ // api still resolves the tenant correctly.
+ const inferredInstanceUrl = `${protoHost}${tenantSlug ? `/api/t/${tenantSlug}` : '/api'}`;
+ const inferredCallback = `${protoHost}${tenantPath}/admin/network/complete`;
+
+ const scoped = await createApiKeyRow(db, {
+ tenantId,
+ scopes: [...NETWORK_FEDERATION_SCOPES],
+ label: 'network_federation',
+ });
+
+ let signup;
+ try {
+ signup = await signupWithNetwork({
+ networkUrl,
+ instanceUrl: inferredInstanceUrl,
+ scopedKey: scoped.plaintext,
+ displayName: tenantRow.displayName,
+ contactEmail: adminEmail,
+ tier: 'hosted',
+ portalCallbackUrl: inferredCallback,
+ adminAuthToken: adminToken,
+ });
+ } catch (err) {
+ return res.status(502).json({ error: 'network_signup_failed', detail: err instanceof Error ? err.message : String(err) });
+ }
+
+ if (signup.status !== 'active') {
+ // Network refused the admin auth — fall back state. Shouldn't happen
+ // in practice but surface clearly so the operator knows the env is wrong. 
+ return res.status(502).json({ error: 'admin_path_rejected', detail: 'Network signup did not return active; check NETWORK_ADMIN_API_KEY' }); + } + + await saveNetworkMembership(db, tenantId, { + enabled: true, + networkUrl, + vendorToken: signup.vendorToken, + scopedKeyId: scoped.id, + autoEnroll: true, + vendorId: signup.vendorId, + }); + + res.json({ ok: true, vendorId: signup.vendorId }); +}); + +const completeConnectSchema = z.object({ ntoken: z.string().min(20) }); + +settingsRouter.post('/config/network/complete-connect', requireAuth, requireAdmin, async (req, res) => { + const { db, tenantId } = tenantOf(req); + const body = completeConnectSchema.safeParse(req.body); + if (!body.success) return res.status(400).json({ error: 'invalid_body', detail: body.error.flatten() }); + + const networkUrl = process.env.NETWORK_URL; + if (!networkUrl) return res.status(503).json({ error: 'network_url_not_configured' }); + + let result; + try { + result = await completeNetworkConnect(networkUrl, body.data.ntoken); + } catch (err) { + return res.status(502).json({ error: 'network_verify_failed', detail: err instanceof Error ? err.message : String(err) }); + } + + // Stamp networkUrl + vendorId explicitly on every complete-connect. + // Without it, brands that hit the email-verify path *before* any + // /start-connect call (e.g. auto-enroll fell back, then they + // verified directly) ended up with networkUrl='' on their membership + // row — and every subsequent proxy call 503'd with + // network_not_configured. + await saveNetworkMembership(db, tenantId, { + enabled: true, + networkUrl, + vendorToken: result.vendorToken, + vendorId: result.vendorId, + }); + + res.json({ ok: true, vendorId: result.vendorId, displayName: result.displayName }); +}); + +// ---------- Network proxy: offerings + partnership requests ---------- +// Admin manages their Network presence through these. 
Backend is the +// only thing that can hold the vendorToken (encrypted in network_membership +// Config), so the portal calls these routes which proxy to the Network. + +import type { Request, Response } from 'express'; + +async function proxy(req: Request, res: Response, fn: (db: Knex, tenantId: string) => Promise, successStatus = 200): Promise { + const { db, tenantId } = tenantOf(req); + try { + const out = await fn(db, tenantId); + res.status(successStatus).json(out); + } catch (err) { + if (err instanceof NetworkProxyError) { + res.status(err.status).json({ error: 'network_call_failed', detail: err.message }); + return; + } + throw err; + } +} + +settingsRouter.get('/admin/network/me', requireAuth, requireAdmin, async (req, res) => + proxy(req, res, async (db, tenantId) => { + // Override the Network's stored partnerCount with the local truth. + // Network's value updates via the hourly heartbeat job — between + // ticks (or before the first tick after connecting) it lags reality. + // The brand admin expects "active partners" to match the count they + // see on /admin/partners, so reconcile here. + const remote = await networkProxy.whoami(db, tenantId); + const partnerRows = await db(TABLES.Partner) + .whereNull('revokedAt') + .count>({ count: '*' }); + const partnerCount = Number(partnerRows[0]?.count ?? 
0); + return { ...(remote as Record), partnerCount }; + }), +); + +settingsRouter.get('/admin/network/offerings', requireAuth, requireAdmin, async (req, res) => + proxy(req, res, (db, tenantId) => networkProxy.listOfferings(db, tenantId)), +); + +settingsRouter.post('/admin/network/offerings', requireAuth, requireAdmin, async (req, res) => + proxy(req, res, (db, tenantId) => networkProxy.createOffering(db, tenantId, req.body), 201), +); + +settingsRouter.patch('/admin/network/offerings/:id', requireAuth, requireAdmin, async (req, res) => + proxy(req, res, (db, tenantId) => networkProxy.updateOffering(db, tenantId, req.params.id!, req.body)), +); + +settingsRouter.delete('/admin/network/offerings/:id', requireAuth, requireAdmin, async (req, res) => + proxy(req, res, (db, tenantId) => networkProxy.deleteOffering(db, tenantId, req.params.id!)), +); + +settingsRouter.get('/admin/network/requests', requireAuth, requireAdmin, async (req, res) => { + const status = typeof req.query.status === 'string' ? req.query.status : undefined; + return proxy(req, res, (db, tenantId) => networkProxy.listRequests(db, tenantId, status)); +}); + +settingsRouter.post('/admin/network/requests/:id/approve', requireAuth, requireAdmin, async (req, res) => + proxy(req, res, (db, tenantId) => networkProxy.approveRequest(db, tenantId, req.params.id!, req.body ?? {})), +); + +settingsRouter.post('/admin/network/requests/:id/reject', requireAuth, requireAdmin, async (req, res) => + proxy(req, res, (db, tenantId) => networkProxy.rejectRequest(db, tenantId, req.params.id!, req.body ?? {})), +); + +// Brand-side creator discovery — vendor-authenticated proxy passes the +// browser's querystring straight through to Network's directory endpoint. +settingsRouter.get('/admin/network/discover/creators', requireAuth, requireAdmin, async (req, res) => { + const qs = req.url.includes('?') ? 
req.url.slice(req.url.indexOf('?') + 1) : ''; + return proxy(req, res, (db, tenantId) => networkProxy.discoverCreators(db, tenantId, qs)); +}); + +// Brand admin invites a discovered creator to apply to an offering. +// Body is forwarded verbatim — Network validates the creatorId + offering +// ownership. +settingsRouter.post('/admin/network/offerings/:id/invite-creator', requireAuth, requireAdmin, async (req, res) => + proxy(req, res, (db, tenantId) => networkProxy.inviteCreator(db, tenantId, req.params.id!, req.body ?? {}), 201), +); + +// ---------- Network billing proxy ---------- +// Self-hosted vendors subscribe to Network access ($29/mo + 3% metered) +// from this admin surface. Hosted vendors get bundled with their main +// Flex/Revshare subscription — the Network endpoint handles that gate +// and we just pass the response through. + +settingsRouter.get('/admin/network/billing', requireAuth, requireAdmin, async (req, res) => + proxy(req, res, (db, tenantId) => networkProxy.getBilling(db, tenantId)), +); + +settingsRouter.post('/admin/network/billing/checkout', requireAuth, requireAdmin, async (req, res) => { + const body = z.object({ + successUrl: z.string().url(), + cancelUrl: z.string().url(), + }).safeParse(req.body); + if (!body.success) return res.status(400).json({ error: 'invalid_body', detail: body.error.flatten() }); + return proxy(req, res, (db, tenantId) => networkProxy.createCheckout(db, tenantId, body.data)); +}); + +settingsRouter.post('/admin/network/billing/portal', requireAuth, requireAdmin, async (req, res) => { + const body = z.object({ returnUrl: z.string().url() }).safeParse(req.body); + if (!body.success) return res.status(400).json({ error: 'invalid_body', detail: body.error.flatten() }); + return proxy(req, res, (db, tenantId) => networkProxy.openPortal(db, tenantId, body.data)); +}); diff --git a/apps/api/src/routes/signin.ts b/apps/api/src/routes/signin.ts new file mode 100644 index 0000000..4efb86e --- /dev/null +++ 
b/apps/api/src/routes/signin.ts @@ -0,0 +1,93 @@ +/** + * Unified email-only signin for the multi-tenant deployment. + * + * The Landing on app.openpartner.dev offers a single Sign-in entry — we + * don't make the user pick "brand vs creator" up front. Email goes in, + * a platform-identity magic link goes out (one email regardless of how + * many brands the user admins). Verifying the link issues a + * PlatformSession; the SPA reads /api/me/workspaces and the user picks + * which brand to enter. Creator signin is forwarded to the Network in + * parallel — same email, separate flow, separate cookie. + * + * Always 200 silently to avoid email enumeration. + * + * Multi-tenant only — single-tenant deployments use the existing + * per-tenant /auth/signin which already knows its tenant. + */ + +import { Router } from 'express'; +import { z } from 'zod'; +import { TABLES, type AdminRow, DEFAULT_TENANT_ID } from '@openpartner/db'; +import { db } from '../db.js'; +import { issueMagicLink } from '../auth-sessions.js'; +import { adminSigninEmail, buildMagicLinkUrl } from '../email-templates.js'; +import { getMailer } from '../mailer.js'; +import { getTenancyMode } from '../tenancy.js'; + +export const signinRouter = Router(); + +const schema = z.object({ email: z.string().trim().email() }); + +signinRouter.post('/signin', async (req, res) => { + if (getTenancyMode() !== 'multi') { + return res.status(400).json({ error: 'use_per_tenant_signin' }); + } + + const body = schema.safeParse(req.body); + if (!body.success) return res.status(400).json({ error: 'invalid_email' }); + + const email = body.data.email.toLowerCase(); + + // Brand admin path. We don't care here which tenants — only whether ANY + // non-revoked admin row matches. The picker after verify enumerates the + // workspaces and lets the user choose. Mailer best-effort; per-tenant + // log on failure. 
+ const anyAdmin = await db(TABLES.Admin) + .where({ email }) + .whereNull('revokedAt') + .first(); + + if (anyAdmin) { + try { + const issued = await issueMagicLink(db, { + tenantId: DEFAULT_TENANT_ID, // placeholder — platform tokens aren't tenant-scoped + email, + purpose: 'platform_signin', + principalKind: 'platform', + principalId: email, + }); + // Tenant-less link — verify will issue a PlatformSession, the SPA + // routes through /workspaces to let the user pick which brand to + // enter. + const link = buildMagicLinkUrl(issued.plaintext); + const tmpl = adminSigninEmail(anyAdmin.name, link); + await getMailer().send({ db, tenantId: anyAdmin.tenantId }, { + to: email, + subject: tmpl.subject, + text: tmpl.text, + html: tmpl.html, + tag: 'platform_signin', + metadata: { purpose: 'platform_signin', email, source: 'unified_signin' }, + }); + } catch (err) { + console.error('[signin] platform mail failed', { email, err }); + } + } + + // Creator side — same email, separate cookie, parallel flow. Best-effort. + const networkUrl = process.env.NETWORK_URL; + if (networkUrl) { + try { + await fetch(`${networkUrl.replace(/\/$/, '')}/creators/signin`, { + method: 'POST', + headers: { 'content-type': 'application/json', 'user-agent': 'OpenPartner-Signin/1' }, + body: JSON.stringify({ email }), + signal: AbortSignal.timeout(5_000), + }); + } catch (err) { + console.error('[signin] network creator signin failed', err); + } + } + + res.json({ ok: true }); +}); diff --git a/apps/api/src/routes/signup.ts b/apps/api/src/routes/signup.ts new file mode 100644 index 0000000..c11e872 --- /dev/null +++ b/apps/api/src/routes/signup.ts @@ -0,0 +1,253 @@ +/** + * Public tenant signup. Hosted/multi-tenant only — in single-tenant mode + * the seeded 'default' tenant is the entire installation and the install + * endpoint covers first-run setup. + * + * POST /signup creates a Tenant + first Admin and emails the admin a + * magic-link to activate. 
The endpoint is unauthenticated, rate-limited + * by IP, and gated by slug validation: + * + * - slug matches /^[a-z0-9-]{3,30}$/ + * - slug not in RESERVED_SLUGS + * - slug not already taken (Tenant.slug unique) + * - adminEmail not already in use under that slug (which is impossible + * since the tenant is brand new, but we check anyway for symmetry) + * + * Mounted BEFORE tenantMiddleware in app.ts because there's no tenant + * context yet — we use the privileged `db` for the writes. + */ + +import { Router } from 'express'; +import { z } from 'zod'; +import { ulid } from 'ulid'; +import { TABLES, BILLING_PLANS, type AdminRow, type TenantRow } from '@openpartner/db'; +import { db } from '../db.js'; +import { ipRateLimit } from '../middleware/rate-limit.js'; +import { issueMagicLink } from '../auth-sessions.js'; +import { getMailer } from '../mailer.js'; +import { adminInviteEmail, buildMagicLinkUrl } from '../email-templates.js'; +import { RESERVED_SLUGS, getTenancyMode } from '../tenancy.js'; + +export const signupRouter = Router(); + +const signupLimit = ipRateLimit({ name: 'signup', max: 10, windowMs: 60_000 }); + +const signupSchema = z.object({ + slug: z + .string() + .trim() + .min(3) + .max(30) + .regex(/^[a-z0-9-]+$/, 'slug must be lowercase alphanumerics or hyphens'), + displayName: z.string().trim().min(1).max(120), + adminEmail: z.string().trim().email().max(254), + adminName: z.string().trim().min(1).max(120), + /** Optional billing plan picked from the marketing pricing CTAs. Stored + * on Tenant.billingPlan; the actual subscription is created when the + * admin completes Stripe Checkout post-activation. Enterprise tenants + * set this for record-keeping but never run through Checkout. 
*/ + plan: z.enum(BILLING_PLANS as readonly [string, ...string[]]).optional(), +}); + +class SignupGuardError extends Error { + constructor(public code: string) { + super(code); + } +} + +signupRouter.post('/signup', signupLimit, async (req, res) => { + if (getTenancyMode() !== 'multi') { + return res.status(400).json({ error: 'signup_not_available_in_single_tenant' }); + } + + const body = signupSchema.safeParse(req.body); + if (!body.success) return res.status(400).json({ error: 'invalid_body', detail: body.error.flatten() }); + + const slug = body.data.slug.toLowerCase(); + const adminEmail = body.data.adminEmail.toLowerCase(); + + if (RESERVED_SLUGS.has(slug)) { + return res.status(409).json({ error: 'slug_reserved' }); + } + + let tenantId = ulid(); + const adminId = ulid(); + const now = new Date(); + + try { + await db.transaction(async (trx) => { + // Re-check inside the transaction. A racing signup with the same + // slug would otherwise both pass the pre-check and one would hit + // the unique constraint as a generic 500. The unique constraint is + // still our final guard; this just gives us a clean error. + const existing = await trx(TABLES.Tenant).where({ slug }).first(); + if (existing) { + // Recovery path: if the slug exists but no admin has activated + // yet, the previous signup was abandoned (typo'd email, mailer + // misconfigured, etc.). Replace the unactivated admins + magic + // links + the network membership Config so the new email can + // claim the brand cleanly. If anyone has activated we still + // refuse — that's a real conflict. 
+ const activatedExists = await trx(TABLES.Admin) + .where({ tenantId: existing.id }) + .whereNotNull('activatedAt') + .first(); + if (activatedExists) throw new SignupGuardError('slug_taken'); + + tenantId = existing.id; + await trx(TABLES.Admin).where({ tenantId }).del(); + await trx(TABLES.MagicLinkToken).where({ tenantId }).del(); + await trx(TABLES.Config).where({ tenantId, key: 'network_membership' }).del(); + await trx(TABLES.Tenant).where({ id: tenantId }).update({ + displayName: body.data.displayName, + // Refresh the plan choice on recovery — the user may have come + // back via a different pricing CTA than their first attempt. + ...(body.data.plan ? { billingPlan: body.data.plan as TenantRow['billingPlan'] } : {}), + updatedAt: new Date(), + }); + } else { + await trx(TABLES.Tenant).insert({ + id: tenantId, + slug, + displayName: body.data.displayName, + status: 'active', + billingPlan: (body.data.plan ?? null) as TenantRow['billingPlan'], + metadata: { createdBy: 'signup' } as unknown as never, + }); + } + + await trx(TABLES.Admin).insert({ + id: adminId, + tenantId, + email: adminEmail, + name: body.data.adminName, + activatedAt: null, + }); + }); + } catch (err) { + if (err instanceof SignupGuardError) { + return res.status(409).json({ error: err.code }); + } + if (typeof err === 'object' && err !== null && (err as { code?: string }).code === '23505') { + // Unique violation — slug got taken between the pre-check and the + // insert. Surface as the same 409. + return res.status(409).json({ error: 'slug_taken' }); + } + throw err; + } + + // Email the activation magic link. If this fails the Tenant + Admin row + // remain — the operator can hit /admins/:id/invite later (after first + // admin activates) or we can expose a dedicated "resend invite" public + // endpoint. 
We don't roll back signup on mail failure because the + // tenant slug is now taken and reusing it might surprise the second + // installer; better to leave it claimed and let them recover via mail + // ops than auto-release on a transient SMTP blip. + try { + const issued = await issueMagicLink(db, { + tenantId, + email: adminEmail, + purpose: 'admin_invite', + principalKind: 'admin', + principalId: adminId, + }); + const tmpl = adminInviteEmail(body.data.adminName, buildMagicLinkUrl(issued.plaintext, slug), body.data.displayName); + await getMailer().send({ db, tenantId }, { + to: adminEmail, + subject: tmpl.subject, + text: tmpl.text, + html: tmpl.html, + tag: 'admin_invite', + metadata: { purpose: 'admin_invite', adminId, signup: true }, + }); + } catch (err) { + const message = err instanceof Error ? err.message : 'mail_failed'; + return res.status(202).json({ + ok: true, + tenant: { id: tenantId, slug }, + mailDelivered: false, + mailError: message, + createdAt: now, + }); + } + + // Auto-enroll hosted brand on the Network. The hosted value prop + // INCLUDES partner discovery; new brands shouldn't have to find a + // "connect" button to use what they signed up for. Visibility is + // controlled by whether they publish offerings, not by membership. + // + // If NETWORK_ADMIN_API_KEY is set we take the admin fast path — Network + // skips its email verify (we just verified the brand admin via our own + // magic link in the next step) and returns the vendorToken inline so + // membership is `enabled: true` from t=0. + // + // Otherwise we fall back to the email-verify flow (used by self-host + // and any operator who didn't wire the admin key). 
+  let networkStatus: 'active' | 'pending' | 'skipped' | 'failed' = 'skipped';
+  let networkVendorId: string | null = null;
+  const networkUrl = process.env.NETWORK_URL;
+  const networkAdminKey = process.env.NETWORK_ADMIN_API_KEY;
+  if (networkUrl) {
+    try {
+      const { signupWithNetwork } = await import('../network-client.js');
+      const { createApiKeyRow } = await import('../auth.js');
+      const { NETWORK_FEDERATION_SCOPES } = await import('./api-keys.js');
+      const { saveNetworkMembership } = await import('../network-client.js');
+
+      const scoped = await createApiKeyRow(db, {
+        tenantId,
+        scopes: [...NETWORK_FEDERATION_SCOPES],
+        label: 'network_federation',
+      });
+      const protoHost = `${req.protocol}://${req.get('host') ?? ''}`;
+      const signupResult = await signupWithNetwork({
+        networkUrl,
+        // /api comes BEFORE /t/ so the DO App Platform ingress
+        // (/api → api component) actually routes to us. The tenant
+        // middleware accepts both /t/<slug>/* and /api/t/<slug>/* via
+        // its regex, so the api still resolves the tenant correctly.
+        instanceUrl: `${protoHost}/api/t/${slug}`,
+        scopedKey: scoped.plaintext,
+        displayName: body.data.displayName,
+        contactEmail: adminEmail,
+        contactName: body.data.adminName,
+        tier: 'hosted',
+        portalCallbackUrl: `${protoHost}/t/${slug}/admin/network/complete`,
+        adminAuthToken: networkAdminKey,
+      });
+      networkVendorId = signupResult.vendorId;
+      if (signupResult.status === 'active') {
+        await saveNetworkMembership(db, tenantId, {
+          enabled: true,
+          networkUrl,
+          vendorToken: signupResult.vendorToken,
+          scopedKeyId: scoped.id,
+          autoEnroll: true,
+          vendorId: signupResult.vendorId,
+        });
+        networkStatus = 'active';
+      } else {
+        // Email-verify path — vendorToken comes back at complete-connect. 
+ await saveNetworkMembership(db, tenantId, { + enabled: false, + networkUrl, + scopedKeyId: scoped.id, + autoEnroll: true, + vendorId: signupResult.vendorId, + }); + networkStatus = 'pending'; + } + } catch (err) { + console.error('[signup] auto-network-register failed', err); + networkStatus = 'failed'; + } + } + + res.status(201).json({ + ok: true, + tenant: { id: tenantId, slug }, + mailDelivered: true, + createdAt: now, + network: { status: networkStatus, vendorId: networkVendorId }, + }); +}); diff --git a/apps/api/src/routes/stripe-webhook.ts b/apps/api/src/routes/stripe-webhook.ts index d41c4e4..be605be 100644 --- a/apps/api/src/routes/stripe-webhook.ts +++ b/apps/api/src/routes/stripe-webhook.ts @@ -1,13 +1,34 @@ import { Router, raw } from 'express'; +import type { Knex } from 'knex'; import Stripe from 'stripe'; import { ulid } from 'ulid'; -import { TABLES, type EventRow, type PartnerRow, type PayoutRow } from '@openpartner/db'; -import { db } from '../db.js'; +import { + TABLES, + type AttributionRow, + type ClickRow, + type CommissionRow, + type EventRow, + type IdentityRow, + type PartnerRow, + type PayoutRow, + type TenantRow, +} from '@openpartner/db'; +import { appDb, db } from '../db.js'; import { attributeEvent } from '../attribution.js'; -import { persistMerchantSubscription } from './billing.js'; +import { inferPlanFromPriceIds, persistMerchantSubscription, updateTenantPlanFromStripeSub } from './billing.js'; +import { ensureCouponClickAndIdentity, findCouponByCode } from './coupons.js'; const stripeKey = process.env.STRIPE_SECRET_KEY; -const webhookSecret = process.env.STRIPE_WEBHOOK_SECRET; +// STRIPE_WEBHOOK_SECRET accepts either a single secret or a comma-separated +// list. Stripe's new "Event destinations" UI splits platform-account events +// (checkout.*, invoice.*, customer.*) and connected-account events +// (account.updated, transfer.*) into separate destinations, each with its own +// signing secret. 
Both destinations point at the same /webhooks/stripe URL — +// we just need to verify against any configured secret. +const webhookSecrets = (process.env.STRIPE_WEBHOOK_SECRET ?? '') + .split(',') + .map((s) => s.trim()) + .filter(Boolean); const stripe = stripeKey ? new Stripe(stripeKey) : null; @@ -16,84 +37,254 @@ export const stripeWebhookRouter = Router(); /** * Stripe webhook → raw event log. * + * Stripe events have no URL tenant — the webhook URL is platform-wide. We + * resolve tenantId from event metadata (every Stripe object we create is + * stamped with `openpartner_tenant_id`) with DB-backed fallbacks for objects + * that pre-date the stamping. Once resolved, the actual writes happen inside + * an `appDb.transaction(...)` with `SET LOCAL app.tenant_id` so RLS catches + * any cross-tenant mistake as a second line of defense. + * * Each Stripe event we care about becomes an immutable Event row, then goes * through the attribution engine. We map: * - customer.created → 'signup' * - customer.subscription.created → 'subscription_created' * - invoice.paid → 'invoice_paid' (carries revenue) * - * We require stripe_userId to be present in customer metadata as - * `openpartner_user_id` — that's the bridge from Stripe's Customer to the - * merchant's userId, which is what Identity stitches against. + * We require `openpartner_user_id` to be present in customer metadata as + * the bridge from Stripe's Customer to the merchant's userId, which is + * what Identity stitches against. 
*/ stripeWebhookRouter.post( '/webhooks/stripe', raw({ type: 'application/json' }), async (req, res) => { - if (!stripe || !webhookSecret) { + if (!stripe || webhookSecrets.length === 0) { return res.status(503).json({ error: 'stripe_not_configured' }); } const sig = req.header('stripe-signature'); if (!sig) return res.status(400).json({ error: 'missing_signature' }); - let event: Stripe.Event; - try { - event = stripe.webhooks.constructEvent(req.body, sig, webhookSecret); - } catch { - return res.status(400).json({ error: 'invalid_signature' }); - } - - const connectResult = await handleConnectEvent(event); - if (connectResult) return res.json({ ok: true, connect: connectResult }); - - const mapped = await mapStripeEvent(stripe, event); - if (!mapped) return res.json({ ok: true, skipped: event.type }); - - // Idempotency: a Stripe retry (5xx, timeout) re-delivers the same - // event.id. Insert with ON CONFLICT DO NOTHING on the unique - // partial index over externalEventId, then handle the dedupe path. - const eventId = ulid(); - const inserted = await db(TABLES.Event) - .insert({ - id: eventId, - userId: mapped.userId, - type: mapped.type, - value: mapped.value != null ? mapped.value.toFixed(2) : null, - currency: mapped.currency ?? 'USD', - externalEventId: event.id, - metadata: { stripeEventId: event.id, stripeType: event.type }, - ts: new Date(event.created * 1000), - }) - .onConflict('externalEventId') - .ignore() - .returning('*'); - - if (inserted.length === 0) { - // Retry of a previously-processed event — the first delivery - // already attributed it. Return 2xx so Stripe stops retrying. 
- const existing = await db(TABLES.Event).where({ externalEventId: event.id }).first(); - return res.json({ ok: true, idempotent: true, eventId: existing?.id }); - } - - const result = await attributeEvent(db, inserted[0] as EventRow); - res.json({ ok: true, eventId, attribution: result }); + let event: Stripe.Event | null = null; + for (const secret of webhookSecrets) { + try { + event = stripe.webhooks.constructEvent(req.body, sig, secret); + break; + } catch { + // Try the next secret. If none match we'll fall through to 400. + } + } + if (!event) return res.status(400).json({ error: 'invalid_signature' }); + + const tenantId = await resolveTenantForEvent(event); + if (!tenantId) { + // Genuinely unresolvable — most likely a connected-account event for + // an account we don't recognize. 2xx so Stripe stops retrying. + return res.json({ ok: true, skipped: event.type, reason: 'unresolved_tenant' }); + } + + const result = await runInTenant(tenantId, async (trx) => { + const connectResult = await handleConnectEvent(trx, event!, tenantId); + if (connectResult) return { connect: connectResult }; + + // Coupon auto-redemption: if the event carries a discount code that + // matches an OpenPartner Coupon in this tenant, ensure the synthetic + // Click + Identity exist BEFORE the standard attribution path runs. + // The next attributeEvent() call then finds the click and credits + // the partner — same code path as a clicked share-link conversion. + const redeemed = await maybeRedeemStripeCoupons(trx, stripe!, event!, tenantId); + if (redeemed.length > 0) { + console.log('[stripe-webhook] auto-redeemed coupons', { eventId: event!.id, redeemed }); + } + + const mapped = await mapStripeEvent(trx, stripe!, event!); + if (!mapped) return { skipped: event!.type }; + + // Idempotency: a Stripe retry (5xx, timeout) re-delivers the same + // event.id. Insert with ON CONFLICT DO NOTHING on the unique + // partial index over externalEventId, then handle the dedupe path. 
+      const eventId = ulid();
+      const inserted = await trx(TABLES.Event)
+        .insert({
+          id: eventId,
+          tenantId,
+          userId: mapped.userId,
+          type: mapped.type,
+          value: mapped.value != null ? mapped.value.toFixed(2) : null,
+          currency: mapped.currency ?? 'USD',
+          externalEventId: event!.id,
+          metadata: { stripeEventId: event!.id, stripeType: event!.type, ...(mapped.metadata ?? {}) },
+          ts: new Date(event!.created * 1000),
+        })
+        .onConflict('externalEventId')
+        .ignore()
+        .returning('*');
+
+      if (inserted.length === 0) {
+        // Retry of a previously-processed event — the first delivery
+        // already attributed it. Return 2xx so Stripe stops retrying.
+        const existing = await trx(TABLES.Event).where({ externalEventId: event!.id }).first();
+        return { idempotent: true, eventId: existing?.id };
+      }
+
+      // Corrective events (refund, dispute, payment_failed) are recorded for
+      // the audit trail but don't drive new attribution rows — handling those
+      // is done in mapStripeEvent before insertion (e.g. flipping the source
+      // commissions to 'reversed').
+      if (CORRECTIVE_EVENT_TYPES.has(mapped.type)) {
+        return { eventId, corrective: mapped.type };
+      }
+
+      const attribution = await attributeEvent(trx, inserted[0] as EventRow);
+      return { eventId, attribution };
+    });
+
+    res.json({ ok: true, ...result });
   },
 );
+const CORRECTIVE_EVENT_TYPES = new Set(['refund', 'dispute_created', 'invoice_payment_failed']);
+
+/**
+ * Run a callback inside an appDb transaction with `app.tenant_id` pinned to
+ * the given tenant. Mirrors what tenantMiddleware does for HTTP requests, but
+ * we can't use that here because Stripe webhooks have no URL tenant.
+ */
+async function runInTenant<T>(tenantId: string, fn: (trx: Knex.Transaction) => Promise<T>): Promise<T> {
+  return appDb.transaction(async (trx) => {
+    // tenantId is sourced from a DB lookup or our own metadata stamp on
+    // the Stripe object — never directly user-controlled — and single-
+    // quotes are escaped before inlining. 
Postgres SET LOCAL doesn't
+    // accept bind params, so the inline interpolation is required.
+    // nosemgrep: javascript.lang.security.audit.sqli.node-knex-sqli.node-knex-sqli
+    await trx.raw(`set local app.tenant_id = '${tenantId.replace(/'/g, "''")}'`);
+    return fn(trx);
+  });
+}
+
+/**
+ * Resolve which tenant an event belongs to. Strategy:
+ *
+ * 1. Read `openpartner_tenant_id` from the event object's metadata. Every
+ *    Stripe object we create is stamped with this on construction, so for
+ *    anything created post-multi-tenant deploy the lookup is constant-time.
+ * 2. For events whose payload doesn't carry our metadata (older Connect
+ *    accounts, transfers identified only by payoutId), fall back to DB
+ *    lookup via the privileged `db` (cross-tenant scan).
+ * 3. If neither yields a tenant, return null and the caller skips the
+ *    event with 2xx so Stripe stops retrying.
+ */
+async function resolveTenantForEvent(event: Stripe.Event): Promise<string | null> {
+  const obj = event.data.object as { metadata?: Record<string, string> | null };
+  const direct = obj?.metadata?.openpartner_tenant_id;
+  if (direct) return direct;
+
+  switch (event.type) {
+    case 'account.updated': {
+      const account = event.data.object as Stripe.Account;
+      const partnerId = account.metadata?.openpartner_partner_id;
+      if (partnerId) {
+        const row = await db(TABLES.Partner).where({ id: partnerId }).first(['tenantId']);
+        if (row) return row.tenantId;
+      }
+      // Last-resort: any partner with this stripeConnectAccountId.
+      const linked = await db(TABLES.Partner)
+        .where({ stripeConnectAccountId: account.id })
+        .first(['tenantId']);
+      return linked?.tenantId ?? null;
+    }
+    case 'transfer.updated':
+    case 'transfer.reversed': {
+      const transfer = event.data.object as Stripe.Transfer;
+      const payoutId = transfer.metadata?.openpartner_payout_id;
+      if (!payoutId) return null;
+      const row = await db(TABLES.Payout).where({ id: payoutId }).first(['tenantId']);
+      return row?.tenantId ?? 
null; + } + case 'checkout.session.completed': { + const session = event.data.object as Stripe.Checkout.Session; + // Merchant-subscription checkout (the brand subscribing to Flex / + // Revshare via /admin/billing) stamps openpartner_tenant_id on + // session metadata at create time. Prefer that — it's a constant- + // time read with no DB round-trip. + const metaTenantId = session.metadata?.openpartner_tenant_id; + if (metaTenantId) return metaTenantId; + // Rewardful-style merchant→customer checkout: tenant resolves via + // the Click row identified by client_reference_id (the cref). + if (session.client_reference_id) { + const row = await db(TABLES.Click) + .where({ id: session.client_reference_id }) + .first(['tenantId']); + if (row) return row.tenantId; + } + return null; + } + case 'customer.created': + case 'customer.subscription.created': + case 'customer.subscription.updated': + case 'customer.subscription.deleted': + case 'invoice.paid': + case 'invoice.payment_failed': + case 'charge.refunded': + case 'charge.dispute.created': { + const customerId = extractCustomerId(event.data.object as { customer?: unknown; id?: string }); + if (!customerId) return null; + // First, check whether this is a merchant-self-subscription + // Customer (created by /billing/checkout). We persist the + // customer id on Tenant.stripeCustomerId on first checkout, + // so the lookup is a constant-time indexed read and avoids a + // Stripe API roundtrip to inspect Customer.metadata. + const tenantRow = await db('Tenant') + .where({ stripeCustomerId: customerId }) + .first<{ id: string }>('id'); + if (tenantRow) return tenantRow.id; + // Fallback: rewardful-style customer (came in through attribution). + // Resolves via the Identity → Click chain — covers Customers + // stitched at checkout.session.completed by the Rewardful path. 
+ const identity = await db(TABLES.Identity) + .join(TABLES.Click, `${TABLES.Click}.id`, `${TABLES.Identity}.clickId`) + .where(`${TABLES.Identity}.userId`, customerId) + .first<{ tenantId: string }>(`${TABLES.Click}.tenantId as tenantId`); + return identity?.tenantId ?? null; + } + default: + return null; + } +} + +/** + * Pull a customer id out of an arbitrary Stripe event object — handles the + * `customer` field being either a string id, an embedded Customer object, + * a deleted-customer marker, or absent. For customer.* events the id is on + * the object itself. + */ +function extractCustomerId(obj: { customer?: unknown; id?: string; object?: string }): string | null { + if (obj.object === 'customer' && obj.id) return obj.id; + const c = obj.customer; + if (typeof c === 'string') return c; + if (c && typeof c === 'object' && 'id' in c && typeof (c as { id: unknown }).id === 'string') { + return (c as { id: string }).id; + } + return null; +} + // Connect-side events. These don't produce attribution Events — they update // Partner (onboarding progress) and Payout (transfer resolution) rows. 
-async function handleConnectEvent(event: Stripe.Event): Promise<string | null> {
+async function handleConnectEvent(
+  trx: Knex.Transaction,
+  event: Stripe.Event,
+  tenantId: string,
+): Promise<string | null> {
   switch (event.type) {
     case 'account.updated': {
       const account = event.data.object as Stripe.Account;
       const partnerId = account.metadata?.openpartner_partner_id;
       if (!partnerId) return 'account_updated_no_partner_id';
-      await db(TABLES.Partner)
+      await trx(TABLES.Partner)
         .where({ id: partnerId })
         .update({
           stripeConnectAccountId: account.id,
-          metadata: db.raw(
+          metadata: trx.raw(
             `jsonb_set(coalesce("metadata", '{}'::jsonb), '{stripe}', ?::jsonb, true)`,
             [
               JSON.stringify({
@@ -110,19 +301,85 @@ }
     case 'checkout.session.completed': {
       const session = event.data.object as Stripe.Checkout.Session;
+      // Disambiguator: a Rewardful-style merchant→customer checkout carries
+      // client_reference_id (the cref). Our merchant→OpenPartner subscription
+      // checkout (created in billing.ts) doesn't. So presence of
+      // client_reference_id means "skip the merchant-subscription path and
+      // let mapStripeEvent do attribution."
+      if (session.client_reference_id) return null;
       if (session.mode === 'subscription' && typeof session.customer === 'string' && typeof session.subscription === 'string') {
-        await persistMerchantSubscription(session.customer, session.subscription);
+        // Pull trial_end off the subscription so the dashboard can show
+        // "trial ends in N days" without an extra round-trip on every
+        // page load.
+        let trialEndsAt: Date | null = null;
+        if (stripe) {
+          try {
+            const sub = await stripe.subscriptions.retrieve(session.subscription);
+            trialEndsAt = sub.trial_end ? new Date(sub.trial_end * 1000) : null;
+          } catch {
+            // Non-fatal: dashboard will show "trial unknown" until the
+            // next subscription update event lands. 
+ } + } + await persistMerchantSubscription(trx, tenantId, { + stripeCustomerId: session.customer, + stripeSubscriptionId: session.subscription, + trialEndsAt, + }); + // Stamp firstTrialActivatedAt iff this checkout actually + // included a trial AND we haven't stamped before. Conditional + // SQL update avoids overwriting on a webhook retry. + const startedTrial = session.metadata?.openpartner_trial === '1'; + if (startedTrial) { + await trx(TABLES.Tenant) + .where({ id: tenantId }) + .whereNull('firstTrialActivatedAt') + .update({ firstTrialActivatedAt: new Date(), updatedAt: new Date() }); + } return 'merchant_subscription_persisted'; } return null; } + case 'customer.subscription.updated': { + // Plan switch via Stripe Customer Portal (or trial conversion). + // Detect the new plan from the price IDs on the active items and + // update Tenant.billingPlan to match. Only act when the price IDs + // are ones we recognize — third-party additions (e.g. one-off line + // items) shouldn't reclassify the tenant. + const sub = event.data.object as Stripe.Subscription; + const priceIds = sub.items.data.map((it) => it.price.id); + const newPlan = inferPlanFromPriceIds(priceIds); + const trialEndsAt = sub.trial_end ? new Date(sub.trial_end * 1000) : null; + if (newPlan) { + await updateTenantPlanFromStripeSub(trx, tenantId, newPlan); + } + // Always refresh trial_end + subscriptionId so the local mirror + // reflects the current Stripe state. + await persistMerchantSubscription(trx, tenantId, { + stripeSubscriptionId: sub.id, + trialEndsAt, + }); + return newPlan ? `subscription_updated_plan_${newPlan}` : 'subscription_updated'; + } + case 'customer.subscription.deleted': { + // Cancellation (manual via Portal, trial-without-card, or dunning + // exhaustion). Clear the local subscription pointer; Tenant stays + // active so the admin can re-subscribe via /billing/checkout + // without losing data. 
+      const sub = event.data.object as Stripe.Subscription;
+      await persistMerchantSubscription(trx, tenantId, {
+        stripeSubscriptionId: null,
+        trialEndsAt: null,
+      });
+      return `subscription_deleted_${sub.status}`;
+    }
     case 'transfer.updated':
     case 'transfer.reversed': {
       const transfer = event.data.object as Stripe.Transfer;
       const payoutId = transfer.metadata?.openpartner_payout_id;
       if (!payoutId) return null;
       const reversed = event.type === 'transfer.reversed' || (transfer.reversed ?? false);
-      await db(TABLES.Payout).where({ id: payoutId }).update({
+      await trx(TABLES.Payout).where({ id: payoutId }).update({
         status: reversed ? 'failed' : 'paid',
         completedAt: reversed ? null : new Date(),
       });
@@ -138,10 +395,50 @@ interface MappedEvent {
   type: string;
   value?: number;
   currency?: string;
+  metadata?: Record<string, unknown>;
 }
-async function mapStripeEvent(stripe: Stripe, event: Stripe.Event): Promise<MappedEvent | null> {
+async function mapStripeEvent(
+  trx: Knex.Transaction,
+  stripe: Stripe,
+  event: Stripe.Event,
+): Promise<MappedEvent | null> {
   switch (event.type) {
+    case 'checkout.session.completed': {
+      // Rewardful-style flow: merchant adds client_reference_id (the cref)
+      // to Stripe Checkout. We stitch the resulting Stripe customer to that
+      // click here, so subsequent invoice.paid / subscription events resolve
+      // without an explicit op.identify() call from the merchant's app.
+      const session = event.data.object as Stripe.Checkout.Session;
+      const cref = session.client_reference_id;
+      const customerId = typeof session.customer === 'string' ? session.customer : session.customer?.id;
+      if (!cref || !customerId) return null;
+
+      // Validate the cref points at a real Click in this tenant — silently
+      // drop unknowns so a bad client_reference_id can't inflate a partner's
+      // numbers. RLS scopes the query to the resolved tenant. 
+ const click = await trx(TABLES.Click).where({ id: cref }).first(); + if (!click) return null; + + await trx(TABLES.Identity) + .insert({ id: ulid(), tenantId: click.tenantId, clickId: cref, userId: customerId }) + .onConflict(['clickId', 'userId']) + .ignore(); + + // Backfill metadata so the cheaper resolve path (metadata lookup) + // works for downstream invoice.paid / subscription events. Best- + // effort: if the API call fails (deleted customer, network blip), + // resolveUserIdFromCustomer will fall back to the Identity table. + try { + await stripe.customers.update(customerId, { + metadata: { openpartner_user_id: customerId, openpartner_tenant_id: click.tenantId }, + }); + } catch { + // Non-fatal. + } + + return { userId: customerId, type: 'signup' }; + } case 'customer.created': { const customer = event.data.object as Stripe.Customer; const userId = customer.metadata?.openpartner_user_id; @@ -150,19 +447,105 @@ async function mapStripeEvent(stripe: Stripe, event: Stripe.Event): Promise { + const sourceEvents = await trx(TABLES.Event) + .whereRaw(`"metadata"->>'stripeInvoiceId' = ?`, [invoiceId]) + .where('type', 'invoice_paid'); + if (sourceEvents.length === 0) return { reversed: 0, alreadyPaid: 0 }; + + const eventIds = sourceEvents.map((e) => e.id); + const attributions = await trx(TABLES.Attribution).whereIn('eventId', eventIds); + if (attributions.length === 0) return { reversed: 0, alreadyPaid: 0 }; + const attributionIds = attributions.map((a) => a.id); + + const reversed = await trx(TABLES.Commission) + .whereIn('attributionId', attributionIds) + .whereIn('status', ['accrued', 'approved']) + .update({ status: 'reversed' }); + + const alreadyPaidRow = (await trx(TABLES.Commission) + .whereIn('attributionId', attributionIds) + .where('status', 'paid') + .count('id as c') + .first()) as { c: string | number } | undefined; + const alreadyPaid = Number(alreadyPaidRow?.c ?? 
0);
+
+  return { reversed, alreadyPaid };
+}
+
 async function resolveUserIdFromCustomer(
+  trx: Knex.Transaction,
   stripe: Stripe,
   customer: string | Stripe.Customer | Stripe.DeletedCustomer | null,
 ): Promise<string | null> {
   if (!customer) return null;
+  let customerId: string | null = null;
+  let metadataUserId: string | null = null;
+  if (typeof customer === 'string') {
+    customerId = customer;
     const fetched = await stripe.customers.retrieve(customer);
-    if (fetched.deleted) return null;
-    return fetched.metadata?.openpartner_user_id ?? null;
+    if (!fetched.deleted) metadataUserId = fetched.metadata?.openpartner_user_id ?? null;
+  } else {
+    if ('deleted' in customer && customer.deleted) return null;
+    customerId = (customer as Stripe.Customer).id;
+    metadataUserId = (customer as Stripe.Customer).metadata?.openpartner_user_id ?? null;
+  }
+
+  if (metadataUserId) return metadataUserId;
+  if (!customerId) return null;
+
+  // Fallback: Stripe Billing flow stitches Identity rows with userId =
+  // customer.id. This covers the race where invoice.paid arrives before our
+  // metadata-backfill on checkout.session.completed lands on Stripe's side.
+  const identity = await trx(TABLES.Identity).where({ userId: customerId }).first();
+  return identity ? customerId : null;
+}
+
+/**
+ * Auto-redeem any OpenPartner Coupons referenced by discount codes on
+ * the event. Runs BEFORE the standard event mapping — synthesizes the
+ * Click + Identity for the customer so the next attribution pass
+ * credits the partner. Returns the matched codes (for logging only).
+ *
+ * Stripe events that can carry discount codes:
+ *   - checkout.session.completed (session.discounts +
+ *     session.total_details.breakdown.discounts)
+ *   - invoice.paid (invoice.discounts)
+ *   - customer.subscription.created (subscription.discounts) — invoice
+ *     also fires for subs so this is partly redundant; covered for
+ *     consistency.
+ *
+ * The customer-facing code lives at promotion_code.code (when set). 
+ * If the discount has only a Coupon (no PromotionCode), we fall back + * to coupon.id — brands setting their Stripe Coupon IDs to match + * OpenPartner codes works without an extra Stripe API call. + */ +async function maybeRedeemStripeCoupons( + trx: Knex.Transaction, + stripe: Stripe, + event: Stripe.Event, + tenantId: string, +): Promise { + const obj = event.data.object as unknown as Record; + const ts = new Date(event.created * 1000); + + // Customer ID — same shape across the event types we handle. + const customerRaw = + (obj.customer as string | { id: string } | null | undefined) ?? + (obj.customer_email as string | undefined); + const userId = typeof customerRaw === 'string' ? customerRaw : customerRaw?.id; + if (!userId) return []; + + // Collect candidate codes from wherever Stripe might surface them. + const candidateCodes: string[] = []; + for (const d of extractDiscounts(event)) { + const code = await resolveDiscountCode(stripe, d); + if (code) candidateCodes.push(code); + } + if (candidateCodes.length === 0) return []; + + const matched: string[] = []; + for (const code of candidateCodes) { + const coupon = await findCouponByCode(trx, code); + if (!coupon) continue; + await ensureCouponClickAndIdentity(trx, tenantId, coupon, userId, ts); + matched.push(coupon.code); + } + return matched; +} + +interface DiscountRef { + coupon?: string | { id: string } | null; + promotion_code?: string | { id: string; code?: string } | null; +} + +function extractDiscounts(event: Stripe.Event): DiscountRef[] { + const obj = event.data.object as unknown as Record; + const out: DiscountRef[] = []; + + // checkout.session.completed: session.discounts + total_details.breakdown.discounts + if (Array.isArray(obj.discounts)) { + for (const d of obj.discounts as Array) { + if (typeof d === 'object' && d !== null) out.push(d); + } + } + const totalDetails = obj.total_details as + | { breakdown?: { discounts?: Array<{ discount?: DiscountRef }> } } + | undefined; + if 
(totalDetails?.breakdown?.discounts) { + for (const item of totalDetails.breakdown.discounts) { + if (item.discount) out.push(item.discount); + } + } + return out; +} + +async function resolveDiscountCode(stripe: Stripe, d: DiscountRef): Promise { + // Prefer the customer-facing PromotionCode string. + if (d.promotion_code) { + if (typeof d.promotion_code === 'object' && d.promotion_code.code) { + return d.promotion_code.code; + } + const id = typeof d.promotion_code === 'string' ? d.promotion_code : d.promotion_code.id; + try { + const promo = await stripe.promotionCodes.retrieve(id); + if (promo.code) return promo.code; + } catch (err) { + console.error('[stripe-webhook] promotion_code retrieve failed', { id, err }); + } + } + // Fallback: use the Coupon's Stripe ID directly. Brands who set their + // Stripe Coupon IDs to match OpenPartner codes don't need the API call. + if (d.coupon) { + return typeof d.coupon === 'string' ? d.coupon : d.coupon.id; } - if ('deleted' in customer && customer.deleted) return null; - return (customer as Stripe.Customer).metadata?.openpartner_user_id ?? null; + return null; } diff --git a/apps/api/src/routes/uploads.ts b/apps/api/src/routes/uploads.ts new file mode 100644 index 0000000..bff25e3 --- /dev/null +++ b/apps/api/src/routes/uploads.ts @@ -0,0 +1,83 @@ +import express, { Router } from 'express'; +import { TABLES } from '@openpartner/db'; +import { requireAuth, requireAdmin } from '../auth.js'; +import { tenantOf } from '../tenancy.js'; +import { fsStorageDir, getStorage, MAX_UPLOAD_BYTES, newUploadKey, UploadError, validateImageUpload } from '../storage.js'; + +export const uploadsRouter = Router(); + +// ---------- Brand logo upload ---------- +// +// Single binary body: client sends the image as the raw request body, +// Content-Type header carries the mime, X-Tenant-Slug etc are pulled +// from the tenant middleware. 
Returns the public URL after writing to +// storage AND stamping it onto the Tenant row in one transactional-ish +// shot (file write first; if DB update fails the file is orphaned but +// harmless — no link to anything). +// +// Capped at 2 MB by validateImageUpload + the express.raw limit. +uploadsRouter.post( + '/uploads/logo', + express.raw({ + type: ['image/jpeg', 'image/png', 'image/webp'], + limit: MAX_UPLOAD_BYTES, + }), + requireAuth, + requireAdmin, + async (req, res) => { + const { db, tenantId } = tenantOf(req); + + // req.header() returns string | undefined and normalizes any + // accidental array form, vs req.headers[name] which is typed as + // string | string[] | undefined and creates a parameter-tampering + // hazard if a client sends multiple Content-Type headers. + let validated; + try { + validated = validateImageUpload(req.header('content-type'), req.body?.length ?? 0); + } catch (err) { + if (err instanceof UploadError) return res.status(400).json({ error: err.code, detail: err.message }); + throw err; + } + if (!Buffer.isBuffer(req.body) || req.body.length === 0) { + return res.status(400).json({ error: 'empty_body' }); + } + + const key = newUploadKey(`tenants/${tenantId}/logos`, validated.ext); + await getStorage().put(key, req.body, { contentType: validated.contentType }); + const url = getStorage().publicUrl(key); + + await db(TABLES.Tenant).where({ id: tenantId }).update({ logoUrl: url, updatedAt: new Date() }); + + res.json({ logoUrl: url }); + }, +); + +// ---------- Static handler for FS backend ---------- +// +// When OPENPARTNER_STORAGE_KIND=fs, files written by the route above +// need to be served. Mount express.static on /uploads pointing at the +// configured FS dir. No-op when on s3 (the bucket serves directly). +// +// Public — no auth — by design: avatars + logos are shown in +// unauthenticated contexts (creator profile pages, brand listings on +// the Network, etc.). 
The keys are random hex (16 bytes), so directory +// enumeration isn't feasible. +export function mountStaticUploads(app: import('express').Express): void { + const dir = fsStorageDir(); + if (!dir) return; + app.use( + '/uploads', + express.static(dir, { + // Same cache header as S3 ACLs above. Keys change on every + // re-upload so immutable is safe. + maxAge: '365d', + immutable: true, + // Don't fall through to the next handler on 404 — return a clean + // 404 instead of attempting to interpret /uploads/ as + // something else further down the stack. + fallthrough: false, + // Don't ever serve a directory listing. + index: false, + }), + ); +} diff --git a/apps/api/src/routes/webhooks.ts b/apps/api/src/routes/webhooks.ts index 763a20a..b6e0a1e 100644 --- a/apps/api/src/routes/webhooks.ts +++ b/apps/api/src/routes/webhooks.ts @@ -6,9 +6,9 @@ import { type WebhookDeliveryRow, type WebhookEndpointRow, } from '@openpartner/db'; -import { db } from '../db.js'; import { requireAdmin, requireAuth } from '../auth.js'; import { makeSecret, redeliver } from '../webhook-dispatcher.js'; +import { tenantOf } from '../tenancy.js'; export const webhooksRouter = Router(); @@ -38,6 +38,7 @@ const updateSchema = z.object({ // -------- Endpoints CRUD -------- webhooksRouter.post('/webhooks', requireAuth, requireAdmin, async (req, res) => { + const { db, tenantId } = tenantOf(req); const body = createSchema.safeParse(req.body); if (!body.success) return res.status(400).json({ error: 'invalid_body', detail: body.error.flatten() }); @@ -45,6 +46,7 @@ webhooksRouter.post('/webhooks', requireAuth, requireAdmin, async (req, res) => const id = ulid(); await db(TABLES.WebhookEndpoint).insert({ id, + tenantId, url: body.data.url, secretPrefix: secret.prefix, secret: secret.plaintext, @@ -56,18 +58,21 @@ webhooksRouter.post('/webhooks', requireAuth, requireAdmin, async (req, res) => res.status(201).json({ endpoint: strip(endpoint!), secret: secret.plaintext }); }); 
-webhooksRouter.get('/webhooks', requireAuth, requireAdmin, async (_req, res) => { +webhooksRouter.get('/webhooks', requireAuth, requireAdmin, async (req, res) => { + const { db } = tenantOf(req); const endpoints = await db(TABLES.WebhookEndpoint).orderBy('createdAt', 'desc'); res.json({ endpoints: endpoints.map(strip) }); }); webhooksRouter.get('/webhooks/:id', requireAuth, requireAdmin, async (req, res) => { + const { db } = tenantOf(req); const endpoint = await db(TABLES.WebhookEndpoint).where({ id: req.params.id }).first(); if (!endpoint) return res.status(404).json({ error: 'not_found' }); res.json({ endpoint: strip(endpoint) }); }); webhooksRouter.patch('/webhooks/:id', requireAuth, requireAdmin, async (req, res) => { + const { db } = tenantOf(req); const body = updateSchema.safeParse(req.body); if (!body.success) return res.status(400).json({ error: 'invalid_body', detail: body.error.flatten() }); const existing = await db(TABLES.WebhookEndpoint).where({ id: req.params.id }).first(); @@ -85,6 +90,7 @@ webhooksRouter.patch('/webhooks/:id', requireAuth, requireAdmin, async (req, res }); webhooksRouter.delete('/webhooks/:id', requireAuth, requireAdmin, async (req, res) => { + const { db } = tenantOf(req); // Soft-delete via active=false keeps the delivery history intact for // forensics. We also allow hard-delete via ?hard=1 in case an operator // explicitly wants the row gone. 
@@ -100,6 +106,7 @@ webhooksRouter.delete('/webhooks/:id', requireAuth, requireAdmin, async (req, re // -------- Delivery log + retry -------- webhooksRouter.get('/webhooks/:id/deliveries', requireAuth, requireAdmin, async (req, res) => { + const { db } = tenantOf(req); const deliveries = await db(TABLES.WebhookDelivery) .where({ endpointId: req.params.id }) .orderBy('createdAt', 'desc') @@ -108,6 +115,7 @@ webhooksRouter.get('/webhooks/:id/deliveries', requireAuth, requireAdmin, async }); webhooksRouter.post('/webhooks/:id/deliveries/:deliveryId/retry', requireAuth, requireAdmin, async (req, res) => { + const { db, tenantId } = tenantOf(req); // Verify the delivery actually belongs to this endpoint BEFORE firing // — the previous order re-delivered and only then checked, which // meant hitting /webhooks/A/.../retry with a delivery id that belonged @@ -119,7 +127,7 @@ webhooksRouter.post('/webhooks/:id/deliveries/:deliveryId/retry', requireAuth, r if (!existing) return res.status(404).json({ error: 'not_found' }); if (existing.endpointId !== req.params.id) return res.status(400).json({ error: 'endpoint_mismatch' }); - const delivery = await redeliver(req.params.deliveryId!); + const delivery = await redeliver(tenantId, req.params.deliveryId!); if (!delivery) return res.status(404).json({ error: 'not_found' }); res.json({ delivery }); }); diff --git a/apps/api/src/scheduler.ts b/apps/api/src/scheduler.ts new file mode 100644 index 0000000..1a37817 --- /dev/null +++ b/apps/api/src/scheduler.ts @@ -0,0 +1,236 @@ +/** + * In-process scheduler for periodic platform jobs. + * + * DO App Platform doesn't have native cron, so for hosted deployments we run + * jobs in the api process via croner. Self-host customers get the same code + * — set OPENPARTNER_ENABLE_SCHEDULER=1 to opt in. Off by default so dev, + * test, and CI runs don't fire scheduled jobs unexpectedly. + * + * Jobs: + * - usage-report: every day at 03:15 UTC. 
Per active tenant, aggregates + * attributed GMV since last report and reports to Stripe + * Billing meters. + * - payouts: every Monday at 09:00 UTC. Per active tenant, runs + * runPayouts() to issue Stripe Connect transfers for + * approved commissions. + * + * Both jobs are no-ops in selfhost mode for usage-report. Payouts run in + * every mode (a self-host operator with no approved commissions just gets + * an empty result). + * + * Multi-tenant: each tick iterates active tenants. For each tenant we open + * an `appDb.transaction` with `app.tenant_id` pinned, so the per-tenant + * job runs scoped to that tenant's data. + * + * Concurrency: croner's `protect: true` ensures a job that's still running + * when its next tick arrives skips that tick rather than overlapping. This + * matters most for usage-report on first run after a long downtime. + */ + +import { Cron } from 'croner'; +import type { Knex } from 'knex'; +import { TABLES, type TenantRow } from '@openpartner/db'; +import { appDb, db } from './db.js'; +import { reportUsageToStripe } from './usage-billing.js'; +import { runPayouts } from './payouts.js'; +import { drainOutbox, reportNetworkPayoutsToNetwork, sendHeartbeat } from './network-client.js'; +import { getMode } from './stripe.js'; +import { sweepCampaignEndNotifications } from './campaign-end-notifications.js'; + +interface ScheduledJob { + name: string; + cronExpr: string; + description: string; + handler: () => Promise; +} + +/** + * Run `fn` once per active tenant inside a tenant-scoped appDb transaction. + * Failures in one tenant don't stop the iteration — each tenant gets its own + * try/catch and the aggregate result lists per-tenant outcomes. 
+ */ +async function forEachActiveTenant( + fn: (db: Knex, tenantId: string) => Promise, +): Promise> { + const tenants = await db(TABLES.Tenant).where({ status: 'active' }).select('id'); + const out: Array<{ tenantId: string; ok: boolean; result?: T; error?: string }> = []; + for (const t of tenants) { + try { + const result = await appDb.transaction(async (trx) => { + await trx.raw(`set local app.tenant_id = '${t.id.replace(/'/g, "''")}'`); + return fn(trx, t.id); + }); + out.push({ tenantId: t.id, ok: true, result }); + } catch (err) { + out.push({ tenantId: t.id, ok: false, error: err instanceof Error ? err.message : String(err) }); + } + } + return out; +} + +const JOBS: ScheduledJob[] = [ + { + name: 'usage-report', + cronExpr: '15 3 * * *', + description: 'Per tenant: aggregate attributed GMV and report to Stripe meters (daily 03:15 UTC)', + handler: async () => { + if (getMode() === 'selfhost') return { skipped: 'selfhost' }; + return forEachActiveTenant((trx, tenantId) => reportUsageToStripe(trx, tenantId)); + }, + }, + { + name: 'payouts', + cronExpr: '0 9 * * 1', + description: 'Per tenant: issue Stripe Connect transfers for approved commissions (Monday 09:00 UTC)', + handler: async () => forEachActiveTenant((trx, tenantId) => runPayouts(trx, tenantId)), + }, + { + name: 'network-outbox-drain', + cronExpr: '*/5 * * * *', + description: 'Per tenant: retry failed Network pushes from NetworkOutbox (every 5 minutes)', + handler: async () => forEachActiveTenant((trx, tenantId) => drainOutbox(trx, tenantId)), + }, + { + name: 'network-payouts-report', + cronExpr: '30 3 * * *', + description: 'Per tenant: aggregate Network-originated payout volume + report to the Network for Stripe metering (daily 03:30 UTC)', + handler: async () => + forEachActiveTenant((trx, tenantId) => reportNetworkPayoutsToNetwork(trx, tenantId)), + }, + { + name: 'network-heartbeat', + cronExpr: '7 * * * *', + // Hourly (offset 7 min so it doesn't pile on top of other top-of-the-hour + // 
jobs). The Network uses partnerCount + lastHeartbeatAt to populate the + // vendor's "active partners" stat and to detect abandoned instances — + // shorter interval keeps that view fresh for brand admins. + description: 'Per tenant: ping the Network with current partner count (hourly @ :07)', + handler: async () => forEachActiveTenant((trx, tenantId) => sendHeartbeat(trx, tenantId)), + }, + { + name: 'tenant-hard-delete', + cronExpr: '0 4 * * *', + description: 'Hard-delete tenants whose 30-day deletion grace window has lapsed (daily 04:00 UTC)', + handler: async () => hardDeleteExpiredTenants(), + }, + { + name: 'campaign-end-notifications', + cronExpr: '0 9 * * *', + description: 'Email brand admins + participating partners ~7 days before a campaign ends (daily 09:00 UTC)', + handler: async () => sweepCampaignEndNotifications(), + }, +]; + +/** Cascade-delete tenants whose pendingDeletionAt is older than the + * grace window. Runs in the privileged db (we need to bypass RLS to + * drop child rows in any tenant's slice). */ +async function hardDeleteExpiredTenants(): Promise<{ purged: number; tenantIds: string[] }> { + const cutoff = new Date(Date.now() - 30 * 24 * 60 * 60 * 1000); + const expired = await db(TABLES.Tenant) + .where('pendingDeletionAt', '<', cutoff) + .select('id'); + const tenantIds: string[] = []; + // Tenanted tables to wipe — order doesn't matter because they're + // all tagged with tenantId. Tenant row is dropped last. 
+ const TENANTED_TABLES = [ + TABLES.NetworkOutbox, + TABLES.WebhookDelivery, + TABLES.WebhookEndpoint, + TABLES.Session, + TABLES.MagicLinkToken, + TABLES.ApiKey, + TABLES.Config, + TABLES.Payout, + TABLES.Commission, + TABLES.Attribution, + TABLES.Event, + TABLES.Identity, + TABLES.Click, + TABLES.Link, + TABLES.Campaign, + TABLES.Partner, + TABLES.Admin, + ]; + for (const t of expired) { + try { + await db.transaction(async (trx) => { + for (const tbl of TENANTED_TABLES) { + await trx(tbl).where({ tenantId: t.id }).del(); + } + await trx(TABLES.Tenant).where({ id: t.id }).del(); + }); + tenantIds.push(t.id); + } catch (err) { + console.error('[scheduler] hard-delete failed', { tenantId: t.id, err }); + } + } + return { purged: tenantIds.length, tenantIds }; +} + +let started = false; +const handles: Cron[] = []; + +export function startScheduler(): void { + if (started) return; + if (process.env.OPENPARTNER_ENABLE_SCHEDULER !== '1') { + console.log('[scheduler] disabled (set OPENPARTNER_ENABLE_SCHEDULER=1 to enable)'); + return; + } + + for (const job of JOBS) { + const handle = new Cron( + job.cronExpr, + { name: job.name, timezone: 'UTC', protect: true }, + async () => { + const start = Date.now(); + // pg advisory lock keyed off the job name — guarantees only one + // process across the whole cluster runs the body, even if the + // app scales to multiple instances. Two-int form to match the + // 64-bit signature; use a stable hash of the name so the same + // job always lands on the same key. + const [classId, objId] = lockKeyForJob(job.name); + const trx = await db.transaction(); + let acquired = false; + try { + const r = (await trx.raw('select pg_try_advisory_xact_lock(?, ?) 
as locked', [classId, objId])) as { + rows: Array<{ locked: boolean }>; + }; + acquired = !!r.rows[0]?.locked; + if (!acquired) { + console.log(`[scheduler] ${job.name} skipped — another instance holds the lock`); + await trx.rollback(); + return; + } + console.log(`[scheduler] ${job.name} starting`); + const result = await job.handler(); + await trx.commit(); + console.log(`[scheduler] ${job.name} finished in ${Date.now() - start}ms`, result); + } catch (err) { + await trx.rollback().catch(() => {}); + console.error(`[scheduler] ${job.name} failed`, err); + } + }, + ); + handles.push(handle); + console.log(`[scheduler] registered ${job.name} (${job.cronExpr}) — ${job.description}`); + } + started = true; +} + +/** Stable two-int advisory-lock key for a job name. The xact-scoped + * variant releases on commit/rollback, so we don't need explicit + * unlock. Class id is fixed so all our scheduler locks share a + * namespace. */ +function lockKeyForJob(name: string): [number, number] { + let h = 0; + for (let i = 0; i < name.length; i += 1) h = ((h << 5) - h + name.charCodeAt(i)) | 0; + // 0x534F503F = 'SCHED' magic-ish — any constant works as long as it + // doesn't collide with another locker on the same DB. + return [0x534f503f, h]; +} + +export function stopScheduler(): void { + for (const h of handles) h.stop(); + handles.length = 0; + started = false; +} diff --git a/apps/api/src/server.ts b/apps/api/src/server.ts index 3c521e3..88b6a44 100644 --- a/apps/api/src/server.ts +++ b/apps/api/src/server.ts @@ -1,9 +1,42 @@ import { createApp } from './app.js'; +import { startScheduler } from './scheduler.js'; const PORT = Number(process.env.API_PORT ?? 4601); const MODE = process.env.OPENPARTNER_MODE ?? 'selfhost'; +// Boot probe — log presence/length of every secret env so the run logs +// surface which ones actually populated in the runtime container. App +// Platform silently leaves SECRET envs empty when the cipher-text blob +// can't decrypt (e.g. 
after a spec-pull-and-reapply); the only reliable +// way to spot that is at startup. Lengths only, never values. +console.log(JSON.stringify({ + msg: 'secrets_probe', + app: 'openpartner', + tenancy: process.env.OPENPARTNER_TENANCY ?? 'single', + // Stripe + stripeSecretLen: (process.env.STRIPE_SECRET_KEY ?? '').length, + webhookSecretLen: (process.env.STRIPE_WEBHOOK_SECRET ?? '').length, + flatPriceIdLen: (process.env.STRIPE_FLAT_PRICE_ID ?? '').length, + flatUsagePriceIdLen: (process.env.STRIPE_FLAT_USAGE_PRICE_ID ?? '').length, + revsharePriceIdLen: (process.env.STRIPE_REVSHARE_USAGE_PRICE_ID ?? '').length, + networkPriceIdLen: (process.env.STRIPE_NETWORK_PRICE_ID ?? '').length, + networkUsagePriceIdLen: (process.env.STRIPE_NETWORK_USAGE_PRICE_ID ?? '').length, + // Mail + postmarkTokenLen: (process.env.POSTMARK_SERVER_TOKEN ?? '').length, + mailFromLen: (process.env.MAIL_FROM ?? '').length, + // Other + adminApiKeyLen: (process.env.ADMIN_API_KEY ?? '').length, + secretsEncryptionKeyLen: (process.env.SECRETS_ENCRYPTION_KEY ?? '').length, + metricsTokenLen: (process.env.METRICS_TOKEN ?? '').length, + networkUrlLen: (process.env.NETWORK_URL ?? '').length, + networkAdminKeyLen: (process.env.NETWORK_ADMIN_API_KEY ?? '').length, + databaseUrlLen: (process.env.DATABASE_URL ?? '').length, + databaseUrlAppLen: (process.env.DATABASE_URL_APP ?? '').length, + portalUrlLen: (process.env.PORTAL_URL ?? '').length, +})); + const app = createApp(); app.listen(PORT, () => { console.log(`[api] listening on :${PORT} (mode=${MODE})`); + startScheduler(); }); diff --git a/apps/api/src/storage.ts b/apps/api/src/storage.ts new file mode 100644 index 0000000..fc019b3 --- /dev/null +++ b/apps/api/src/storage.ts @@ -0,0 +1,220 @@ +/** + * Object storage for user-uploaded assets (brand logos, partner avatars). + * + * Two backends, picked by OPENPARTNER_STORAGE_KIND: + * + * fs (default) — write to a local directory, served by an Express + * static handler at /uploads/*. 
Fine for self-hosters + * who don't have an S3-compatible store and don't want + * to provision one. Persists across restarts when the + * dir is on a docker volume; ephemeral otherwise. + * + * s3 — write to any S3-compatible bucket. Used in our DO + * deployment with DO Spaces. The s3 client is lazy- + * imported so self-host installs that never set + * STORAGE_KIND=s3 don't pay the dependency cost. + * + * Both backends return a `publicUrl(key)` the rest of the app can stamp + * into JSON responses + DB columns. Switching backends is a deploy-time + * config change; previously-uploaded files keep their old URLs (we + * never rewrite stored URLs). + */ + +import { promises as fs } from 'node:fs'; +import { join, resolve } from 'node:path'; +import { randomBytes } from 'node:crypto'; + +export interface PutOptions { + contentType: string; +} + +export interface StorageBackend { + /** + * Write `buf` to `key`. Implementations overwrite atomically when the + * key already exists; callers shouldn't rely on read-after-write + * consistency across distributed S3 backends but DO Spaces + AWS S3 + * give us strong consistency in practice. + */ + put(key: string, buf: Buffer, opts: PutOptions): Promise; + /** + * Public URL for `key`. Constructed from configured base — never + * involves a request to the storage backend. + */ + publicUrl(key: string): string; +} + +// ---------- FS backend ---------- + +class FsStorage implements StorageBackend { + constructor( + private dir: string, + private publicBase: string, + ) {} + + async put(key: string, buf: Buffer, _opts: PutOptions): Promise { + // No subdir nesting in keys (we control them) — single mkdir on the + // root suffices. atomic-ish: write then rename would be safer but + // for v1 a direct write is fine; the next overwrite supersedes. 
+ await fs.mkdir(this.dir, { recursive: true }); + await fs.writeFile(join(this.dir, key), buf); + } + + publicUrl(key: string): string { + return `${this.publicBase.replace(/\/$/, '')}/${key}`; + } +} + +// ---------- S3 backend ---------- + +interface S3Config { + bucket: string; + region: string; + endpoint?: string; + accessKeyId: string; + secretAccessKey: string; + publicBase?: string; +} + +class S3Storage implements StorageBackend { + // The s3 client is constructed once and re-used — internal connection + // pool handles concurrency. Typed as `any` because we lazy-import the + // SDK; carrying the real types would force a top-level import. + // eslint-disable-next-line @typescript-eslint/no-explicit-any + private clientPromise: Promise | null = null; + + constructor(private cfg: S3Config) {} + + // eslint-disable-next-line @typescript-eslint/no-explicit-any + private async client(): Promise { + if (!this.clientPromise) { + this.clientPromise = import('@aws-sdk/client-s3').then( + (mod) => + new mod.S3Client({ + region: this.cfg.region, + endpoint: this.cfg.endpoint, + // DO Spaces requires path-style URLs; AWS allows both. + // forcePathStyle: false works for AWS, true is needed for + // some on-prem MinIO setups. DO Spaces accepts either; we + // default to the safer path-style. + forcePathStyle: !!this.cfg.endpoint, + credentials: { + accessKeyId: this.cfg.accessKeyId, + secretAccessKey: this.cfg.secretAccessKey, + }, + }), + ); + } + return this.clientPromise; + } + + async put(key: string, buf: Buffer, opts: PutOptions): Promise { + const c = await this.client(); + const { PutObjectCommand } = await import('@aws-sdk/client-s3'); + await c.send( + new PutObjectCommand({ + Bucket: this.cfg.bucket, + Key: key, + Body: buf, + ContentType: opts.contentType, + // Avatars and logos are public-read by design — they're shown + // in unauthenticated brand pages on the Network and inline in + // emails. ACL header is the cross-cloud way to set this. 
+ ACL: 'public-read', + // 1y cache header — uploads use random keys so the URL changes + // on every replace. Aggressive caching is safe. + CacheControl: 'public, max-age=31536000, immutable', + }), + ); + } + + publicUrl(key: string): string { + if (this.cfg.publicBase) { + return `${this.cfg.publicBase.replace(/\/$/, '')}/${key}`; + } + if (this.cfg.endpoint) { + // Custom endpoint (DO Spaces, MinIO): URL is endpoint/bucket/key. + const ep = this.cfg.endpoint.replace(/\/$/, ''); + return `${ep}/${this.cfg.bucket}/${key}`; + } + // AWS: bucket-subdomain style. + return `https://${this.cfg.bucket}.s3.${this.cfg.region}.amazonaws.com/${key}`; + } +} + +// ---------- factory ---------- + +let backend: StorageBackend | null = null; + +export function getStorage(): StorageBackend { + if (backend) return backend; + + const kind = (process.env.OPENPARTNER_STORAGE_KIND ?? 'fs').toLowerCase(); + + if (kind === 's3') { + const required = ['STORAGE_S3_BUCKET', 'STORAGE_S3_REGION', 'STORAGE_S3_ACCESS_KEY_ID', 'STORAGE_S3_SECRET_ACCESS_KEY']; + for (const name of required) { + if (!process.env[`OPENPARTNER_${name}`]) { + throw new Error(`OPENPARTNER_STORAGE_KIND=s3 requires OPENPARTNER_${name}`); + } + } + backend = new S3Storage({ + bucket: process.env.OPENPARTNER_STORAGE_S3_BUCKET!, + region: process.env.OPENPARTNER_STORAGE_S3_REGION!, + endpoint: process.env.OPENPARTNER_STORAGE_S3_ENDPOINT, + accessKeyId: process.env.OPENPARTNER_STORAGE_S3_ACCESS_KEY_ID!, + secretAccessKey: process.env.OPENPARTNER_STORAGE_S3_SECRET_ACCESS_KEY!, + publicBase: process.env.OPENPARTNER_STORAGE_S3_PUBLIC_BASE, + }); + return backend; + } + + // fs default. publicBase computes off API_URL when not explicitly set + // — saves operators from having to declare two URLs in the common case. + const dir = resolve(process.env.OPENPARTNER_STORAGE_FS_DIR ?? '/var/lib/openpartner/uploads'); + const publicBase = + process.env.OPENPARTNER_STORAGE_FS_PUBLIC_BASE ?? + `${(process.env.API_URL ?? 
'http://localhost:4601').replace(/\/$/, '')}/uploads`; + backend = new FsStorage(dir, publicBase); + return backend; +} + +export function fsStorageDir(): string | null { + if ((process.env.OPENPARTNER_STORAGE_KIND ?? 'fs').toLowerCase() !== 'fs') return null; + return resolve(process.env.OPENPARTNER_STORAGE_FS_DIR ?? '/var/lib/openpartner/uploads'); +} + +// ---------- helpers ---------- + +const ALLOWED_CONTENT_TYPES = new Set(['image/jpeg', 'image/png', 'image/webp']); +const EXT_BY_TYPE: Record = { + 'image/jpeg': 'jpg', + 'image/png': 'png', + 'image/webp': 'webp', +}; + +export const MAX_UPLOAD_BYTES = 2 * 1024 * 1024; + +export interface ValidatedUpload { + contentType: string; + ext: string; +} + +export function validateImageUpload(contentType: string | undefined, byteLength: number): ValidatedUpload { + if (!contentType || !ALLOWED_CONTENT_TYPES.has(contentType)) { + throw new UploadError(`unsupported_content_type`, `Content-Type must be one of: ${[...ALLOWED_CONTENT_TYPES].join(', ')}`); + } + if (byteLength > MAX_UPLOAD_BYTES) { + throw new UploadError('payload_too_large', `Upload exceeds ${MAX_UPLOAD_BYTES} bytes`); + } + return { contentType, ext: EXT_BY_TYPE[contentType]! }; +} + +export function newUploadKey(prefix: string, ext: string): string { + return `${prefix}/${randomBytes(16).toString('hex')}.${ext}`; +} + +export class UploadError extends Error { + constructor(public code: string, message: string) { + super(message); + } +} diff --git a/apps/api/src/tenancy.ts b/apps/api/src/tenancy.ts new file mode 100644 index 0000000..2ff0d5b --- /dev/null +++ b/apps/api/src/tenancy.ts @@ -0,0 +1,302 @@ +/** + * Tenant resolution + per-request transaction wiring. + * + * Two tenancy modes: + * + * single — every request runs as tenantId = 'default' (the seeded + * tenant from the multi_tenant migration). Self-host. The + * same code paths and queries work; we just always use one + * tenant. 
+ * + * multi — tenantId resolved from the request URL (path-based for v1: + * /t//...). Reserved slugs reject. Unknown slugs 404. + * + * Each tenant-scoped request runs inside a database transaction with + * `SET LOCAL app.tenant_id = ''`. RLS policies on every data table + * enforce that the response only contains rows for that tenant. The + * transaction is bound to `req.db`; routes use `req.db('Partner')...` + * instead of the module-level `db`. + * + * Public, non-tenant routes (e.g. /signup, /health, the marketing + * landing pages) are handled by routing them away from the tenant + * middleware — they use the privileged `db` directly. + */ +import type { Knex } from 'knex'; +import type { NextFunction, Request, Response } from 'express'; +import { DEFAULT_TENANT_ID } from '@openpartner/db'; +import { appDb } from './db.js'; + +export type TenancyMode = 'single' | 'multi'; + +/** Thrown when a tenant request hits a brand inside its deletion grace + * window (and isn't on the recovery path). The middleware catches and + * surfaces a 410 Gone — explicit so the SPA can show "this brand was + * deleted; sign in again". */ +export class TenantPendingDeletionError extends Error { + constructor(public slug: string) { + super(`tenant_pending_deletion:${slug}`); + this.name = 'TenantPendingDeletionError'; + } +} + +export function getTenancyMode(): TenancyMode { + const m = process.env.OPENPARTNER_TENANCY ?? 'single'; + if (m !== 'single' && m !== 'multi') { + throw new Error(`Invalid OPENPARTNER_TENANCY: ${m}`); + } + return m; +} + +/** Reserved subdomain/path slugs that can't be claimed by a tenant. 
*/ +export const RESERVED_SLUGS = new Set([ + 'default', // already used by single-host bootstrap + 'www', + 'api', + 'app', + 'admin', + 'signup', + 'login', + 'auth', + 'docs', + 'help', + 'support', + 'status', + 'network', + 'static', + 'public', + 'platform', +]); + +// Express's own type definitions use a namespace under global, so the +// canonical way to extend Request is the same shape — no idiomatic +// "module" rewrite available without losing the augmentation. +declare global { + // eslint-disable-next-line @typescript-eslint/no-namespace + namespace Express { + interface Request { + /** Resolved tenant ID. Always set inside the tenant middleware. */ + tenantId?: string; + /** Resolved tenant slug. */ + tenantSlug?: string; + /** Transaction-bound knex instance with app.tenant_id set. */ + db?: Knex; + /** True when a platform admin is acting (rare; gated separately). */ + platformAdmin?: boolean; + } + } +} + +/** + * Convenience helper for tenant-scoped route handlers. Throws if the + * request didn't pass through the tenant middleware (which would be a + * routing bug — the handler shouldn't be there). + * + * const { db, tenantId } = tenantOf(req); + * await db('Partner').insert({ tenantId, ... }); + */ +export function tenantOf(req: Request): { db: Knex; tenantId: string } { + if (!req.db || !req.tenantId) { + throw new Error( + 'tenantOf called on a request without tenant context — mount tenantMiddleware before this route', + ); + } + return { db: req.db, tenantId: req.tenantId }; +} + +/** + * Express middleware that: + * 1. Resolves the tenantId for the request + * 2. Opens a transaction on the appDb pool + * 3. Sets `app.tenant_id` (and `app.platform_admin` if applicable) + * 4. Stashes the trx as `req.db` so handlers can issue tenant-scoped queries + * 5. 
Awaits response completion before committing/rolling back + * + * Behavior depends on OPENPARTNER_TENANCY: + * single → tenantId = 'default' for every request + * multi → resolveTenantFromPath(req); if no tenant, calls next() without + * opening a transaction so non-tenant routes (signup, marketing) + * still work. + */ +export async function tenantMiddleware( + req: Request, + res: Response, + next: NextFunction, +): Promise { + const mode = getTenancyMode(); + + let tenantId: string | null = null; + let tenantSlug: string | null = null; + + if (mode === 'single') { + tenantId = DEFAULT_TENANT_ID; + tenantSlug = 'default'; + } else { + let resolved: Awaited>; + try { + resolved = await resolveTenantFromPath(req); + } catch (err) { + if (err instanceof TenantPendingDeletionError) { + res.status(410).json({ error: 'tenant_pending_deletion', slug: err.slug }); + return; + } + throw err; + } + if (resolved) { + tenantId = resolved.id; + tenantSlug = resolved.slug; + // Strip the /t/ (or /api/t/) prefix so the downstream + // routers — all mounted at root — match. Express respects req.url + // updates; req.originalUrl stays intact for logging. + req.url = resolved.remainder; + } + } + + if (!tenantId) { + // No tenant scope — non-tenant routes (signup, marketing landing, + // /health) handle themselves. Don't open a transaction. + return next(); + } + + req.tenantId = tenantId; + if (tenantSlug) req.tenantSlug = tenantSlug; + + // Open the transaction outside any callback so we can finalize it + // synchronously when a handler calls res.json/send/end. Committing on + // response 'finish' (the previous design) released the trx AFTER the + // client got its response, which raced any caller doing direct DB + // reads immediately after `await fetch(...)` — including every + // integration test that follows POST /partners with db('Click').insert(). 
+ let trx: Knex.Transaction; + try { + trx = await appDb.transaction(); + await trx.raw(`set local app.tenant_id = '${tenantId.replace(/'/g, "''")}'`); + if (req.platformAdmin) { + await trx.raw(`set local app.platform_admin = 'on'`); + } + } catch (err) { + return next(err); + } + req.db = trx; + + // Patch res.send/json/end so they commit (or rollback on 5xx) before + // any byte goes to the client. If commit fails the request becomes a + // 500; if it succeeds the original response is sent unchanged. + const origJson = res.json.bind(res); + const origSend = res.send.bind(res); + const origEnd = res.end.bind(res); + let finalized = false; + + async function finalize(success: boolean): Promise { + if (finalized) return; + finalized = true; + if (success) { + try { + await trx.commit(); + } catch (err) { + // Commit failed after the handler succeeded — the response we're + // about to send is a lie. Mutate to a 500 if we still can. + if (!res.headersSent) { + res.status(500); + throw err; + } + // Headers already out; nothing safe to do but log. + console.error('[tenancy] commit failed after headers sent', err); + } + } else { + try { + await trx.rollback(); + } catch { + // Best-effort rollback; ignore secondary failures. + } + } + } + + res.json = function (body: unknown) { + const success = res.statusCode < 500; + finalize(success).then( + () => origJson(body), + (err) => { + if (!res.headersSent) origJson({ error: 'commit_failed', detail: err instanceof Error ? err.message : String(err) }); + }, + ); + return res; + }; + res.send = function (body?: unknown) { + const success = res.statusCode < 500; + finalize(success).then( + () => origSend(body), + (err) => { + if (!res.headersSent) origSend(`commit_failed: ${err instanceof Error ? 
err.message : String(err)}`); + }, + ); + return res; + }; + res.end = function (chunk?: unknown, encoding?: BufferEncoding | (() => void), cb?: () => void) { + const success = res.statusCode < 500; + finalize(success).then( + () => (origEnd as unknown as (...a: unknown[]) => Response)(chunk, encoding, cb), + () => (origEnd as unknown as (...a: unknown[]) => Response)(chunk, encoding, cb), + ); + return res; + }; + + // Belt and suspenders: if the client disconnects before any res.* call + // ran (or if Express's error path bypasses our patched methods), still + // release the trx so it doesn't leak. + res.on('close', () => { + if (!finalized) { + finalize(false).catch(() => {}); + } + }); + + next(); +} + +/** + * Path-based tenant resolution: /t//... → Tenant row. + * + * Returns null for non-tenant paths (no /t/ prefix) so the middleware + * can pass through to public routes. + */ +async function resolveTenantFromPath( + req: Request, +): Promise<{ id: string; slug: string; remainder: string } | null> { + // Path patterns: + // /t//... — portal under a tenant + // /api/t//...— api under a tenant (note: ingress strips /api) + // anything else — no tenant + // We capture the prefix length so the middleware can rewrite req.url + // to just the post-prefix path; downstream routers — all mounted at + // root — then match cleanly. + const match = req.url.match(/^(\/(?:t|api\/t)\/[a-z0-9-]+)(\/.*)?$/); + if (!match) return null; + const prefix = match[1]!; + const slug = prefix.split('/').pop()!; + + if (RESERVED_SLUGS.has(slug)) return null; + + // Lookup goes through the privileged db pool because we need to read + // any tenant by slug, not just the current one. RLS would block this + // on the appDb pool. 
+ const { db } = await import('./db.js'); + const row = await db('Tenant').where({ slug, status: 'active' }).first(['id', 'slug', 'pendingDeletionAt']); + if (!row) return null; + // Tenants in the deletion grace window are accessible only via the + // explicit recovery routes — anything else 410s on the way out so a + // browser cookie hanging around can't keep working against a deleted + // brand. The recovery routes use the `?recover=1` query flag the + // restore page sets. + if (row.pendingDeletionAt) { + const isRecoveryPath = + req.url.includes('/account/restore') || + req.url.includes('/account/deletion-status'); + if (!isRecoveryPath) { + throw new TenantPendingDeletionError(slug); + } + } + return { + id: row.id as string, + slug: row.slug as string, + remainder: match[2] || '/', + }; +} diff --git a/apps/api/src/usage-billing.ts b/apps/api/src/usage-billing.ts new file mode 100644 index 0000000..68dec06 --- /dev/null +++ b/apps/api/src/usage-billing.ts @@ -0,0 +1,196 @@ +/** + * Usage-based billing reporter. + * + * The Hosted Flex plan bills $49/mo + 1.5% of attributed GMV. The 1.5% portion + * is a Stripe metered price tied to a Stripe Meter ("openpartner_attributed_gmv"); + * we report usage via meterEvents.create on whatever cadence makes sense for + * the merchant — daily cron, manual admin trigger, end-of-billing-period job. + * + * Hosted Revshare uses the same meter (3% of GMV instead of 1.5%) — it just + * has a different metered price. The reporter is mode-aware and picks the + * correct meter event_name for each tier. + * + * Idempotency: Stripe's Meter Event API accepts an optional `identifier` so + * the same period reported twice is a no-op. We use the high-water mark + * timestamp to define a closed period and stamp identifier accordingly. If + * the report fails we DO NOT advance the high-water mark, so the next run + * picks up from the same point. 
+ * + * Multi-tenant: each tenant has its own Stripe customer + high-water mark + * (Config rows are tenant-scoped). The scheduler iterates tenants and calls + * this once per tenant; the billing route passes the request's tenant. + */ + +import type { Knex } from 'knex'; +import { TABLES, type EventRow } from '@openpartner/db'; +import { CONFIG_KEYS, getConfig, setConfig } from './config.js'; +import { requireStripe } from './stripe.js'; +import { getTenantBillingState } from './billing-plan.js'; + +// Events we count toward attributed GMV. We sum Event.value for these, +// scoped to events that have a corresponding Attribution row (i.e. credit +// actually went to a partner). The signup event has no value and is +// excluded by the SUM (NULL handling). +const REVENUE_EVENT_TYPES = ['invoice_paid', 'subscription_created']; + +// Mode → meter event_name. Network access uses a separate meter for +// Network-originated payouts; that's reported by the payout runner, not here. +const MODE_TO_METER: Record = { + flat: 'openpartner_attributed_gmv', + revshare: 'openpartner_attributed_gmv', +}; + +export interface UsageReportResult { + mode: string; + meterEventName: string; + customerId: string; + amount: number; // dollars + rangeStart: Date | null; + rangeEnd: Date; + reported: boolean; + reason?: string; +} + +/** + * Sum payout amounts (in dollars) for payouts whose Partner came from + * the OpenPartner Network — i.e., Partner.metadata.network.creatorId + * is set, which is stamped by network-client.ts when a partner is + * upserted to the Network. + * + * This is what the Network bills the vendor on (3% metered). Only + * 'paid' payouts count — pending / failed don't generate Network fee + * because no money actually moved to the partner. + * + * Tenant scope: caller provides db + GUC. 
+ */ +export async function aggregateNetworkOriginatedPayouts( + db: Knex, + since: Date | null, + until: Date, +): Promise { + const q = db(TABLES.Payout) + .join(TABLES.Partner, `${TABLES.Partner}.id`, `${TABLES.Payout}.partnerId`) + .where(`${TABLES.Payout}.status`, 'paid') + .andWhere(`${TABLES.Payout}.completedAt`, '<=', until) + .andWhereRaw(`"${TABLES.Partner}"."metadata"->'network'->>'creatorId' is not null`); + if (since) q.andWhere(`${TABLES.Payout}.completedAt`, '>', since); + const rows = (await q.sum({ total: `${TABLES.Payout}.amount` })) as Array<{ total: string | null }>; + return Number(rows[0]?.total ?? 0); +} + +/** + * Sum attributed GMV (in dollars) for events with `ts > since` and `ts <= until`. + * Only counts events that have at least one Attribution row (i.e. a partner + * was credited). Refund/dispute events are excluded by event-type filter. + * + * Tenant scope is provided by the caller (req.db with app.tenant_id set, or + * a transaction the scheduler opened with the GUC pinned). + */ +export async function aggregateAttributedGmv( + db: Knex, + since: Date | null, + until: Date, +): Promise { + const q = db(TABLES.Event) + .whereIn('type', REVENUE_EVENT_TYPES) + .where('ts', '<=', until) + .whereExists((qb) => { + qb.select(db.raw('1')).from(TABLES.Attribution).whereRaw('"Attribution"."eventId" = "Event"."id"'); + }); + if (since) q.where('ts', '>', since); + const rows = (await q.sum({ total: 'value' })) as Array<{ total: string | null }>; + return Number(rows[0]?.total ?? 0); +} + +/** + * Report aggregated GMV to Stripe via the Meter Events API. The meter + * (openpartner_attributed_gmv) must exist in the platform's Stripe account + * and the merchant's subscription must include the metered price tied to + * the same meter. Both are provisioned by `scripts/setup-stripe.mjs`. 
+ */ +export async function reportUsageToStripe(db: Knex, tenantId: string): Promise { + const state = await getTenantBillingState(db, tenantId); + const mode = state.mode; + const meterEventName = MODE_TO_METER[mode]; + if (!meterEventName) { + return { + mode, + meterEventName: '', + customerId: '', + amount: 0, + rangeStart: null, + rangeEnd: new Date(), + reported: false, + reason: `usage reporting is not configured for mode=${mode}`, + }; + } + + // Read Stripe customer from Tenant column (canonical) — fall back to + // the legacy Config key for tenants that subscribed before the + // billingPlan migration backfilled it. Both should be equivalent + // post-migration. + const customerId = + state.stripeCustomerId ?? + (await getConfig(db, tenantId, CONFIG_KEYS.StripeMerchantCustomerId)); + if (!customerId) { + return { + mode, + meterEventName, + customerId: '', + amount: 0, + rangeStart: null, + rangeEnd: new Date(), + reported: false, + reason: 'no Stripe merchant customer configured (subscribe via /billing/checkout first)', + }; + } + + const lastReportedAtIso = await getConfig(db, tenantId, CONFIG_KEYS.LastUsageReportedAt); + const rangeStart = lastReportedAtIso ? new Date(lastReportedAtIso) : null; + const rangeEnd = new Date(); + const amount = await aggregateAttributedGmv(db, rangeStart, rangeEnd); + + if (amount <= 0) { + // Still advance the high-water mark — we've "reported" zero usage for + // the period and don't want to re-scan the same window forever. + await setConfig(db, tenantId, CONFIG_KEYS.LastUsageReportedAt, rangeEnd.toISOString()); + return { + mode, + meterEventName, + customerId, + amount, + rangeStart, + rangeEnd, + reported: false, + reason: 'no attributed GMV in range', + }; + } + + const stripe = requireStripe(); + // identifier is Stripe's idempotency key for meter events. 
Tying it to the + // window end means a re-run within the same second is deduped on Stripe's + // side, which is what we want when an admin double-clicks the report + // button or a cron job retries on transient failure. Include tenantId so + // two tenants reporting in the same second don't collide. + const identifier = `op-usage-${mode}-${tenantId}-${rangeEnd.toISOString()}`; + await stripe.billing.meterEvents.create({ + event_name: meterEventName, + payload: { + stripe_customer_id: customerId, + value: amount.toFixed(2), + }, + identifier, + timestamp: Math.floor(rangeEnd.getTime() / 1000), + }); + + await setConfig(db, tenantId, CONFIG_KEYS.LastUsageReportedAt, rangeEnd.toISOString()); + return { + mode, + meterEventName, + customerId, + amount, + rangeStart, + rangeEnd, + reported: true, + }; +} diff --git a/apps/api/src/webhook-dispatcher.ts b/apps/api/src/webhook-dispatcher.ts index 5309a9f..e8a44bb 100644 --- a/apps/api/src/webhook-dispatcher.ts +++ b/apps/api/src/webhook-dispatcher.ts @@ -1,10 +1,10 @@ /** * Outbound webhook dispatcher. * - * dispatchEvent(type, data) fans out to every active endpoint subscribed - * to `type`, writes a WebhookDelivery row per recipient, and fires the - * HTTP POST asynchronously so the inbound request that triggered the - * event isn't blocked on webhook RTT. + * dispatchEvent(tenantId, type, data) fans out to every active endpoint + * in `tenantId` subscribed to `type`, writes a WebhookDelivery row per + * recipient, and fires the HTTP POST asynchronously so the inbound + * request that triggered the event isn't blocked on webhook RTT. * * Signature: HMAC-SHA256 of `${timestamp}.${rawBody}` keyed on the * endpoint's signing secret — the same pattern Stripe uses. Receivers @@ -16,6 +16,11 @@ * with attempts + error, and admins can hit POST /webhooks/:id/ * deliveries/:deliveryId/retry from the UI. A background retry cron * with exponential backoff is an obvious future extension. 
+ * + * Multi-tenant: dispatch runs asynchronously after the request that + * triggered it has responded — so the per-request transaction is gone + * by the time we run. We use the privileged `db` directly and filter + * every query by `tenantId` explicitly. */ import { createHmac, randomBytes } from 'node:crypto'; @@ -55,14 +60,14 @@ export function signPayload(secret: string, timestamp: string, rawBody: string): * AFTER their DB writes have committed, so a subscriber receiving an * event can safely fetch the related rows via the API. */ -export function dispatchEvent(event: WebhookEventType, data: unknown): void { - void runDispatch(event, data).catch((err) => { +export function dispatchEvent(tenantId: string, event: WebhookEventType, data: unknown): void { + void runDispatch(tenantId, event, data).catch((err) => { console.error(`[webhook] dispatch failed: ${err instanceof Error ? err.message : String(err)}`); }); } -async function runDispatch(event: WebhookEventType, data: unknown): Promise { - const endpoints = await db(TABLES.WebhookEndpoint).where({ active: true }); +async function runDispatch(tenantId: string, event: WebhookEventType, data: unknown): Promise { + const endpoints = await db(TABLES.WebhookEndpoint).where({ tenantId, active: true }); const matching = endpoints.filter((e) => { const events = Array.isArray(e.events) ? 
e.events : []; return events.includes(event) || events.includes('*'); @@ -76,15 +81,16 @@ async function runDispatch(event: WebhookEventType, data: unknown): Promise deliverOne(endpoint, envelope))); + await Promise.all(matching.map((endpoint) => deliverOne(tenantId, endpoint, envelope))); } -async function deliverOne(endpoint: WebhookEndpointRow, envelope: WebhookEnvelope): Promise { +async function deliverOne(tenantId: string, endpoint: WebhookEndpointRow, envelope: WebhookEnvelope): Promise { const deliveryId = ulid(); const body = JSON.stringify(envelope); await db(TABLES.WebhookDelivery).insert({ id: deliveryId, + tenantId, endpointId: endpoint.id, eventId: envelope.id, eventType: envelope.event, @@ -143,13 +149,19 @@ async function attemptDelivery(deliveryId: string, endpoint: WebhookEndpointRow, } } -export async function redeliver(deliveryId: string): Promise { - const delivery = await db(TABLES.WebhookDelivery).where({ id: deliveryId }).first(); +export async function redeliver(tenantId: string, deliveryId: string): Promise { + const delivery = await db(TABLES.WebhookDelivery) + .where({ id: deliveryId, tenantId }) + .first(); if (!delivery) return null; - const endpoint = await db(TABLES.WebhookEndpoint).where({ id: delivery.endpointId }).first(); + const endpoint = await db(TABLES.WebhookEndpoint) + .where({ id: delivery.endpointId, tenantId }) + .first(); if (!endpoint) return null; const body = typeof delivery.payload === 'string' ? delivery.payload : JSON.stringify(delivery.payload); await attemptDelivery(deliveryId, endpoint, body); - return (await db(TABLES.WebhookDelivery).where({ id: deliveryId }).first()) ?? null; + return ( + (await db(TABLES.WebhookDelivery).where({ id: deliveryId, tenantId }).first()) ?? 
null + ); } diff --git a/apps/portal/Dockerfile b/apps/portal/Dockerfile index ce5688e..d4a9270 100644 --- a/apps/portal/Dockerfile +++ b/apps/portal/Dockerfile @@ -34,10 +34,20 @@ RUN pnpm --filter @openpartner/db build \ FROM nginx:1.27-alpine AS runner RUN apk add --no-cache tini \ - && rm /etc/nginx/conf.d/default.conf + && rm /etc/nginx/conf.d/default.conf \ + # Make every path nginx writes to owned by the unprivileged `nginx` + # user that already exists in the image. Required because we drop + # privileges via USER below, and nginx writes pid/cache/logs at boot. + # Listening on 8080 (>1024) means we don't need CAP_NET_BIND_SERVICE. + && chown -R nginx:nginx /var/cache/nginx /var/log/nginx /etc/nginx/conf.d /usr/share/nginx/html \ + && touch /var/run/nginx.pid \ + && chown nginx:nginx /var/run/nginx.pid ENV API_UPSTREAM=api:4601 -COPY apps/portal/nginx.conf.template /etc/nginx/templates/default.conf.template -COPY --from=builder /repo/apps/portal/dist /usr/share/nginx/html +COPY --chown=nginx:nginx apps/portal/nginx.conf.template /etc/nginx/templates/default.conf.template +COPY --from=builder --chown=nginx:nginx /repo/apps/portal/dist /usr/share/nginx/html + +USER nginx + EXPOSE 8080 HEALTHCHECK --interval=30s --timeout=5s --start-period=5s --retries=3 \ CMD wget -qO- http://127.0.0.1:8080/ >/dev/null || exit 1 diff --git a/apps/portal/index.html b/apps/portal/index.html index 046e2a2..c1826bf 100644 --- a/apps/portal/index.html +++ b/apps/portal/index.html @@ -4,6 +4,12 @@ OpenPartner + + + + + +
diff --git a/apps/portal/package.json b/apps/portal/package.json index 84ef1ee..da56a50 100644 --- a/apps/portal/package.json +++ b/apps/portal/package.json @@ -23,7 +23,7 @@ "@types/react-dom": "18.3.7", "@vitejs/plugin-react": "^4.3.1", "typescript": "5.4.5", - "vite": "^5.4.8", - "vitest": "^1.6.0" + "vite": "^6.4.2", + "vitest": "^3.2.4" } } diff --git a/apps/portal/public/apple-touch-icon.png b/apps/portal/public/apple-touch-icon.png new file mode 100644 index 0000000..f32a36d Binary files /dev/null and b/apps/portal/public/apple-touch-icon.png differ diff --git a/apps/portal/public/favicon-16x16.png b/apps/portal/public/favicon-16x16.png new file mode 100644 index 0000000..453b9b1 Binary files /dev/null and b/apps/portal/public/favicon-16x16.png differ diff --git a/apps/portal/public/favicon-32x32.png b/apps/portal/public/favicon-32x32.png new file mode 100644 index 0000000..98663d0 Binary files /dev/null and b/apps/portal/public/favicon-32x32.png differ diff --git a/apps/portal/public/favicon.ico b/apps/portal/public/favicon.ico new file mode 100644 index 0000000..bec8ae8 Binary files /dev/null and b/apps/portal/public/favicon.ico differ diff --git a/apps/portal/public/logo-mark-green.svg b/apps/portal/public/logo-mark-green.svg new file mode 100644 index 0000000..a0e24ce --- /dev/null +++ b/apps/portal/public/logo-mark-green.svg @@ -0,0 +1,10 @@ + + + + + + + + + + \ No newline at end of file diff --git a/apps/portal/public/logo-mark-white.svg b/apps/portal/public/logo-mark-white.svg new file mode 100644 index 0000000..95751fa --- /dev/null +++ b/apps/portal/public/logo-mark-white.svg @@ -0,0 +1,10 @@ + + + + + + + + + + \ No newline at end of file diff --git a/apps/portal/public/og-default.png b/apps/portal/public/og-default.png new file mode 100644 index 0000000..acd1e63 Binary files /dev/null and b/apps/portal/public/og-default.png differ diff --git a/apps/portal/public/og-preview.svg b/apps/portal/public/og-preview.svg new file mode 100644 index 
0000000..fed9743 --- /dev/null +++ b/apps/portal/public/og-preview.svg @@ -0,0 +1,90 @@ + + + + + + + + + + + + + + + + + + + OpenPartner + + + Launch a partner program. + Grow it with the Network. + + + Affiliate, referral, and creator programs · built-in marketplace · direct Stripe payouts + + + MIT · Self-host or hosted · github.com/getcoherence/openpartner + diff --git a/apps/portal/src/App.tsx b/apps/portal/src/App.tsx index 2a24eb6..85604f2 100644 --- a/apps/portal/src/App.tsx +++ b/apps/portal/src/App.tsx @@ -11,17 +11,17 @@ import { ShieldCheck, Download, LogOut, - Compass, - Package2, - Inbox, - Handshake, - Store, - Megaphone, + Webhook, + Settings, Mail, UserCog, - Webhook, + Globe, + Megaphone, + Inbox, + ChevronRight, } from 'lucide-react'; import { clearApiKey, api, type Principal } from './api.js'; +import { useTenantBase } from './tenant-base.js'; import { theme } from './theme.js'; import { Dashboard } from './pages/Dashboard.js'; import { LinksPage } from './pages/Links.js'; @@ -29,40 +29,111 @@ import { CommissionsPage } from './pages/Commissions.js'; import { PayoutsPage } from './pages/Payouts.js'; import { ConnectPage } from './pages/Connect.js'; import { AdminPartners } from './pages/AdminPartners.js'; +import { AdminPartnerPrograms } from './pages/AdminPartnerPrograms.js'; +import { AdminPartnerCoupons } from './pages/AdminPartnerCoupons.js'; import { AdminCampaigns } from './pages/AdminCampaigns.js'; import { AdminReview } from './pages/AdminReview.js'; import { AdminExport } from './pages/AdminExport.js'; -import { DiscoverPage } from './pages/network/Discover.js'; -import { MyRequestsPage } from './pages/network/MyRequests.js'; -import { MyPartnershipsPage } from './pages/network/MyPartnerships.js'; -import { VendorOfferingsPage } from './pages/network/VendorOfferings.js'; -import { VendorRequestsPage } from './pages/network/VendorRequests.js'; -import { AdminNetworkVendors } from './pages/network/AdminNetworkVendors.js'; -import { 
AdminNetworkCreators } from './pages/network/AdminNetworkCreators.js'; import { LoginPage } from './pages/auth/Login.js'; -import { SignupPage } from './pages/auth/Signup.js'; -import { VendorSignupPage } from './pages/auth/VendorSignup.js'; import { MagicLandingPage } from './pages/auth/MagicLanding.js'; -import { DevMailboxPage } from './pages/admin/DevMailbox.js'; import { WebhooksPage } from './pages/admin/Webhooks.js'; +import { AdminSettings } from './pages/admin/Settings.js'; +import { AdminBilling } from './pages/admin/Billing.js'; +import { AdminAdmins } from './pages/admin/Admins.js'; +import { AdminNetwork } from './pages/admin/Network.js'; +import { AdminNetworkComplete } from './pages/admin/NetworkComplete.js'; +import { AdminNetworkOfferings } from './pages/admin/NetworkOfferings.js'; +import { AdminNetworkRequests } from './pages/admin/NetworkRequests.js'; +import { AdminNetworkCreators } from './pages/admin/NetworkCreators.js'; +import { AdminNetworkBilling } from './pages/admin/NetworkBilling.js'; +import { DiscoverPage } from './pages/partner/Discover.js'; +import { OfferingDetailPage } from './pages/partner/OfferingDetail.js'; +import { VendorDetailPage } from './pages/partner/VendorDetail.js'; +import { MyAffiliationsPage } from './pages/partner/MyAffiliations.js'; +import { MyRequestsPage } from './pages/partner/MyRequests.js'; +import { MyProfilePage } from './pages/partner/MyProfile.js'; +import { InstallPage } from './pages/Install.js'; +import { LandingPage } from './pages/Landing.js'; +import { SignupPage } from './pages/Signup.js'; +import { SigninPage } from './pages/Signin.js'; +import { WorkspacesPage } from './pages/Workspaces.js'; +import { PlatformMagicLandingPage } from './pages/auth/PlatformMagicLanding.js'; +import { CreatorSignupPage } from './pages/creator/CreatorSignup.js'; +import { CreatorSigninPage } from './pages/creator/CreatorSignin.js'; +import { CreatorMagicLandingPage } from './pages/creator/CreatorMagicLanding.js'; 
+import { CreatorShell } from './pages/creator/CreatorShell.js'; +import { CreatorPublicProfilePage } from './pages/creator/CreatorPublicProfile.js'; import { FraudReviewPage } from './pages/FraudReview.js'; -import { OfferingDetailPage } from './pages/network/OfferingDetail.js'; -import { CreatorProfilePage } from './pages/network/CreatorProfile.js'; +import { useQuery } from '@tanstack/react-query'; interface AuthState { loading: boolean; principal: Principal | null; } +interface InstallStatus { + needsSetup: boolean; + reason?: 'multi_tenant'; +} + export function App() { + // First-run gate. Three modes the probe can return: + // { needsSetup: true } — single-tenant, no admin yet + // { needsSetup: false } — single-tenant, ready + // { needsSetup: false, reason: 'multi_tenant' } — multi-tenant deploy + // + // Multi-tenant flips the routing entirely: root is the public landing, + // /signup creates a tenant, and the Shell only mounts under /t//. + const install = useQuery({ + queryKey: ['install-status'], + queryFn: async () => { + const r = await fetch('/api/install/status'); + return (await r.json()) as InstallStatus; + }, + staleTime: Infinity, + }); + + if (install.isLoading) return null; + const needsSetup = install.data?.needsSetup ?? false; + const isMultiTenant = install.data?.reason === 'multi_tenant'; + return ( - } /> - } /> - } /> - } /> - } /> + {isMultiTenant ? ( + <> + } /> + } /> + } /> + } /> + {/* Platform-identity magic link (one email regardless of how many brands you admin). */} + } /> + {/* Platform-level Creator surfaces — separate auth from vendor admins. */} + } /> + } /> + } /> + } /> + {/* Public creator profiles — no auth, browsable from anywhere. */} + } /> + } /> + } /> + } /> + } /> + + ) : needsSetup ? 
( + <> + } /> + } /> + } /> + + ) : ( + <> + } /> + } /> + } /> + } /> + + )} ); @@ -71,18 +142,16 @@ export function App() { function Shell() { const [auth, setAuth] = useState({ loading: true, principal: null }); const location = useLocation(); + const tenantBase = useTenantBase(); useEffect(() => { - // Always attempt /auth/whoami — it'll accept either the API-key - // Bearer token (if present in localStorage) or the op_session cookie - // from a magic-link sign-in. api('/auth/whoami') .then((p) => setAuth({ loading: false, principal: p })) .catch(() => setAuth({ loading: false, principal: null })); }, []); if (auth.loading) return Loading…; - if (!auth.principal) return ; + if (!auth.principal) return ; return (
@@ -91,37 +160,47 @@ function Shell() { } /> - {/* Vendor-side OpenPartner (core attribution) */} } /> } /> } /> } /> + {/* Network discovery — open to anyone signed in (vendor admin can browse too). */} + } /> + } /> + } /> + + {/* Partner-only Network surfaces. */} + {auth.principal.role === 'partner' && ( + <> + } /> + } /> + } /> + + )} + {auth.principal.role === 'admin' && ( <> } /> + } /> + } /> } /> } /> } /> - } /> } /> } /> - } /> - } /> + } /> + } /> + } /> + } /> + } /> + } /> + } /> + } /> + } /> )} - {/* OpenPartner Network — creator-side */} - } /> - } /> - } /> - } /> - } /> - - {/* OpenPartner Network — vendor-side */} - } /> - } /> - } /> @@ -129,12 +208,35 @@ function Shell() { ); } +interface ProgramSettings { + programName: string | null; + supportEmail: string | null; + logoUrl: string | null; +} + function Sidebar({ principal }: { principal: Principal }) { const nav = useNavigate(); + const tenantBase = useTenantBase(); + const settings = useQuery({ + queryKey: ['program-settings'], + queryFn: () => api('/config/program'), + // Refetch infrequently — admin rarely changes this. + staleTime: 60_000, + }); + const programName = settings.data?.programName || 'OpenPartner'; + const supportEmail = settings.data?.supportEmail || null; + return ( ); } @@ -285,58 +399,143 @@ function PrincipalChip({ principal }: { principal: Principal }) { function describePrincipal(p: Principal): { label: string; sublabel: string; initial: string; hue: { bg: string; fg: string } } { if (p.role === 'admin') { - return { label: 'Admin', sublabel: 'admin', initial: 'A', hue: { bg: theme.accentSoft, fg: theme.accent } }; - } - if (p.role === 'partner') { - return { - label: p.partner?.name ?? 'Partner', - sublabel: 'partner', - initial: p.partner?.name?.[0]?.toUpperCase() ?? 'P', - hue: { bg: '#1e2a3d', fg: theme.info }, - }; - } - if (p.role === 'network_vendor') { + const name = p.admin?.name ?? 'Admin'; return { - label: p.vendor?.name ?? 
'Vendor', - sublabel: 'vendor', - initial: p.vendor?.name?.[0]?.toUpperCase() ?? 'V', - hue: { bg: '#2a2018', fg: theme.warn }, + label: name, + sublabel: p.source === 'env' ? 'admin (env)' : 'admin', + initial: name[0]?.toUpperCase() ?? 'A', + hue: { bg: theme.accentSoft, fg: theme.accent }, }; } return { - label: p.creator?.name ?? 'Creator', - sublabel: 'creator', - initial: p.creator?.name?.[0]?.toUpperCase() ?? 'C', - hue: { bg: '#2a1a2a', fg: '#e879f9' }, + label: p.partner?.name ?? 'Partner', + sublabel: 'partner', + initial: p.partner?.name?.[0]?.toUpperCase() ?? 'P', + hue: { bg: '#1e2a3d', fg: theme.info }, }; } -function NavSection({ title, children }: { title: string; children: ReactNode }) { +function NavSection({ + title, + collapsible, + storageKey, + children, +}: { + title: string; + collapsible?: boolean; + /** Required when collapsible — disambiguates sections that share a + * title (e.g. partner-side vs brand-side "Network"). Persists the + * open/closed state across page reloads. */ + storageKey?: string; + children: ReactNode; +}) { + // Default closed when collapsible — matches the user's stated + // preference. localStorage persists per storageKey across reloads. + const lsKey = storageKey ? `op:nav-collapsed:${storageKey}` : null; + const [collapsed, setCollapsed] = useState(() => { + if (!collapsible) return false; + if (typeof window === 'undefined' || !lsKey) return true; + const raw = window.localStorage.getItem(lsKey); + if (raw === '0') return false; + if (raw === '1') return true; + return true; // default-collapsed when no preference yet + }); + function toggle() { + setCollapsed((prev) => { + const next = !prev; + if (lsKey && typeof window !== 'undefined') { + window.localStorage.setItem(lsKey, next ? 
'1' : '0'); + } + return next; + }); + } + + const headerCommonStyle: React.CSSProperties = { + fontSize: 11, + color: theme.textDim, + textTransform: 'uppercase', + letterSpacing: '0.08em', + fontWeight: 600, + padding: '0 8px 8px', + }; + return (
-
- {title} -
-
{children}
+ {collapsible ? ( + + ) : ( +
{title}
+ )} + {!collapsed && ( +
{children}
+ )}
); } +/** Network sidebar section. We only surface Offerings / Requests / + * Billing once the brand is connected — those pages all error out if + * there's no vendorToken, and showing dead nav entries is just noise. + * Connection itself stays visible so the admin can come back to wire + * it up. */ +function NetworkNav() { + const { data } = useQuery({ + queryKey: ['network-membership'], + queryFn: () => api<{ enabled: boolean; hasVendorToken: boolean }>('/config/network'), + staleTime: 60_000, + retry: false, + }); + const connected = !!(data?.enabled && data.hasVendorToken); + return ( + + }>{connected ? 'Connection' : 'Get connected'} + {connected && ( + <> + }>Offerings + }>Requests + }>Discover creators + }>Billing + + )} + + ); +} + function NavItem({ to, icon, children }: { to: string; icon: ReactNode; children: ReactNode }) { const location = useLocation(); - const active = location.pathname === to || (to !== '/' && location.pathname.startsWith(to)); + const tenantBase = useTenantBase(); + const href = to.startsWith('/') ? `${tenantBase}${to === '/' ? '' : to}` || '/' : to; + const active = location.pathname === href || (href !== '/' && location.pathname.startsWith(href)); return ( - O -
+ OpenPartner ); } diff --git a/apps/portal/src/api.ts b/apps/portal/src/api.ts index 9661b94..b0918de 100644 --- a/apps/portal/src/api.ts +++ b/apps/portal/src/api.ts @@ -12,6 +12,18 @@ export function clearApiKey(): void { localStorage.removeItem(KEY_STORAGE); } +/** + * Pull the tenant slug from the current URL path so multi-tenant mode + * scopes API calls automatically. In single-tenant mode the SPA never + * mounts under /t// so this returns null and api() calls hit /api/* + * unchanged. + */ +export function currentTenantSlug(): string | null { + if (typeof window === 'undefined') return null; + const m = window.location.pathname.match(/^\/t\/([a-z0-9-]+)(?:\/|$)/); + return m ? m[1]! : null; +} + export async function api( path: string, init: Omit & { body?: unknown } = {}, @@ -19,18 +31,28 @@ export async function api( const key = getApiKey(); const headers = new Headers(init.headers); if (key) headers.set('Authorization', `Bearer ${key}`); - if (init.body !== undefined && !headers.has('content-type')) { + // Blob/File bodies carry their own content-type and pass through as-is + // (used by upload endpoints). Everything else gets JSON treatment. + const isBinaryBody = init.body instanceof Blob; + if (init.body !== undefined && !isBinaryBody && !headers.has('content-type')) { headers.set('content-type', 'application/json'); } - const res = await fetch(`/api${path}`, { + if (isBinaryBody && !headers.has('content-type')) { + headers.set('content-type', (init.body as Blob).type || 'application/octet-stream'); + } + const slug = currentTenantSlug(); + const tenantPrefix = slug ? `/t/${slug}` : ''; + const res = await fetch(`/api${tenantPrefix}${path}`, { ...init, headers, - credentials: 'include', // send op_session cookie on magic-link flow + credentials: 'include', body: init.body === undefined ? undefined - : typeof init.body === 'string' - ? init.body - : JSON.stringify(init.body), + : isBinaryBody + ? 
(init.body as Blob) + : typeof init.body === 'string' + ? init.body + : JSON.stringify(init.body), }); if (res.status === 401) { clearApiKey(); @@ -55,20 +77,9 @@ export class ApiError extends Error { } export interface Principal { - role: 'admin' | 'partner' | 'network_vendor' | 'network_creator'; + role: 'admin' | 'partner'; source?: string; partnerId?: string; partner?: { id: string; name: string; email: string; stripeConnected: boolean }; - networkVendorId?: string; - vendor?: { id: string; name: string; slug: string; logoUrl: string | null; websiteUrl: string | null; status: string }; - networkCreatorId?: string; - creator?: { - id: string; - name: string; - handle: string; - email: string; - avatarUrl: string | null; - defaultPromoCode: string | null; - status: string; - }; + admin?: { id: string; name: string; email: string }; } diff --git a/apps/portal/src/main.tsx b/apps/portal/src/main.tsx index 963a4ae..17277da 100644 --- a/apps/portal/src/main.tsx +++ b/apps/portal/src/main.tsx @@ -1,4 +1,5 @@ import './global.css'; +import './posthog.js'; import { StrictMode } from 'react'; import { createRoot } from 'react-dom/client'; import { QueryClient, QueryClientProvider } from '@tanstack/react-query'; diff --git a/apps/portal/src/pages/AdminCampaigns.tsx b/apps/portal/src/pages/AdminCampaigns.tsx index 9d837b2..77937c4 100644 --- a/apps/portal/src/pages/AdminCampaigns.tsx +++ b/apps/portal/src/pages/AdminCampaigns.tsx @@ -1,6 +1,6 @@ import { useState } from 'react'; import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query'; -import { Plus, Tag } from 'lucide-react'; +import { HelpCircle, Plus, Tag } from 'lucide-react'; import { api } from '../api.js'; import { theme } from '../theme.js'; import { Button, Card, EmptyState, ErrorBanner, Input, Label, Page, Select, Table, formatDate } from '../ui.js'; @@ -11,9 +11,20 @@ interface Campaign { commissionRule: { type: string; value: number; recurring?: boolean }; attributionWindowDays: number; 
attributionModel: string; + destinationUrl: string; + deepLinkAllowedDomains: string | null; + startsAt: string | null; + endsAt: string | null; createdAt: string; } +type CampaignStatus = 'scheduled' | 'active' | 'ended'; +function statusOf(c: Pick, at: Date = new Date()): CampaignStatus { + if (c.startsAt && at < new Date(c.startsAt)) return 'scheduled'; + if (c.endsAt && at >= new Date(c.endsAt)) return 'ended'; + return 'active'; +} + export function AdminCampaigns() { const qc = useQueryClient(); const [showCreate, setShowCreate] = useState(false); @@ -48,9 +59,16 @@ export function AdminCampaigns() { } /> ) : ( [ {c.name}, + , + + {c.destinationUrl ? new URL(c.destinationUrl).hostname + new URL(c.destinationUrl).pathname.replace(/\/$/, '') : '—'} + {c.deepLinkAllowedDomains && ( + + deep links + )} + , {c.commissionRule.type === 'percent' ? `${c.commissionRule.value}%` : `$${c.commissionRule.value} fixed`} {c.commissionRule.recurring && (recurring)} @@ -67,11 +85,16 @@ export function AdminCampaigns() { function CreateCampaign({ onClose, onCreated }: { onClose: () => void; onCreated: () => void }) { const [name, setName] = useState(''); + const [destinationUrl, setDestinationUrl] = useState(''); + const [deepLinkDomains, setDeepLinkDomains] = useState(''); const [ruleType, setRuleType] = useState<'percent' | 'fixed'>('percent'); const [ruleValue, setRuleValue] = useState('20'); const [recurring, setRecurring] = useState(true); const [windowDays, setWindowDays] = useState('60'); const [model, setModel] = useState<'last_click' | 'first_click' | 'linear' | 'position'>('last_click'); + const [startsAt, setStartsAt] = useState(''); + const [endsAt, setEndsAt] = useState(''); + const [grantToAllPartners, setGrantToAllPartners] = useState(false); const mut = useMutation({ mutationFn: () => @@ -79,9 +102,14 @@ function CreateCampaign({ onClose, onCreated }: { onClose: () => void; onCreated method: 'POST', body: { name, + destinationUrl, + deepLinkAllowedDomains: 
deepLinkDomains.trim() || undefined, commissionRule: { type: ruleType, value: Number(ruleValue), recurring }, attributionWindowDays: Number(windowDays), attributionModel: model, + startsAt: startsAt ? new Date(startsAt).toISOString() : null, + endsAt: endsAt ? new Date(endsAt).toISOString() : null, + grantToAllPartners: grantToAllPartners || undefined, }, }), onSuccess: onCreated, @@ -95,6 +123,29 @@ function CreateCampaign({ onClose, onCreated }: { onClose: () => void; onCreated setName(e.target.value)} placeholder="Default" /> +
+ + setDestinationUrl(e.target.value)} + placeholder="https://yourbrand.com/landing-page" + /> +
+ Where partner share-links for this campaign land. Partners can’t change this unless you allow deep links below. +
+
+
+ + setDeepLinkDomains(e.target.value)} + placeholder="yourbrand.com,docs.yourbrand.com" + /> +
+ Comma-separated host list. Partners can override the destination on share-links as long as their override matches one of these. Leave blank to lock destinations. +
+
@@ -118,7 +169,17 @@ function CreateCampaign({ onClose, onCreated }: { onClose: () => void; onCreated setWindowDays(e.target.value)} />
- +
+
+
+ + setStartsAt(e.target.value)} /> +
+ Leave blank to start immediately. Before this date the campaign is hidden from creators. +
+
+
+ + setEndsAt(e.target.value)} /> +
+ Leave blank to run indefinitely. Past this date existing share-links keep redirecting but no new commissions accrue. +
+
+
+
+ +
+ Off by default so VIP / scoped campaigns stay private. Only affects the current + partner roster — new invitees still need to be granted explicitly. +
+
- @@ -136,3 +227,37 @@ function CreateCampaign({ onClose, onCreated }: { onClose: () => void; onCreated ); } + +/** Field label with a hover-to-explain question-mark icon. Uses the + * native `title` attribute so we don't need a tooltip library — the + * browser handles positioning, multi-line via \n. Help text should be + * plain prose; no HTML. */ +function LabelWithHelp({ label, help }: { label: string; help: string }) { + return ( +
+ + + + +
+ ); +} + +function CampaignStatusPill({ campaign }: { campaign: Pick }) { + const status = statusOf(campaign); + const palette: Record = { + active: { bg: theme.successSoft, fg: theme.success, label: 'Active' }, + scheduled: { bg: `${theme.accent}15`, fg: theme.accent, label: 'Scheduled' }, + ended: { bg: theme.surface2, fg: theme.textMuted, label: 'Ended' }, + }; + const { bg, fg, label } = palette[status]; + return ( + + {label} + + ); +} diff --git a/apps/portal/src/pages/AdminExport.tsx b/apps/portal/src/pages/AdminExport.tsx index 1f6bc16..a87d2d5 100644 --- a/apps/portal/src/pages/AdminExport.tsx +++ b/apps/portal/src/pages/AdminExport.tsx @@ -1,4 +1,5 @@ -import { Download, FileJson, FileSpreadsheet, Archive } from 'lucide-react'; +import { useState } from 'react'; +import { Download, FileJson, FileSpreadsheet, Archive, Upload } from 'lucide-react'; import { getApiKey } from '../api.js'; import { theme } from '../theme.js'; import { Button, Card, Page, SectionHeading } from '../ui.js'; @@ -91,6 +92,132 @@ export function AdminExport() {
+ + Import partners from CSV + ); } + +interface ImportReportRow { + row: number; + email: string; + status: 'created' | 'skipped_email_taken' | 'invalid'; + reason?: string; + partnerId?: string; +} +interface ImportResult { + dryRun: boolean; + summary: { total: number; created: number; skipped: number; invalid: number }; + report: ImportReportRow[]; +} + +function PartnersCsvImporter() { + const key = getApiKey() ?? ''; + const [csv, setCsv] = useState(''); + const [busy, setBusy] = useState(false); + const [result, setResult] = useState(null); + const [error, setError] = useState(null); + + async function run(dryRun: boolean) { + setError(null); + setBusy(true); + try { + const res = await fetch(`/api/import/partners-csv?dryRun=${dryRun}`, { + method: 'POST', + headers: { Authorization: `Bearer ${key}`, 'Content-Type': 'text/csv' }, + body: csv, + }); + const json = await res.json(); + if (!res.ok) { + setError(typeof json.detail === 'string' ? json.detail : json.error ?? 'failed'); + setResult(null); + } else { + setResult(json as ImportResult); + } + } finally { + setBusy(false); + } + } + + function onFile(e: React.ChangeEvent) { + const f = e.target.files?.[0]; + if (!f) return; + const reader = new FileReader(); + reader.onload = () => setCsv(String(reader.result ?? '')); + reader.readAsText(f); + } + + return ( + +

+ Migrate from Impact, Rewardful, Refersion, etc. — adapt your export to the canonical + format below and upload. Re-runnable: existing emails are skipped, not duplicated. +

+
+ + Canonical CSV format + +
+{`email,name,activatedAt,metadata
+ada@example.com,Ada Lovelace,2024-01-15T00:00:00Z,"{""source"":""impact""}"
+grace@example.com,Grace Hopper,,
+`}
+        
+

+ email + name required. activatedAt defaults to + now (existing partners come in already-activated). metadata is optional + JSON merged into Partner.metadata. New partners get access to all current campaigns. +

+
+ +