diff --git a/.claude/skills/generate-e2e-tests/SKILL.md b/.claude/skills/generate-e2e-tests/SKILL.md new file mode 100644 index 000000000..022531c88 --- /dev/null +++ b/.claude/skills/generate-e2e-tests/SKILL.md @@ -0,0 +1,335 @@ +--- +name: generate-e2e-tests +description: Generate comprehensive Playwright e2e tests from a .netcanvas protocol file and an optional recording. Invoke with /generate-e2e-tests [recording-path] +user-invocable: true +--- + +# Generate E2E Tests from Protocol + +You are generating comprehensive Playwright e2e tests for a Fresco interview protocol. + +## Inputs + +- **Protocol path**: `$1` — path to a `.netcanvas` file (ZIP containing `protocol.json`) +- **Recording path** (optional): `$2` — path to a recording directory (contains `actions.jsonl`, `SESSION.md`, `screenshots/`) + +If no recording is provided, generate a **synthetic happy path** using the data generation strategies in STAGE_TEST_REFERENCE.md (see "Synthetic Data Generation" section). + +## Step 1: Read Reference Materials + +Read these files to understand the testing infrastructure and patterns: + +1. `tests/e2e/docs/STAGE_TEST_REFERENCE.md` — what to test for each stage type, fixture availability, validation testing patterns +2. `tests/e2e/fixtures/stage-fixture.ts` — available fixture methods and their signatures +3. `tests/e2e/fixtures/interview-fixture.ts` — interview navigation fixture +4. `tests/e2e/fixtures/protocol-fixture.ts` — protocol installation and network state inspection +5. `tests/e2e/CLAUDE.md` — full e2e testing architecture guide +6. `tests/e2e/specs/interview/silos-protocol.spec.ts` — reference test implementation to match style/structure +7. `CLAUDE.md` — project coding conventions (path aliases, TypeScript, etc.) 
+ +## Step 2: Extract and Analyze Protocol + +Extract the protocol JSON: + +```bash +unzip -p "$1" protocol.json +``` + +From the extracted JSON, build a **stage map** — for each stage (by index), extract: + +- `type` — stage type (e.g., `NameGeneratorQuickAdd`, `EgoForm`, `Sociogram`) +- `label` — display label +- `subject` — `{ entity, type }` pointing to codebook entry (null for Information/Anonymisation) +- `introductionPanel` — title and text (if present) +- `form.fields[]` — array of `{ variable }` referencing codebook variables +- `prompts[]` — array of prompt objects (with `createEdge`, `variable`, `highlight`, etc.) +- `panels[]` — side panel configuration +- `behaviours` — `maxNodes`, `minNodes`, `freeDraw`, etc. + +For each form field, resolve the variable UUID against the codebook: + +- Look up `codebook.[entity].[type].variables.[variableId]` +- Extract: `name`, `type`, `component`, `validation`, `options` + +This gives you the field name (UUID), display name, input component type, validation rules, and available options for each form field. + +## Step 3: Analyze Recording (if provided) + +If `$2` is provided, read `$2/actions.jsonl` (one JSON object per line). + +Group actions by stage — track URL changes via the `step=N` query parameter. For each stage visited: + +- Extract the sequence of user actions (click, fill, press, select) +- Note filled values and selected options +- Note which nodes were created (names entered in quick-add or name generator forms) +- Note edge-creating interactions (sociogram clicks, dyad census selections) + +The recording represents the **happy path** — the exact user journey to replay. + +### If no recording + +Generate a synthetic happy path from the protocol alone: + +1. Walk through all stages in order (index 0 to N) +2. For each stage, use the **Synthetic Data Generation** section of STAGE_TEST_REFERENCE.md to determine what values to fill, how many nodes to create, etc. +3. 
For conditional/skip logic, choose the path that visits the **most stages** +4. Track synthetic state as you go — node names created on earlier stages are needed for bin/census/sociogram stages later + +## Step 4: Generate Test File + +Create `tests/e2e/specs/interview/.spec.ts` where `` is derived from the protocol name (kebab-case, lowercase). + +### File Structure + +Follow this exact pattern (from the reference implementation): + +```typescript +/** + * Tests + * + * Tests interview stage navigation using a real .netcanvas protocol file. + */ + +import path from 'node:path'; +import { expect, test } from '~/tests/e2e/fixtures/interview-test.js'; +import { expectURL } from '~/tests/e2e/helpers/expectations.js'; + +const PROTOCOL_PATH = path.resolve( + import.meta.dirname, + '../../data/.netcanvas', +); + +let sharedProtocolId: string; + +test.describe('', () => { + test.beforeAll(async ({ database, protocol }) => { + await database.restoreSnapshot(); + const { protocolId } = await protocol.install(PROTOCOL_PATH); + sharedProtocolId = protocolId; + }); + + test.describe('Happy Path', () => { + test.describe.configure({ mode: 'serial' }); + + let interviewId: string; + + test.beforeAll(async ({ protocol }) => { + interviewId = await protocol.createInterview(sharedProtocolId); + }); + + test.beforeEach(({ interview }) => { + interview.interviewId = interviewId; + }); + + test.afterEach(async ({ page, interview }) => { + const stepMatch = /step=(\d+)/.exec(page.url()); + if (stepMatch?.[1]) { + const step = stepMatch[1]; + // List stage indices with non-deterministic rendering + const highToleranceStages: string[] = [/* sociogram indices */]; + + await interview.capture(`stage-${step}-final`, { + maxDiffPixelRatio: highToleranceStages.includes(step) + ? 0.1 + : undefined, + }); + } + }); + + // One test() per stage... + }); +}); +``` + +### Per-Stage Test Generation + +For each stage in the protocol, generate a `test()` block. 
Use the STAGE_TEST_REFERENCE.md to determine what to test. + +#### Mapping Recording Actions to Fixture Calls + +Translate recording actions to fixture method calls using these mappings: + +| Recording Pattern | Fixture Call | +|---|---| +| Navigate to URL with `step=N` | `interview.goto(N)` | +| Click element matching next/forward button | `interview.nextButton.click()` | +| Fill input within `[data-field-name="UUID"]` | `stage.form.fillText(UUID, value)` or `fillNumber`/`fillDate` based on codebook component | +| Click radio within `[data-field-name="UUID"]` | `stage.form.selectRadio(UUID, optionLabel)` | +| Click checkbox within `[data-field-name="UUID"]` | `stage.form.selectCheckbox(UUID, optionLabel)` | +| Click toggle button within `[data-field-name="UUID"]` | `stage.form.selectToggleButton(UUID, optionLabel)` | +| Fill quick-add input + press Enter | `stage.quickAdd.addNode(value)` | +| Click "Add a person" button | `stage.nameGenerator.openAddForm()` | +| Click "Finished" button in dialog | `stage.nameGenerator.submitForm()` | +| Drag node from panel | `stage.nodePanel.dragNodeToMainList(label)` | +| Click node on sociogram (connecting) | `stage.sociogram.connectNodes(from, to)` | +| Drag node to ordinal bin | `stage.ordinalBin.dragNodeToBin(node, bin)` | +| Drag node to categorical bin | `stage.categoricalBin.dragNodeToBin(node, bin)` | + +#### Determine Form Method from Codebook + +Use the codebook variable's `component` (or `type` if no component) to pick the right form fixture method: + +| Component | Method | +|---|---| +| `Text`, `TextArea` | `fillText` | +| `Number` | `fillNumber` | +| `DatePicker` | `fillDate` | +| `RadioGroup` | `selectRadio` | +| `LikertScale` | `selectLikert` | +| `CheckboxGroup` | `selectCheckbox` | +| `ToggleButtonGroup` | `selectToggleButton` | +| `Boolean` | `selectRadio` (options are "Yes"/"No" or custom labels from codebook) | + +#### Comments + +Add a comment above each form field interaction with the field's display name 
and component type: + +```typescript +// 1. Date of birth (DatePicker) +await stage.form.fillDate('596c2ac2-...', '2000-06-15'); + +// 2. Gender identity (RadioGroup) +await stage.form.selectRadio('a06f06f5-...', 'Cisgender Male'); +``` + +### Validation Tests + +For each form stage (EgoForm, AlterForm, AlterEdgeForm), examine the codebook variables for targeted validation rules. Generate validation test assertions **within the happy path test** for that stage: + +1. **Before filling fields**: Try to advance, verify validation blocks: + ```typescript + // Verify validation blocks advancement + await interview.nextButton.click(); + await expectURL(page, /step=N/); // Still on same stage + + // Verify required field errors + await expect( + stage.form.getFieldError('field-uuid'), + ).toBeVisible(); + ``` + +2. **Then fill fields normally** from the recording data. + +Only test these validations (skip others): +- `required: true` — always test +- `minValue` / `maxValue` — test if present +- `minLength` / `maxLength` — test if present +- `pattern` — test if present +- `unique` — test if applicable (needs duplicate value scenario) +- `sameAs` / `differentFrom` — test if present + +### Network State Verification + +The sync middleware uses a 3-second debounce with leading+trailing edges. Each `interview.goto()` destroys the current page, killing any pending trailing-edge syncs. Stages that set data used by downstream skip logic or filtering must explicitly wait for that data to persist. + +#### Form stages (EgoForm, AlterForm) must click Next to submit + +Form data lives in React Hook Form's local state until the form is submitted. **You must click `interview.nextButton` at the end of every form stage** to flush the data to Redux. Without this, the sync middleware never sees the data. 
+ +For **EgoForm** stages, click Next as the last interaction (replaces the `toBeEnabled` assertion): + +```typescript +// Submit form to flush data to Redux +await interview.nextButton.click(); +``` + +For **AlterForm** stages with slides, click Next after filling the **last slide** (the earlier slides already submit when you click Next to advance): + +```typescript +// Submit last slide to flush form data to Redux +await interview.nextButton.click(); +``` + +Note: clicking Next navigates to the next stage, so the `afterEach` screenshot will capture the next stage's initial state rather than the current stage's final state. + +#### Persistence waits for skip logic + +After stages that set attributes consumed by downstream skip logic or filtering, add explicit waits using the protocol fixture. Available methods: + +- `protocol.waitForNodes(interviewId, expectedCount)` — after node creation stages +- `protocol.waitForNode(interviewId, nodeName)` — when count alone is ambiguous +- `protocol.waitForNodeAttribute(interviewId, nodeName, attributeId)` — after CategoricalBin, OrdinalBin, or AlterForm stages (checks for non-null value) +- `protocol.waitForEgoAttribute(interviewId, attributeId, expectedValue)` — after EgoForm stages + +Example for a CategoricalBin stage with downstream skip logic: + +```typescript +test('Stage N: CategoricalBin', async ({ interview, stage, protocol }) => { + await interview.goto(N); + + await stage.categoricalBin.dragNodeToBin('Dan', 'Yes'); + await stage.categoricalBin.dragNodeToBin('Alice', 'No'); + + await expect(interview.nextButton).toBeEnabled(); + + // Wait for the LAST categorized node's attribute to persist + await protocol.waitForNodeAttribute( + interview.interviewId, + 'Alice', + 'variable-uuid', + ); +}); +``` + +**Always add `protocol` to the test's destructured fixtures** when using persistence waits. + +### Stages With Placeholder Fixtures + +Check the Fixture Availability Summary in STAGE_TEST_REFERENCE.md. 
If a stage type's fixture is marked **Placeholder**, generate a minimal test with a TODO referencing the placeholder: + +```typescript +test('Stage N: Stage Label', async ({ page, interview }) => { + await interview.goto(N); + + // TODO: stage.dyadCensus is a placeholder fixture — implement its + // interaction methods before writing full test assertions. + // See DyadCensusFixture JSDoc in stage-fixture.ts for the methods needed. + // + // Expected behavior from recording: + // - Dismiss intro panel + // - Select Yes/No for each node pair + // - Auto-advances after 350ms +}); +``` + +Always reference the `stage.` property (e.g., `stage.dyadCensus`, `stage.narrative`) so the test structure is ready — it just needs the fixture methods implemented. Never use raw Playwright selectors as a fallback. + +### Skipped Stages + +If the recording skips certain stage indices (e.g., conditional stages), add a comment: + +```typescript +// Stages N-M are skipped (conditional on ) +``` + +### Browser-Specific Skips + +Add `test.skip()` for known browser limitations: + +```typescript +// Skip geospatial on Firefox (no WebGL in Playwright's Firefox) +test.skip(browserName === 'firefox', 'Firefox lacks WebGL support in Playwright'); +``` + +## Step 5: Verify Protocol File Location + +Check if the `.netcanvas` file is already in `tests/e2e/data/`. If not, suggest copying it there and update the path constant accordingly. + +## Step 6: Output Summary + +After generating the test file, output: +1. Path to the generated test file +2. Number of stages covered +3. Number of validation tests included +4. List of stages with TODO placeholders (missing fixtures) +5. Suggested next steps (copy protocol to test data, run tests, etc.) 
+ +## Important Rules + +- **Always use path aliases** (`~/tests/e2e/...`) for imports, never relative paths +- **Use `.js` extensions** in import paths (TypeScript with ESM) +- **Field names are UUIDs** — always use the variable UUID from the codebook, not the display name +- **Serial mode** — interview tests MUST use `test.describe.configure({ mode: 'serial' })` +- **Soft assertions for screenshots** — the `afterEach` capture pattern handles this via `interview.capture()` +- **No `console.log`** — project ESLint rule forbids it +- **Follow existing patterns** — match the style, structure, and conventions of `silos-protocol.spec.ts` exactly diff --git a/.env.example b/.env.example index 3a0f1c11d..567ef0424 100644 --- a/.env.example +++ b/.env.example @@ -1,3 +1,9 @@ +# ------------------- +# Required environment variables +# ------------------- +DATABASE_URL="postgres://user:password@host:5432/database?schema=public" # A pooled connection URL for Prisma. +DATABASE_URL_UNPOOLED="postgres://user:password@host:5432/database?schema=public" # A non-pooling connection URL for Prisma + # ------------------- # Optional environment variables - uncomment to use # ------------------- @@ -5,16 +11,4 @@ #DISABLE_ANALYTICS # true or false - If true, the app will not send anonymous analytics and error data. Defaults to false. #SANDBOX_MODE=false # true or false - if true, the app will use the sandbox mode, which disables resetting the database and other features #PUBLIC_URL="http://yourdomain.com" # When using advanced deployment, this is required. Set to the domain name of your app -#INSTALLATION_ID="your-app-name" # A unique identifier for your app, used for analytics. Generated automatically if not set. -#USE_NEON_POSTGRES_ADAPTER=false # true or false - If true, uses Neon serverless PostgreSQL adapter instead of standard pg adapter. Required for Vercel/Netlify deployments with Neon. Defaults to false. 
- -# ------------------- -# Required environment variables -# ------------------- - -POSTGRES_USER="postgres" # Your PostgreSQL username -POSTGRES_PASSWORD="postgres" # Your PostgreSQL password -POSTGRES_DATABASE="postgres" # Your PostgreSQL database name -POSTGRES_HOST="postgres" # Your PostgreSQL host -DATABASE_URL="postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@${POSTGRES_HOST}:5432/${POSTGRES_DATABASE}?schema=public" # A pooled connection URL for Prisma. -DATABASE_URL_UNPOOLED="postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@${POSTGRES_HOST}:5432/${POSTGRES_DATABASE}?schema=public" # A non-pooling connection URL for Prisma \ No newline at end of file +#USE_NEON_POSTGRES_ADAPTER=false # true or false - If true, uses Neon serverless PostgreSQL adapter instead of standard pg adapter. Required for Vercel/Netlify deployments with Neon. Defaults to false. \ No newline at end of file diff --git a/.eslintrc.cjs b/.eslintrc.cjs deleted file mode 100644 index 8dcfe753a..000000000 --- a/.eslintrc.cjs +++ /dev/null @@ -1,69 +0,0 @@ -const path = require('path'); - -/** @type {import("eslint").Linter.Config} */ -const config = { - overrides: [ - { - extends: [ - 'plugin:@typescript-eslint/stylistic-type-checked', - 'plugin:@typescript-eslint/recommended-type-checked', - ], - files: ['*.ts', '*.tsx'], - parserOptions: { - project: path.join(__dirname, 'tsconfig.json'), - }, - }, - ], - parser: '@typescript-eslint/parser', - parserOptions: { - project: path.join(__dirname, 'tsconfig.json'), - }, - plugins: ['@typescript-eslint'], - extends: [ - 'eslint:recommended', - 'plugin:@typescript-eslint/stylistic', - 'plugin:@typescript-eslint/recommended', - 'next/core-web-vitals', - 'prettier', - 'plugin:storybook/recommended', - ], - ignorePatterns: [ - 'node_modules', - '*.stories.*', - '*.test.*', - 'public', - '.eslintrc.cjs', - 'lib/gb/generated', - 'storybook-static', - ], - rules: { - '@next/next/no-img-element': 'off', - 'import/no-anonymous-default-export': 'off', - 
'@typescript-eslint/consistent-type-definitions': ['error', 'type'], - 'no-process-env': 'error', - 'no-console': 'error', - '@typescript-eslint/consistent-type-imports': [ - 'warn', - { - prefer: 'type-imports', - fixStyle: 'inline-type-imports', - }, - ], - '@typescript-eslint/no-unused-vars': [ - 'error', - { - caughtErrors: 'none', - argsIgnorePattern: '^_', - }, - ], - '@typescript-eslint/no-misused-promises': [ - 'error', - { - checksVoidReturn: false, - }, - ], - 'no-unreachable': 'error', - }, -}; - -module.exports = config; diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 9a0a68482..4a7cb67ad 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -4,4 +4,4 @@ updates: directory: '/' target-branch: 'next' schedule: - interval: "weekly" + interval: 'weekly' diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 0a8e428c5..feec74f3b 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -11,132 +11,24 @@ on: - '*' jobs: - lint: + check: runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + include: + - name: Lint + command: pnpm lint + - name: Type check + command: pnpm typecheck + - name: Unit tests + command: pnpm test:unit + - name: Knip + command: pnpm knip + name: ${{ matrix.name }} steps: - - name: Checkout repository - uses: actions/checkout@v4 + - name: Setup + uses: complexdatacollective/github-actions/setup-pnpm@v1 - - name: Setup Node.js - uses: actions/setup-node@v4 - with: - node-version-file: '.nvmrc' - - - name: Install pnpm - uses: pnpm/action-setup@v4 - - - name: Get pnpm store directory - id: pnpm-cache - run: echo "STORE_PATH=$(pnpm store path)" >> $GITHUB_OUTPUT - - - name: Setup pnpm cache - uses: actions/cache@v4 - with: - path: ${{ steps.pnpm-cache.outputs.STORE_PATH }} - key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }} - restore-keys: | - ${{ runner.os }}-pnpm-store- - - - name: Install dependencies - run: pnpm install 
--frozen-lockfile - - - name: Lint - run: pnpm lint - - typecheck: - runs-on: ubuntu-latest - steps: - - name: Checkout repository - uses: actions/checkout@v4 - - - name: Setup Node.js - uses: actions/setup-node@v4 - with: - node-version-file: '.nvmrc' - - - name: Install pnpm - uses: pnpm/action-setup@v4 - - - name: Get pnpm store directory - id: pnpm-cache - run: echo "STORE_PATH=$(pnpm store path)" >> $GITHUB_OUTPUT - - - name: Setup pnpm cache - uses: actions/cache@v4 - with: - path: ${{ steps.pnpm-cache.outputs.STORE_PATH }} - key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }} - restore-keys: | - ${{ runner.os }}-pnpm-store- - - - name: Install dependencies - run: pnpm install --frozen-lockfile - - - name: Type check - run: pnpm typecheck - - test: - runs-on: ubuntu-latest - steps: - - name: Checkout repository - uses: actions/checkout@v4 - - - name: Setup Node.js - uses: actions/setup-node@v4 - with: - node-version-file: '.nvmrc' - - - name: Install pnpm - uses: pnpm/action-setup@v4 - - - name: Get pnpm store directory - id: pnpm-cache - run: echo "STORE_PATH=$(pnpm store path)" >> $GITHUB_OUTPUT - - - name: Setup pnpm cache - uses: actions/cache@v4 - with: - path: ${{ steps.pnpm-cache.outputs.STORE_PATH }} - key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }} - restore-keys: | - ${{ runner.os }}-pnpm-store- - - - name: Install dependencies - run: pnpm install --frozen-lockfile - - - name: Run tests - run: pnpm test - - knip: - env: - SKIP_ENV_VALIDATION: true - runs-on: ubuntu-latest - steps: - - name: Checkout repository - uses: actions/checkout@v4 - - - name: Setup Node.js - uses: actions/setup-node@v4 - with: - node-version-file: '.nvmrc' - - - name: Install pnpm - uses: pnpm/action-setup@v4 - - - name: Get pnpm store directory - id: pnpm-cache - run: echo "STORE_PATH=$(pnpm store path)" >> $GITHUB_OUTPUT - - - name: Setup pnpm cache - uses: actions/cache@v4 - with: - path: ${{ steps.pnpm-cache.outputs.STORE_PATH }} 
- key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }} - restore-keys: | - ${{ runner.os }}-pnpm-store- - - - name: Install dependencies - run: pnpm install --frozen-lockfile - - - name: Run knip - run: pnpm knip + - name: ${{ matrix.name }} + run: ${{ matrix.command }} diff --git a/.github/workflows/chromatic.yml b/.github/workflows/chromatic.yml new file mode 100644 index 000000000..5d15f7326 --- /dev/null +++ b/.github/workflows/chromatic.yml @@ -0,0 +1,115 @@ +name: 'Chromatic' +permissions: + contents: read + pull-requests: write + statuses: write + +on: + push: + workflow_dispatch: + issue_comment: + types: [created] + +jobs: + chromatic: + name: Run Chromatic + runs-on: ubuntu-latest + if: > + github.event_name == 'push' || + github.event_name == 'workflow_dispatch' || + (github.event_name == 'issue_comment' && + github.event.issue.pull_request && + startsWith(github.event.comment.body, '/chromatic')) + steps: + - name: Get PR details + if: github.event_name == 'issue_comment' + id: pr + uses: actions/github-script@v7 + with: + script: | + const pr = await github.rest.pulls.get({ + owner: context.repo.owner, + repo: context.repo.repo, + pull_number: context.issue.number + }); + + core.setOutput('ref', pr.data.head.ref); + core.setOutput('sha', pr.data.head.sha); + + await Promise.all([ + github.rest.reactions.createForIssueComment({ + owner: context.repo.owner, + repo: context.repo.repo, + comment_id: context.payload.comment.id, + content: 'rocket' + }), + github.rest.repos.createCommitStatus({ + owner: context.repo.owner, + repo: context.repo.repo, + sha: pr.data.head.sha, + state: 'pending', + context: 'Chromatic', + description: 'Visual tests running...' 
+ }) + ]); + + - name: Setup + uses: complexdatacollective/github-actions/setup-pnpm@v1 + with: + fetch-depth: '0' + ref: ${{ steps.pr.outputs.ref || '' }} + + - name: Run Chromatic + id: chromatic + uses: chromaui/action@latest + with: + projectToken: ${{ secrets.CHROMATIC_PROJECT_TOKEN }} + autoAcceptChanges: main + onlyChanged: true + + - name: Post links to PR + if: github.event_name == 'issue_comment' + uses: actions/github-script@v7 + env: + BRANCH: ${{ steps.pr.outputs.ref }} + with: + script: | + const branch = process.env.BRANCH; + const encodedBranch = encodeURIComponent(branch); + const body = [ + '### Chromatic', + '', + `| | |`, + `|---|---|`, + `| **Storybook** | [View on Chromatic](https://${encodedBranch}--68b1958ee9350657446b5406.chromatic.com) |`, + `| **Library** | [View on Chromatic](https://www.chromatic.com/library?appId=68b1958ee9350657446b5406&branch=${encodedBranch}) |`, + ].join('\n'); + + await github.rest.issues.createComment({ + owner: context.repo.owner, + repo: context.repo.repo, + issue_number: context.issue.number, + body + }); + + - name: Update commit status + if: github.event_name == 'issue_comment' && always() + uses: actions/github-script@v7 + env: + CHROMATIC_OUTCOME: ${{ steps.chromatic.outcome }} + PR_SHA: ${{ steps.pr.outputs.sha }} + BUILD_URL: ${{ steps.chromatic.outputs.buildUrl }} + with: + script: | + const success = process.env.CHROMATIC_OUTCOME === 'success'; + await github.rest.repos.createCommitStatus({ + owner: context.repo.owner, + repo: context.repo.repo, + sha: process.env.PR_SHA, + state: success ? 'success' : 'failure', + context: 'Chromatic', + description: success + ? 
'Visual tests passed' + : 'Visual tests failed', + target_url: process.env.BUILD_URL || undefined + }); diff --git a/.github/workflows/cleanup-e2e-reports.yml b/.github/workflows/cleanup-e2e-reports.yml new file mode 100644 index 000000000..a5dfe4188 --- /dev/null +++ b/.github/workflows/cleanup-e2e-reports.yml @@ -0,0 +1,183 @@ +name: Cleanup E2E Reports + +permissions: + contents: read + pages: write + id-token: write + +on: + schedule: + # Run weekly on Sunday at 3am UTC + - cron: '0 3 * * 0' + workflow_dispatch: + inputs: + dry_run: + description: 'Dry run (do not delete anything)' + type: boolean + default: false + max_age_days: + description: 'Delete reports older than this many days (0 = use PR status only)' + type: number + default: 0 + +jobs: + cleanup: + runs-on: ubuntu-latest + concurrency: + group: e2e-pages-deploy + cancel-in-progress: false + environment: + name: github-pages + + steps: + - name: Fetch GitHub Pages content + uses: actions/checkout@v4 + with: + ref: gh-pages + path: pages + continue-on-error: true + + - name: Check if gh-pages exists + id: check + run: | + if [ ! -d pages ] || [ ! "$(ls -A pages 2>/dev/null)" ]; then + echo "No gh-pages content found, nothing to clean up" + echo "has_content=false" >> $GITHUB_OUTPUT + else + echo "has_content=true" >> $GITHUB_OUTPUT + fi + + - name: Cleanup old reports + if: steps.check.outputs.has_content == 'true' + env: + GH_TOKEN: ${{ github.token }} + DRY_RUN: ${{ inputs.dry_run || 'false' }} + MAX_AGE_DAYS: ${{ inputs.max_age_days || '0' }} + run: | + cd pages + rm -rf .git + + cleaned=0 + kept=0 + + for dir in pr-*/; do + if [ ! -d "$dir" ]; then + continue + fi + + pr_num=$(echo "$dir" | sed 's/pr-\([0-9]*\)\//\1/') + echo "Checking PR #$pr_num..." 
+ + # Get PR state from GitHub API + pr_state=$(gh pr view "$pr_num" --repo "${{ github.repository }}" --json state --jq '.state' 2>/dev/null || echo "UNKNOWN") + + should_delete=false + reason="" + + # Check if PR is closed/merged + if [ "$pr_state" = "CLOSED" ] || [ "$pr_state" = "MERGED" ]; then + should_delete=true + reason="PR is $pr_state" + fi + + # Check age if max_age_days is set + if [ "$MAX_AGE_DAYS" != "0" ] && [ -f "$dir/index.html" ]; then + file_age_days=$(( ($(date +%s) - $(stat -c %Y "$dir/index.html" 2>/dev/null || stat -f %m "$dir/index.html" 2>/dev/null || echo 0)) / 86400 )) + if [ "$file_age_days" -gt "$MAX_AGE_DAYS" ]; then + should_delete=true + reason="Report is $file_age_days days old (max: $MAX_AGE_DAYS)" + fi + fi + + if [ "$should_delete" = "true" ]; then + if [ "$DRY_RUN" = "true" ]; then + echo " [DRY RUN] Would delete $dir ($reason)" + else + echo " Deleting $dir ($reason)" + rm -rf "$dir" + fi + cleaned=$((cleaned + 1)) + else + echo " Keeping $dir (PR state: $pr_state)" + kept=$((kept + 1)) + fi + done + + echo "" + echo "Summary: $cleaned reports to clean up, $kept reports kept" + echo "cleaned=$cleaned" >> $GITHUB_OUTPUT + echo "kept=$kept" >> $GITHUB_OUTPUT + + - name: Regenerate index page + if: steps.check.outputs.has_content == 'true' && inputs.dry_run != true + run: | + cd pages + + # Check if any pr-* directories remain + if ! ls -d pr-*/ 2>/dev/null | head -1 > /dev/null; then + # No reports left, create empty index + cat > index.html << 'EOF' + + + + + + Fresco E2E Test Reports + + + +

🎭 Fresco E2E Test Reports

+

No failed test reports available.

+ + + EOF + else + # Generate index with remaining reports + echo "
    " > index.html.tmp + for dir in pr-*/; do + if [ -d "$dir" ]; then + pr_num=$(basename "$dir") + echo "
  • $pr_num
  • " >> index.html.tmp + fi + done + echo "
" >> index.html.tmp + + cat > index.html << EOF + + + + + + Fresco E2E Test Reports + + + +

🎭 Fresco E2E Test Reports

+

Failed test reports by PR:

+ $(cat index.html.tmp) + + + EOF + rm index.html.tmp + fi + + - name: Upload Pages artifact + if: steps.check.outputs.has_content == 'true' && inputs.dry_run != true + uses: actions/upload-pages-artifact@v3 + with: + path: pages/ + + - name: Deploy to GitHub Pages + if: steps.check.outputs.has_content == 'true' && inputs.dry_run != true + uses: actions/deploy-pages@v4 diff --git a/.github/workflows/deploy-storybook.yaml b/.github/workflows/deploy-storybook.yaml deleted file mode 100644 index 145c14586..000000000 --- a/.github/workflows/deploy-storybook.yaml +++ /dev/null @@ -1,64 +0,0 @@ -name: Deploy Storybook - -on: - push: - branches: - - main - pull_request: - branches: - - main - - 'v*' - workflow_dispatch: - -permissions: - contents: read - pull-requests: write - -jobs: - deploy: - runs-on: ubuntu-latest - steps: - - name: Checkout repository - uses: actions/checkout@v4 - - - name: Setup Node.js - uses: actions/setup-node@v4 - with: - node-version-file: '.nvmrc' - - - name: Install pnpm - uses: pnpm/action-setup@v4 - - - name: Get pnpm store directory - id: pnpm-cache - run: echo "STORE_PATH=$(pnpm store path)" >> $GITHUB_OUTPUT - - - name: Setup pnpm cache - uses: actions/cache@v4 - with: - path: ${{ steps.pnpm-cache.outputs.STORE_PATH }} - key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }} - restore-keys: | - ${{ runner.os }}-pnpm-store- - - - name: Install dependencies - run: pnpm install --frozen-lockfile - - - name: Build Storybook - run: pnpm build-storybook - - - name: Deploy to Netlify - uses: nwtgck/actions-netlify@v3 - with: - publish-dir: './storybook-static' - production-branch: main - production-deploy: ${{ github.event_name == 'push' && github.ref == 'refs/heads/main' }} - github-token: ${{ secrets.GITHUB_TOKEN }} - deploy-message: "Deploy from GitHub Actions - ${{ github.event.head_commit.message || github.event.pull_request.title }}" - enable-pull-request-comment: true - enable-commit-comment: false - 
overwrites-pull-request-comment: true - env: - NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }} - NETLIFY_SITE_ID: ${{ secrets.NETLIFY_STORYBOOK_SITE_ID }} - timeout-minutes: 5 \ No newline at end of file diff --git a/.github/workflows/docker-build-pr.yml b/.github/workflows/docker-build-pr.yml deleted file mode 100644 index 35afc748e..000000000 --- a/.github/workflows/docker-build-pr.yml +++ /dev/null @@ -1,68 +0,0 @@ -name: Build Docker Image (PR) - -on: - # Allow other workflows to call this one - workflow_call: - inputs: - disable-image-optimization: - description: 'Disable Next.js image optimization for testing' - type: boolean - default: false - outputs: - artifact-name: - description: 'Name of the uploaded Docker image artifact' - value: ${{ jobs.build.outputs.artifact-name }} - image-name: - description: 'Name to use when loading the image' - value: ${{ jobs.build.outputs.image-name }} - - # Also run directly on PRs (all branches) - pull_request: - -permissions: - contents: read - -env: - IMAGE_NAME: fresco-pr - -jobs: - build: - runs-on: ubuntu-latest - - outputs: - artifact-name: ${{ steps.artifact-info.outputs.name }} - image-name: ${{ env.IMAGE_NAME }} - - steps: - - name: Checkout repository - uses: actions/checkout@v4 - - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v3 - - - name: Generate artifact name - id: artifact-info - run: | - # Use run_id for unique artifact names (works for both PR and workflow_call) - echo "name=docker-image-${{ github.run_id }}" >> $GITHUB_OUTPUT - - - name: Build Docker image - uses: docker/build-push-action@v6 - with: - context: . 
- platforms: linux/amd64 - push: false - tags: ${{ env.IMAGE_NAME }}:latest - cache-from: type=gha - cache-to: type=gha,mode=max - outputs: type=docker,dest=/tmp/image.tar - build-args: | - DISABLE_IMAGE_OPTIMIZATION=${{ inputs.disable-image-optimization || false }} - - - name: Upload Docker image artifact - uses: actions/upload-artifact@v4 - with: - name: ${{ steps.artifact-info.outputs.name }} - path: /tmp/image.tar - retention-days: 7 - compression-level: 1 # Minimal compression for faster upload/download diff --git a/.github/workflows/e2e.yml b/.github/workflows/e2e.yml index 79be69bbc..70915c927 100644 --- a/.github/workflows/e2e.yml +++ b/.github/workflows/e2e.yml @@ -2,129 +2,122 @@ name: E2E Tests permissions: contents: read + pages: write + id-token: write + pull-requests: write on: push: - branches: [main, next] + branches: [main] pull_request: + workflow_dispatch: -jobs: - # Build Docker image for PRs - calls reusable workflow - build-image-pr: - if: github.event_name == 'pull_request' - uses: ./.github/workflows/docker-build-pr.yml - with: - disable-image-optimization: true +concurrency: + group: e2e-${{ github.ref }} + cancel-in-progress: true - # Build Docker image for push events - builds inline - build-image-push: - if: github.event_name == 'push' +jobs: + e2e: runs-on: ubuntu-latest + timeout-minutes: 45 outputs: - artifact-name: docker-image-push-${{ github.run_id }} - image-name: fresco + outcome: ${{ steps.tests.outcome }} + slug: ${{ steps.meta.outputs.slug }} steps: - - name: Checkout repository - uses: actions/checkout@v4 + - uses: actions/checkout@v4 + + - name: Compute branch slug + id: meta + env: + REF: ${{ github.head_ref || github.ref_name }} + run: | + SLUG=$(printf '%s' "$REF" | tr '/' '-' | tr '[:upper:]' '[:lower:]') + printf 'slug=%s\n' "$SLUG" >> "$GITHUB_OUTPUT" - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v3 + - name: Run E2E tests + id: tests + continue-on-error: true + run: ./scripts/run-e2e.sh - - name: 
Build Docker image - uses: docker/build-push-action@v6 - with: - context: . - platforms: linux/amd64 - push: false - tags: fresco:test - cache-from: type=gha - cache-to: type=gha,mode=max - outputs: type=docker,dest=/tmp/image.tar - build-args: | - DISABLE_IMAGE_OPTIMIZATION=true + - name: Reclaim artifact ownership + if: always() + run: sudo chown -R "$(id -u):$(id -g)" tests/e2e/playwright-report tests/e2e/test-results 2>/dev/null || true - - name: Upload Docker image artifact + - name: Upload report + if: always() uses: actions/upload-artifact@v4 with: - name: docker-image-push-${{ github.run_id }} - path: /tmp/image.tar - retention-days: 1 - compression-level: 1 + name: playwright-report + path: tests/e2e/playwright-report/ + if-no-files-found: warn + retention-days: 14 - # Run E2E tests using whichever image was built - e2e: + deploy-report: + if: always() + needs: e2e runs-on: ubuntu-latest - timeout-minutes: 30 - needs: [build-image-pr, build-image-push] - if: always() && (needs.build-image-pr.result == 'success' || needs.build-image-push.result == 'success') - + concurrency: + group: e2e-pages-deploy + cancel-in-progress: false + environment: + name: github-pages + url: https://complexdatacollective.github.io/Fresco/${{ needs.e2e.outputs.slug }}/ steps: - - name: Checkout code - uses: actions/checkout@v4 - - - name: Setup Node.js - uses: actions/setup-node@v4 - with: - node-version-file: '.nvmrc' - - - name: Install pnpm - uses: pnpm/action-setup@v4 - - - name: Get pnpm store directory - id: pnpm-cache - run: echo "STORE_PATH=$(pnpm store path)" >> $GITHUB_OUTPUT - - - name: Setup pnpm cache - uses: actions/cache@v4 - with: - path: ${{ steps.pnpm-cache.outputs.STORE_PATH }} - key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }} - restore-keys: | - ${{ runner.os }}-pnpm-store- - - - name: Install dependencies - run: pnpm install --frozen-lockfile - - - name: Install Playwright browsers - run: pnpm playwright install --with-deps - - - name: 
Download Docker image (PR) - if: github.event_name == 'pull_request' + - name: Download new report uses: actions/download-artifact@v4 with: - name: ${{ needs.build-image-pr.outputs.artifact-name }} - path: /tmp + name: playwright-report + path: new-report - - name: Download Docker image (Push) - if: github.event_name == 'push' - uses: actions/download-artifact@v4 + - name: Fetch existing pages content + uses: actions/checkout@v4 with: - name: ${{ needs.build-image-push.outputs.artifact-name }} - path: /tmp - - - name: Load Docker image - run: docker load --input /tmp/image.tar + ref: gh-pages + path: existing-pages + continue-on-error: true - - name: Run E2E tests - run: pnpm test:e2e + - name: Merge report into branch subdirectory env: - CI: true - TEST_IMAGE_NAME: ${{ github.event_name == 'pull_request' && format('{0}:latest', needs.build-image-pr.outputs.image-name) || 'fresco:test' }} - - - name: Upload test results - if: always() - uses: actions/upload-artifact@v4 + SLUG: ${{ needs.e2e.outputs.slug }} + run: | + mkdir -p merged + if [ -d existing-pages ] && [ "$(ls -A existing-pages 2>/dev/null)" ]; then + cp -r existing-pages/. merged/ + rm -rf merged/.git + fi + rm -rf "merged/$SLUG" + mkdir -p "merged/$SLUG" + cp -r new-report/. 
"merged/$SLUG/" + + - uses: actions/upload-pages-artifact@v3 with: - name: playwright-report - path: | - tests/e2e/playwright-report/ - tests/e2e/test-results/ - retention-days: 30 + path: merged/ - - name: Upload test videos - if: failure() - uses: actions/upload-artifact@v4 + - uses: actions/deploy-pages@v4 + + - name: Comment on PR + if: github.event_name == 'pull_request' + uses: actions/github-script@v7 + env: + SLUG: ${{ needs.e2e.outputs.slug }} + OUTCOME: ${{ needs.e2e.outputs.outcome }} with: - name: test-videos - path: tests/e2e/test-results/**/*.webm - retention-days: 7 + script: | + const { SLUG, OUTCOME } = process.env; + const url = `https://complexdatacollective.github.io/Fresco/${SLUG}/`; + const emoji = OUTCOME === 'success' ? '✅' : '❌'; + await github.rest.issues.createComment({ + owner: context.repo.owner, + repo: context.repo.repo, + issue_number: context.issue.number, + body: `${emoji} E2E tests — **${OUTCOME}**\n\n📊 [View report](${url})`, + }); + + result: + if: always() + needs: e2e + runs-on: ubuntu-latest + steps: + - name: Fail workflow if tests failed + if: needs.e2e.outputs.outcome != 'success' + run: exit 1 diff --git a/.github/workflows/netlify-cleanup-preview.yml b/.github/workflows/netlify-cleanup-preview.yml new file mode 100644 index 000000000..c31179850 --- /dev/null +++ b/.github/workflows/netlify-cleanup-preview.yml @@ -0,0 +1,26 @@ +name: Cleanup Deploy Preview +on: + pull_request: + types: [closed] +permissions: + contents: read +jobs: + delete-preview: + runs-on: ubuntu-latest + env: + BRANCH_NAME: ${{ github.event.pull_request.head.ref }} + PR_NUMBER: ${{ github.event.number }} + steps: + - uses: oven-sh/setup-bun@v2 + - name: Delete Neon Branch + env: + NEON_API_KEY: ${{ secrets.NEON_API_KEY }} + run: bunx neonctl branches delete "preview/pr-$PR_NUMBER-$BRANCH_NAME" --project-id ${{ vars.NEON_PROJECT_ID }} + + - name: Remove branch-scoped Netlify env vars + env: + NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }} + 
NETLIFY_SITE_ID: ${{ secrets.NETLIFY_SITE_ID }} + run: | + bunx netlify-cli env:unset DATABASE_URL --context "branch:$BRANCH_NAME" + bunx netlify-cli env:unset DATABASE_URL_UNPOOLED --context "branch:$BRANCH_NAME" diff --git a/.github/workflows/netlify-deploy-preview.yml b/.github/workflows/netlify-deploy-preview.yml new file mode 100644 index 000000000..ce4024e73 --- /dev/null +++ b/.github/workflows/netlify-deploy-preview.yml @@ -0,0 +1,115 @@ +name: Deploy Preview + +on: + pull_request: + +permissions: + contents: read + pull-requests: write + +jobs: + deploy-preview: + runs-on: ubuntu-latest + env: + NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }} + NETLIFY_SITE_ID: ${{ secrets.NETLIFY_SITE_ID }} + steps: + - name: Get branch name + id: branch-name + uses: tj-actions/branch-names@v9 + + - name: Create Neon Branch + id: create-branch + uses: neondatabase/create-branch-action@v6 + with: + project_id: ${{ vars.NEON_PROJECT_ID }} + branch_name: preview/pr-${{ github.event.number }}-${{ steps.branch-name.outputs.current_branch }} + api_key: ${{ secrets.NEON_API_KEY }} + + - name: Setup + uses: complexdatacollective/github-actions/setup-pnpm@v1 + + - name: Install Netlify CLI + run: pnpm add -g netlify-cli + + - name: Get branch preview URL + id: branch-preview + env: + BRANCH: ${{ steps.branch-name.outputs.current_branch }} + run: | + SUBDOMAIN=$(curl -s -H "Authorization: Bearer $NETLIFY_AUTH_TOKEN" \ + "https://api.netlify.com/api/v1/sites/$NETLIFY_SITE_ID" | jq -r '.name') + BRANCH_SLUG=$(echo "$BRANCH" | tr '[:upper:]' '[:lower:]' | sed 's/[^a-z0-9-]/-/g' | sed 's/--*/-/g' | sed 's/^-//;s/-$//') + echo "url=https://${BRANCH_SLUG}--${SUBDOMAIN}.netlify.app" >> "$GITHUB_OUTPUT" + + - name: Write .env file + env: + NEON_DB_URL: ${{ steps.create-branch.outputs.db_url }} + NEON_DB_URL_POOLED: ${{ steps.create-branch.outputs.db_url_pooled }} + run: | + # Pull down the environment variables for the deploy-preview context + netlify env:list --context 
deploy-preview --plain >> .env + # Add the database connection URLs to the .env file + echo "DATABASE_URL_UNPOOLED=$NEON_DB_URL" >> .env + echo "DATABASE_URL=$NEON_DB_URL_POOLED" >> .env + + - name: Run Migrations + run: npx tsx scripts/setup-database.ts + + - name: Run Initialization + run: npx tsx scripts/initialize.ts + + - name: Set Netlify runtime environment variables + env: + BRANCH: ${{ steps.branch-name.outputs.current_branch }} + DB_URL_POOLED: ${{ steps.create-branch.outputs.db_url_pooled }} + DB_URL: ${{ steps.create-branch.outputs.db_url }} + run: | + netlify env:set DATABASE_URL "$DB_URL_POOLED" --context "branch:$BRANCH" + netlify env:set DATABASE_URL_UNPOOLED "$DB_URL" --context "branch:$BRANCH" + + - name: Check for existing deploy comment + id: find-comment + if: success() + env: + GH_TOKEN: ${{ github.token }} + PR_NUMBER: ${{ github.event.pull_request.number }} + REPO: ${{ github.repository }} + run: | + FOUND=$(gh api "repos/$REPO/issues/$PR_NUMBER/comments" \ + --jq '[.[] | select(.body | contains(""))] | length') + if [ "$FOUND" -gt 0 ]; then + echo "exists=true" >> "$GITHUB_OUTPUT" + else + echo "exists=false" >> "$GITHUB_OUTPUT" + fi + + - name: Comment on Pull Request + if: success() && steps.find-comment.outputs.exists != 'true' + uses: thollander/actions-comment-pull-request@v2 + with: + comment_tag: deploy-preview + message: | + | Resource | Link | + |----------|------| + | Branch Preview 🌐 | ${{ steps.branch-preview.outputs.url }} | + | Neon branch 🐘 | https://console.neon.tech/app/projects/${{ vars.NEON_PROJECT_ID }}/branches/${{ steps.create-branch.outputs.branch_id }} | + + - name: Remove failure comment on success + if: success() + continue-on-error: true + uses: thollander/actions-comment-pull-request@v2 + with: + comment_tag: deploy-preview-failure + mode: delete + message: '' + + - name: Comment on failure + if: failure() + uses: thollander/actions-comment-pull-request@v2 + with: + comment_tag: deploy-preview-failure + message: 
| + ⚠️ **Branch preview setup failed** + + Check the [workflow run](${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}) for details. diff --git a/.gitignore b/.gitignore index b0861c59f..81b6d4721 100644 --- a/.gitignore +++ b/.gitignore @@ -47,3 +47,36 @@ yarn-error.log* *storybook.log storybook-static + +# e2e testing +/tests/e2e/playwright-report/ +/tests/e2e/test-results/ +/tests/e2e/test-results-*/ +/tests/e2e/screenshots/ +/tests/e2e/.auth/ +/tests/e2e/.context-data.json +/tests/e2e/.context/ +/tests/e2e/.db-snapshots/ +/.e2e-assets/ +/playwright-report/ +/test-results/ +/playwright/.cache/ + +# Serena +.serena + +# playwright MCP +./playwright-mcp + +.pnpm-store +.pnpm-docker-store + +# Local Netlify folder +.netlify + +# Git worktrees +.worktrees + +# plans +/docs/plans +/docs/superpowers diff --git a/.prettierrc b/.prettierrc index 30dc564a1..0f9304eab 100644 --- a/.prettierrc +++ b/.prettierrc @@ -2,5 +2,8 @@ "plugins": ["prettier-plugin-tailwindcss"], "printWidth": 80, "quoteProps": "consistent", - "singleQuote": true + "singleQuote": true, + "tabWidth": 2, + "useTabs": false, + "tailwindFunctions": ["cva", "cx"] } diff --git a/.serena/.gitignore b/.serena/.gitignore deleted file mode 100644 index 14d86ad62..000000000 --- a/.serena/.gitignore +++ /dev/null @@ -1 +0,0 @@ -/cache diff --git a/.serena/memories/code_style_conventions.md b/.serena/memories/code_style_conventions.md deleted file mode 100644 index c687f2fbb..000000000 --- a/.serena/memories/code_style_conventions.md +++ /dev/null @@ -1,33 +0,0 @@ -# Code Style and Conventions - -## TypeScript Configuration -- Strict mode enabled -- noUncheckedIndexedAccess enabled -- ESModule imports/exports -- Path mapping with `~/*` for project root - -## ESLint Rules -- TypeScript strict rules enabled -- Consistent type definitions (prefer `type` over `interface`) -- Type imports preferred with inline syntax -- No unused variables (args starting with `_` ignored) -- No 
console statements (use proper logging) -- No direct process.env access - -## Code Style -- **Prettier**: Single quotes, 80 character line width, Tailwind CSS plugin -- **File Extensions**: `.tsx` for React components, `.ts` for utilities -- **Import Style**: Type imports inline, consistent type imports -- **Naming**: camelCase for variables/functions, PascalCase for components - -## Component Structure -- React functional components with TypeScript -- Props typed with explicit interfaces/types -- Default exports for pages and main components -- Named exports for utilities and hooks - -## Database -- Prisma ORM with PostgreSQL -- cuid() for IDs -- Proper indexing on foreign keys -- Json fields for complex data (protocols, networks) \ No newline at end of file diff --git a/.serena/memories/codebase_structure.md b/.serena/memories/codebase_structure.md deleted file mode 100644 index b8b348adb..000000000 --- a/.serena/memories/codebase_structure.md +++ /dev/null @@ -1,40 +0,0 @@ -# Codebase Structure - -## Main Directories - -### `/app` - Next.js App Router -- **`(blobs)/`** - Setup and authentication pages -- **`(interview)/`** - Interview interface and routing -- **`api/`** - API routes (analytics, uploadthing) -- **`dashboard/`** - Admin dashboard pages and components - -### `/components` - Shared UI Components -- **`ui/`** - Base UI components (shadcn/ui based) -- **`data-table/`** - Data table components -- **`layout/`** - Layout components - -### `/lib` - Core Libraries -- **`interviewer/`** - Network Canvas interview engine - - `behaviors/` - Drag & drop, form behaviors - - `components/` - Interview UI components - - `containers/` - Interface containers - - `ducks/` - Redux state management -- **`network-exporters/`** - Data export functionality -- **`ui/`** - UI library components - -### `/actions` - Server Actions -Server-side functions for data operations - -### `/queries` - Database Queries -Prisma-based data fetching functions - -### `/schemas` - 
Validation Schemas -Zod schemas for data validation - -### `/utils` - Utility Functions -Helper functions and utilities - -## Key Files -- **`prisma/schema.prisma`** - Database schema -- **`env.js`** - Environment validation -- **`fresco.config.ts`** - Application configuration \ No newline at end of file diff --git a/.serena/memories/project_overview.md b/.serena/memories/project_overview.md deleted file mode 100644 index 6b9f6cb0f..000000000 --- a/.serena/memories/project_overview.md +++ /dev/null @@ -1,23 +0,0 @@ -# Fresco Project Overview - -## Purpose -Fresco brings Network Canvas interviews to the web browser. It's a pilot project that provides a new way to conduct network interviews without adding new features to Network Canvas. - -## Tech Stack -- **Framework**: Next.js 14 with TypeScript -- **Database**: PostgreSQL with Prisma ORM -- **Authentication**: Lucia Auth -- **UI**: Tailwind CSS with Radix UI components -- **State Management**: Redux Toolkit for interviewer components -- **File Uploads**: UploadThing -- **Testing**: Vitest with React Testing Library -- **E2E Testing**: Playwright -- **Package Manager**: pnpm - -## Key Features -- Web-based network interviews -- Protocol upload and management -- Participant management -- Interview management with export capabilities -- Dashboard for administrators -- Real-time interview interface \ No newline at end of file diff --git a/.serena/memories/suggested_commands.md b/.serena/memories/suggested_commands.md deleted file mode 100644 index 1dc29f7f5..000000000 --- a/.serena/memories/suggested_commands.md +++ /dev/null @@ -1,31 +0,0 @@ -# Suggested Development Commands - -## Development -- `pnpm dev` - Start development server (includes Docker database setup) -- `pnpm build` - Build the application -- `pnpm start` - Start production server - -## Code Quality -- `pnpm lint` - Run ESLint (with env validation skipped) -- `pnpm ts-lint` - Run TypeScript type checking -- `pnpm ts-lint:watch` - Run TypeScript type 
checking in watch mode - -## Testing -- `pnpm test` - Run Vitest tests -- `pnpm load-test` - Run load testing with K6 - -## Database -- `npx prisma generate` - Generate Prisma client -- `npx prisma db push` - Push schema changes to database -- `npx prisma studio` - Open Prisma Studio - -## Utilities -- `pnpm knip` - Check for unused dependencies and exports -- `npx prettier --write .` - Format code with Prettier - -## System Commands (macOS) -- `ls` - List directory contents -- `cd` - Change directory -- `grep` - Search text patterns -- `find` - Find files and directories -- `git` - Git version control \ No newline at end of file diff --git a/.serena/memories/task_completion_checklist.md b/.serena/memories/task_completion_checklist.md deleted file mode 100644 index 04a729b2b..000000000 --- a/.serena/memories/task_completion_checklist.md +++ /dev/null @@ -1,39 +0,0 @@ -# Task Completion Checklist - -When completing any coding task, always run these commands in order: - -## 1. Type Checking -```bash -pnpm ts-lint -``` -Fix any TypeScript errors before proceeding. - -## 2. Linting -```bash -pnpm lint --fix -``` -This will automatically fix many ESLint issues. Fix any remaining issues manually. - -## 3. Code Formatting -```bash -npx prettier --write . -``` -Format all code according to project standards. - -## 4. Testing (if applicable) -```bash -pnpm test -``` -Run tests to ensure functionality is working correctly. - -## 5. Build Verification -```bash -pnpm build -``` -Verify the application builds successfully. 
- -## Additional Checks -- Ensure no `console.log` statements are left in production code -- Verify proper TypeScript types are used -- Check that imports use the `~/` path mapping where appropriate -- Ensure proper error handling is in place \ No newline at end of file diff --git a/.serena/project.yml b/.serena/project.yml deleted file mode 100644 index eee8c06a0..000000000 --- a/.serena/project.yml +++ /dev/null @@ -1,68 +0,0 @@ -# language of the project (csharp, python, rust, java, typescript, go, cpp, or ruby) -# * For C, use cpp -# * For JavaScript, use typescript -# Special requirements: -# * csharp: Requires the presence of a .sln file in the project folder. -language: typescript - -# whether to use the project's gitignore file to ignore files -# Added on 2025-04-07 -ignore_all_files_in_gitignore: true -# list of additional paths to ignore -# same syntax as gitignore, so you can use * and ** -# Was previously called `ignored_dirs`, please update your config if you are using that. -# Added (renamed)on 2025-04-07 -ignored_paths: [] - -# whether the project is in read-only mode -# If set to true, all editing tools will be disabled and attempts to use them will result in an error -# Added on 2025-04-18 -read_only: false - - -# list of tool names to exclude. We recommend not excluding any tools, see the readme for more details. -# Below is the complete list of tools for convenience. -# To make sure you have the latest list of tools, and to view their descriptions, -# execute `uv run scripts/print_tool_overview.py`. -# -# * `activate_project`: Activates a project by name. -# * `check_onboarding_performed`: Checks whether project onboarding was already performed. -# * `create_text_file`: Creates/overwrites a file in the project directory. -# * `delete_lines`: Deletes a range of lines within a file. -# * `delete_memory`: Deletes a memory from Serena's project-specific memory store. -# * `execute_shell_command`: Executes a shell command. 
-# * `find_referencing_code_snippets`: Finds code snippets in which the symbol at the given location is referenced. -# * `find_referencing_symbols`: Finds symbols that reference the symbol at the given location (optionally filtered by type). -# * `find_symbol`: Performs a global (or local) search for symbols with/containing a given name/substring (optionally filtered by type). -# * `get_current_config`: Prints the current configuration of the agent, including the active and available projects, tools, contexts, and modes. -# * `get_symbols_overview`: Gets an overview of the top-level symbols defined in a given file or directory. -# * `initial_instructions`: Gets the initial instructions for the current project. -# Should only be used in settings where the system prompt cannot be set, -# e.g. in clients you have no control over, like Claude Desktop. -# * `insert_after_symbol`: Inserts content after the end of the definition of a given symbol. -# * `insert_at_line`: Inserts content at a given line in a file. -# * `insert_before_symbol`: Inserts content before the beginning of the definition of a given symbol. -# * `list_dir`: Lists files and directories in the given directory (optionally with recursion). -# * `list_memories`: Lists memories in Serena's project-specific memory store. -# * `onboarding`: Performs onboarding (identifying the project structure and essential tasks, e.g. for testing or building). -# * `prepare_for_new_conversation`: Provides instructions for preparing for a new conversation (in order to continue with the necessary context). -# * `read_file`: Reads a file within the project directory. -# * `read_memory`: Reads the memory with the given name from Serena's project-specific memory store. -# * `remove_project`: Removes a project from the Serena configuration. -# * `replace_lines`: Replaces a range of lines within a file with new content. -# * `replace_symbol_body`: Replaces the full definition of a symbol. 
-# * `restart_language_server`: Restarts the language server, may be necessary when edits not through Serena happen. -# * `search_for_pattern`: Performs a search for a pattern in the project. -# * `summarize_changes`: Provides instructions for summarizing the changes made to the codebase. -# * `switch_modes`: Activates modes by providing a list of their names -# * `think_about_collected_information`: Thinking tool for pondering the completeness of collected information. -# * `think_about_task_adherence`: Thinking tool for determining whether the agent is still on track with the current task. -# * `think_about_whether_you_are_done`: Thinking tool for determining whether the task is truly completed. -# * `write_memory`: Writes a named memory (for future reference) to Serena's project-specific memory store. -excluded_tools: [] - -# initial prompt for the project. It will always be given to the LLM upon activating the project -# (contrary to the memories, which are loaded on demand). -initial_prompt: "" - -project_name: "Fresco" diff --git a/.storybook/StoryInterviewShell.tsx b/.storybook/StoryInterviewShell.tsx new file mode 100644 index 000000000..3f346ffa8 --- /dev/null +++ b/.storybook/StoryInterviewShell.tsx @@ -0,0 +1,301 @@ +'use client'; +'use no memo'; + +import { type Middleware } from '@reduxjs/toolkit'; +import { AnimatePresence, motion } from 'motion/react'; +import { + type ElementType, + useCallback, + useLayoutEffect, + useMemo, + useRef, + useState, +} from 'react'; +import { Provider, useDispatch, useSelector } from 'react-redux'; +import SuperJSON from 'superjson'; +import DialogProvider from '~/lib/dialogs/DialogProvider'; +import useMediaQuery from '~/hooks/useMediaQuery'; +import { InterviewToastProvider } from '~/lib/interviewer/components/InterviewToast'; +import Navigation from '~/lib/interviewer/components/Navigation'; +import StageErrorBoundary from '~/lib/interviewer/components/StageErrorBoundary'; +import { StageMetadataProvider } from 
'~/lib/interviewer/contexts/StageMetadataContext'; +import { + updatePrompt, + updateStage, +} from '~/lib/interviewer/ducks/modules/session'; +import useReadyForNextStage from '~/lib/interviewer/hooks/useReadyForNextStage'; +import getInterface from '~/lib/interviewer/Interfaces'; +import { + getCurrentStage, + getNavigationInfo, + getPromptCount, + getStageCount, +} from '~/lib/interviewer/selectors/session'; +import { getNavigableStages } from '~/lib/interviewer/selectors/skip-logic'; +import { calculateProgress } from '~/lib/interviewer/selectors/utils'; +import { store } from '~/lib/interviewer/store'; +import { + type BeforeNextFunction, + type Direction, + type RegisterBeforeNext, + type StageProps, +} from '~/lib/interviewer/types'; +import { type GetInterviewByIdQuery } from '~/queries/interviews'; +import { cx } from '~/utils/cva'; + +const variants = { + initial: { opacity: 0 }, + animate: { opacity: 1, transition: { when: 'beforeChildren' } }, + exit: { opacity: 0, transition: { when: 'afterChildren' } }, +}; + +function StoryInterview() { + const dispatch = useDispatch(); + + const [forceNavigationDisabled, setForceNavigationDisabled] = useState(false); + const [showStage, setShowStage] = useState(false); + const pendingStepRef = useRef(null); + const isTransitioningRef = useRef(false); + + useLayoutEffect(() => { + setShowStage(true); + }, []); + + const stage = useSelector(getCurrentStage); + const CurrentInterface = stage + ? 
(getInterface(stage.type) as ElementType) + : null; + + const { isReady: isReadyForNextStage } = useReadyForNextStage(); + const { currentStep, isLastPrompt, isFirstPrompt, promptIndex } = + useSelector(getNavigationInfo); + const { nextValidStageIndex, previousValidStageIndex } = + useSelector(getNavigableStages); + const stageCount = useSelector(getStageCount); + const promptCount = useSelector(getPromptCount); + + const nextValidStageIndexRef = useRef(nextValidStageIndex); + const previousValidStageIndexRef = useRef(previousValidStageIndex); + nextValidStageIndexRef.current = nextValidStageIndex; + previousValidStageIndexRef.current = previousValidStageIndex; + + const [progress, setProgress] = useState( + calculateProgress(currentStep, stageCount, promptIndex, promptCount), + ); + + const beforeNextHandlers = useRef(new Map()); + const registerBeforeNext: RegisterBeforeNext = useCallback( + ( + ...args: [BeforeNextFunction | null] | [string, BeforeNextFunction | null] + ) => { + if (args.length === 1) { + const [fn] = args; + if (fn === null) { + beforeNextHandlers.current.clear(); + } else { + beforeNextHandlers.current.set('default', fn); + } + } else { + const [key, fn] = args; + if (fn === null) { + beforeNextHandlers.current.delete(key); + } else { + beforeNextHandlers.current.set(key, fn); + } + } + }, + [], + ) as RegisterBeforeNext; + + const canNavigate = async (direction: Direction) => { + const handlers = beforeNextHandlers.current; + if (handlers.size === 0) return true; + + let hasForce = false; + for (const fn of handlers.values()) { + const result = await fn(direction); + if (result === false) return false; + if (result === 'FORCE') hasForce = true; + } + return hasForce ? 
'FORCE' : true; + }; + + const navigateToStep = useCallback( + (targetStep: number) => { + setProgress(calculateProgress(targetStep, stageCount, 0, promptCount)); + beforeNextHandlers.current.clear(); + pendingStepRef.current = targetStep; + isTransitioningRef.current = true; + setShowStage(false); + }, + [stageCount, promptCount], + ); + + const moveForward = useCallback(async () => { + if (isTransitioningRef.current) return; + setForceNavigationDisabled(true); + + const stageAllowsNavigation = await canNavigate('forwards'); + if (stageAllowsNavigation) { + if (stageAllowsNavigation !== 'FORCE' && !isLastPrompt) { + dispatch(updatePrompt(promptIndex + 1)); + } else { + navigateToStep(nextValidStageIndexRef.current); + } + } + + setForceNavigationDisabled(false); + }, [dispatch, isLastPrompt, promptIndex, navigateToStep]); + + const moveBackward = useCallback(async () => { + if (isTransitioningRef.current) return; + setForceNavigationDisabled(true); + + const stageAllowsNavigation = await canNavigate('backwards'); + if (stageAllowsNavigation) { + if (stageAllowsNavigation !== 'FORCE' && !isFirstPrompt) { + dispatch(updatePrompt(promptIndex - 1)); + } else { + navigateToStep(previousValidStageIndexRef.current); + } + } + + setForceNavigationDisabled(false); + }, [dispatch, isFirstPrompt, promptIndex, navigateToStep]); + + const getNavigationHelpers = useCallback( + () => ({ moveForward, moveBackward }), + [moveForward, moveBackward], + ); + + const handleExitComplete = useCallback(() => { + const target = pendingStepRef.current; + if (target === null) return; + + beforeNextHandlers.current.clear(); + dispatch(updateStage(target)); + pendingStepRef.current = null; + setShowStage(true); + isTransitioningRef.current = false; + }, [dispatch]); + + const forwardButtonRef = useRef(null); + const backButtonRef = useRef(null); + const isPortraitAspectRatio = useMediaQuery('(max-aspect-ratio: 3/4)'); + const navigationOrientation = isPortraitAspectRatio + ? 
'horizontal' + : 'vertical'; + + const { canMoveForward, canMoveBackward } = useSelector(getNavigationInfo); + + return ( +
+ + + + {showStage && stage && ( + +
+ + {CurrentInterface && ( + + )} + +
+
+ )} +
+
+
+ +
+ ); +} + +const StoryInterviewShell = (props: { + rawPayload: string; + disableSync?: boolean; + onAction?: (action: { type: string; payload?: unknown }) => void; +}) => { + const decodedPayload = useMemo( + () => SuperJSON.parse>(props.rawPayload), + [props.rawPayload], + ); + + const actionMiddleware: Middleware | undefined = useMemo(() => { + if (!props.onAction) return undefined; + const callback = props.onAction; + const middleware: Middleware = () => (next) => (action) => { + const result = next(action); + if ( + typeof action === 'object' && + action !== null && + 'type' in action && + typeof action.type === 'string' + ) { + callback(action as { type: string; payload?: unknown }); + } + return result; + }; + return middleware; + }, [props.onAction]); + + const storeInstance = useMemo( + () => + store(decodedPayload, { + disableSync: props.disableSync, + extraMiddleware: actionMiddleware ? [actionMiddleware] : undefined, + }), + [decodedPayload, props.disableSync, actionMiddleware], + ); + + return ( + + + + + + ); +}; + +export default StoryInterviewShell; diff --git a/.storybook/main.ts b/.storybook/main.ts index ec1e6850e..7a1621ecf 100644 --- a/.storybook/main.ts +++ b/.storybook/main.ts @@ -1,16 +1,30 @@ -import type { StorybookConfig } from '@storybook/nextjs'; +import { defineMain } from '@storybook/nextjs-vite/node'; +import { stubUseServer } from './vite-plugin-stub-use-server.ts'; -const config: StorybookConfig = { - "stories": [ - "../stories/**/*.stories.@(js|jsx|mjs|ts|tsx)" +export default defineMain({ + addons: [ + '@storybook/addon-docs', + '@storybook/addon-a11y', + '@storybook/addon-vitest', + '@chromatic-com/storybook', ], - "addons": [], - "framework": { - "name": "@storybook/nextjs", - "options": {} + framework: { + name: '@storybook/nextjs-vite', + options: { + builder: { + // Customize the Vite builder options here + viteConfigPath: './vitest.config.ts', + }, + }, }, - "staticDirs": [ - "../public" - ] -}; -export default config; \ No 
newline at end of file + staticDirs: ['../public'], + typescript: { + check: false, + }, + stories: ['../**/*.stories.@(js|jsx|mjs|ts|tsx|mdx)'], + + viteFinal(config) { + config.plugins = [stubUseServer(), ...(config.plugins ?? [])]; + return config; + }, +}); diff --git a/.storybook/preview.ts b/.storybook/preview.ts deleted file mode 100644 index 73e6da9cf..000000000 --- a/.storybook/preview.ts +++ /dev/null @@ -1,14 +0,0 @@ -import type { Preview } from '@storybook/nextjs' - -const preview: Preview = { - parameters: { - controls: { - matchers: { - color: /(background|color)$/i, - date: /Date$/i, - }, - }, - }, -}; - -export default preview; \ No newline at end of file diff --git a/.storybook/preview.tsx b/.storybook/preview.tsx new file mode 100644 index 000000000..3d60b0e9e --- /dev/null +++ b/.storybook/preview.tsx @@ -0,0 +1,107 @@ +import addonA11y from '@storybook/addon-a11y'; +import addonDocs from '@storybook/addon-docs'; +import addonVitest from '@storybook/addon-vitest'; +import { definePreview } from '@storybook/nextjs-vite'; +import isChromatic from 'chromatic/isChromatic'; +import { NuqsTestingAdapter } from 'nuqs/adapters/testing'; +import { StrictMode } from 'react'; +import Providers from '../components/Providers'; +import '../styles/globals.css'; +import '../styles/themes/default.css'; +import '../styles/themes/interview.css'; +import { getInitialTheme, globalTypes, withTheme } from './theme-switcher'; + +// @chromatic-com/storybook is not included here because it doesn't export a +// CSF Next compatible preview addon. It only provides server-side preset +// functionality and manager UI, so it's configured in main.ts only. 
+// See: https://github.com/chromaui/addon-visual-tests/pull/404 + +export default definePreview({ + addons: [addonDocs(), addonA11y(), addonVitest()], + parameters: { + options: { + storySort: { + order: [ + 'Design System', + ['Colors', 'Elevation', 'Type Scale', 'Typography'], + 'UI', + 'Systems', + ['Form', 'Dialogs', 'DragAndDrop'], + 'Interview', + '*', + ], + }, + }, + controls: { + matchers: { + color: /(background|color)$/i, + date: /Date$/i, + }, + }, + a11y: { + // 'todo' - show a11y violations in the test UI only + // 'error' - fail CI on a11y violations + // 'off' - skip a11y checks entirely + test: 'todo', + /** + * base-ui dialog adds focus guards which are picked up by a11y tests + * but are necessary for proper focus management within the dialog, + * and compatible with WCAG guidelines, so we disable this rule here. + */ + config: { + rules: [ + { + id: 'aria-hidden-focus', + selector: '[data-base-ui-focus-guard]', + enabled: false, + }, + ], + }, + }, + }, + + decorators: [ + (Story) => { + // Disable Base UI animations whenever the browser is being driven by + // automation (Playwright in vitest browser mode, or Storybook's + // play-function runner). This makes Base UI dialog open/close flows + // deterministic: they no longer wait on `getAnimations()` so sequences + // like "click Cancel → confirm dialog opens → click Continue editing" + // don't race the form store against CSS animation completion. + // + // Also togglable via `?disableAnimations=1` on the URL for interactive + // debugging of the animation-disabled code path. + // + // Manual browsing has `navigator.webdriver === false`, so interactive + // development still gets the full animations by default. 
+ const disableAnimationsFromAutomation = + typeof navigator !== 'undefined' && navigator.webdriver === true; + const disableAnimations = + disableAnimationsFromAutomation || isChromatic(); + + return ( + // nextjs-vite doesn't seem to pick up the strict mode setting from next config + + {/** + * required by base-ui: https://base-ui.com/react/overview/quick-start#portals + */} +
+ + + +
+
+ ); + }, + withTheme, + ], + + globalTypes, + + initialGlobals: { + theme: getInitialTheme(), + }, +}); diff --git a/.storybook/theme-switcher.tsx b/.storybook/theme-switcher.tsx new file mode 100644 index 000000000..ebacf65e9 --- /dev/null +++ b/.storybook/theme-switcher.tsx @@ -0,0 +1,112 @@ +import type { Decorator } from '@storybook/nextjs-vite'; +import { useLayoutEffect } from 'react'; +import { cx } from '~/utils/cva'; + +const THEME_KEY = 'theme'; +const STORAGE_KEY = 'storybook-theme-preference'; +const INTERVIEW_ATTR = 'data-interview'; + +const themes = { + dashboard: { + name: 'Dashboard', + }, + interview: { + name: 'Interview', + }, +} as const; + +type ThemeKey = keyof typeof themes; + +function getStoredTheme(): ThemeKey | null { + try { + const stored = localStorage.getItem(STORAGE_KEY); + if (stored && stored in themes) { + return stored as ThemeKey; + } + } catch (error) { + // eslint-disable-next-line no-console + console.warn('Failed to read theme from localStorage:', error); + } + return null; +} + +function setStoredTheme(theme: ThemeKey) { + try { + localStorage.setItem(STORAGE_KEY, theme); + } catch (error) { + // eslint-disable-next-line no-console + console.warn('Failed to save theme to localStorage:', error); + } +} + +function ThemeWrapper({ + selectedTheme, + children, +}: { + selectedTheme: ThemeKey; + children: React.ReactNode; +}) { + // Tie the interview theme attribute to a single, predictable DOM node + // (document.body) instead of a React-managed wrapper. Between stories in + // the same Chromatic worker iframe, React reconciliation over wrapper + // divs can leave the attribute in transitional state; toggling body + // directly with a cleanup function makes the write/remove deterministic. 
+ useLayoutEffect(() => { + setStoredTheme(selectedTheme); + if (selectedTheme === 'interview') { + document.body.setAttribute(INTERVIEW_ATTR, ''); + } else { + document.body.removeAttribute(INTERVIEW_ATTR); + } + return () => { + document.body.removeAttribute(INTERVIEW_ATTR); + }; + }, [selectedTheme]); + + const isInterview = selectedTheme === 'interview'; + + return ( +
+ {children} +
+ ); +} + +export const withTheme: Decorator = (Story, context) => { + const selectedTheme = + (context.parameters.forceTheme as ThemeKey) ?? + (context.globals[THEME_KEY] as ThemeKey) ?? + 'dashboard'; + + return ( + + + + ); +}; + +export const globalTypes = { + [THEME_KEY]: { + name: 'Theme', + description: 'Global theme for components', + defaultValue: getStoredTheme() ?? 'dashboard', + toolbar: { + icon: 'paintbrush' as const, + items: Object.entries(themes).map(([key, { name }]) => ({ + value: key, + title: name, + })), + showName: true, + dynamicTitle: true, + }, + }, +}; + +export function getInitialTheme(): ThemeKey { + return getStoredTheme() ?? 'dashboard'; +} diff --git a/.storybook/tsconfig.json b/.storybook/tsconfig.json new file mode 100644 index 000000000..f45d85d0a --- /dev/null +++ b/.storybook/tsconfig.json @@ -0,0 +1,8 @@ +{ + "extends": "../tsconfig.json", + "compilerOptions": { + "allowImportingTsExtensions": true, + "noEmit": true + }, + "include": ["**/*", "../types/**/*.d.ts"] +} diff --git a/.storybook/vite-plugin-stub-use-server.ts b/.storybook/vite-plugin-stub-use-server.ts new file mode 100644 index 000000000..24652d3fb --- /dev/null +++ b/.storybook/vite-plugin-stub-use-server.ts @@ -0,0 +1,48 @@ +const useServerRegex = /^['"]use server['"]/; +const jsExtRegex = /\.[cm]?[jt]sx?$/; + +const exportAsyncFunctionRegex = /^export\s+async\s+function\s+(\w+)/gm; +const exportConstAsyncRegex = /^export\s+const\s+(\w+)\s*=\s*async/gm; +const exportTypeRegex = /^export\s+type\s+(\w+)/gm; + +function getFirstNonEmptyLine(code: string): string { + for (const line of code.split('\n')) { + const trimmed = line.trim(); + if (trimmed !== '') return trimmed; + } + return ''; +} + +export function stubUseServer() { + return { + name: 'stub-use-server', + enforce: 'pre' as const, + + transform(code: string, id: string) { + if (id.includes('node_modules') || !jsExtRegex.test(id)) { + return null; + } + + const firstLine = getFirstNonEmptyLine(code); + if 
(!useServerRegex.test(firstLine)) { + return null; + } + + const stubs: string[] = [`'use server';`]; + + for (const match of code.matchAll(exportAsyncFunctionRegex)) { + stubs.push(`export async function ${match[1]}() {}`); + } + + for (const match of code.matchAll(exportConstAsyncRegex)) { + stubs.push(`export const ${match[1]} = async () => {};`); + } + + for (const match of code.matchAll(exportTypeRegex)) { + stubs.push(`export type ${match[1]} = never;`); + } + + return { code: stubs.join('\n'), map: null }; + }, + }; +} diff --git a/.vscode/extensions.json b/.vscode/extensions.json index fd1d9bf0c..940260d85 100644 --- a/.vscode/extensions.json +++ b/.vscode/extensions.json @@ -1,5 +1,3 @@ { - "recommendations": [ - "dbaeumer.vscode-eslint" - ] -} \ No newline at end of file + "recommendations": ["dbaeumer.vscode-eslint"] +} diff --git a/.vscode/launch.json b/.vscode/launch.json index f596762db..31ba0c876 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -18,12 +18,8 @@ "type": "node", "request": "launch", "program": "${workspaceFolder}/node_modules/.bin/next", - "runtimeArgs": [ - "--inspect" - ], - "skipFiles": [ - "/**" - ], + "runtimeArgs": ["--inspect"], + "skipFiles": ["/**"], "serverReadyAction": { "action": "debugWithEdge", "killOnServerStop": true, @@ -33,4 +29,4 @@ } } ] -} \ No newline at end of file +} diff --git a/.vscode/settings.json b/.vscode/settings.json index ba732724f..2d3801a51 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -1,7 +1,12 @@ { - "css.customData": [ - "./.vscode/css-data.json" - ], + "editor.tabSize": 2, + "editor.insertSpaces": true, + "editor.detectIndentation": false, + "editor.defaultFormatter": "esbenp.prettier-vscode", + "editor.quickSuggestions": { + "strings": "on" + }, + "css.customData": ["./.vscode/css-data.json"], "typescript.tsdk": "node_modules/typescript/lib", "typescript.enablePromptUseWorkspaceTsdk": true, "editor.codeActionsOnSave": { @@ -9,6 +14,15 @@ "source.fixAll": "always", 
"source.fixAll.eslint": "always", "source.fixAll.typescript": "always", + "source.fixAll.tailwindcss": "always" }, - "editor.formatOnSave": true -} \ No newline at end of file + "editor.formatOnSave": true, + "tailwindCSS.classFunctions": ["cva", "cx"], + // Rule is broken: https://github.com/tailwindlabs/tailwindcss-intellisense/issues/1542 + // Implemented a fixable version using https://github.com/schoero/eslint-plugin-better-tailwindcss + // that can also be selectively disabled via eslint-disable-next-line comments + "tailwindCSS.lint.suggestCanonicalClasses": "ignore", + "files.associations": { + "*.css": "tailwindcss" + } +} diff --git a/CLAUDE.md b/CLAUDE.md index 37a968ca8..ddcaa3927 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -6,7 +6,7 @@ This document provides guidance for AI assistants working with the Fresco codeba Fresco is a web-based interview platform that brings Network Canvas interviews to the browser. It's built with Next.js 14 (App Router), TypeScript, and PostgreSQL. Version 3.0.0. -**Documentation**: https://documentation.networkcanvas.com/en/fresco +**Documentation**: ## Quick Reference @@ -18,7 +18,7 @@ pnpm storybook # Component library at :6006 # Quality Checks pnpm lint # ESLint -pnpm ts-lint # TypeScript type checking +pnpm typecheck # TypeScript type checking pnpm test # Vitest unit tests pnpm knip # Find unused code @@ -75,15 +75,20 @@ styles/ # Global CSS/SCSS ### TypeScript - **Strict mode enabled** with `noUncheckedIndexedAccess` +- **Do not use type assertions (`as`)** to fix type errors unless absolutely necessary. Find the root cause of the typing issue and refactor to resolve it. Type assertions should ALWAYS be confirmed with the user first. 
- Use `type` for type definitions (not `interface`) - enforced by ESLint - Prefer inline type imports: `import { type Foo } from './bar'` - Unused variables must start with underscore: `_unusedVar` -- Path alias: `~/` maps to project root +- **Always use path aliases** (`~/`) for imports - never use relative paths like `../` or `./` ```typescript -// Correct +// Correct - use path aliases import { type Protocol } from '@prisma/client'; -import { cn } from '~/utils/shadcn'; +import { cx } from '~/utils/cva'; +import { Button } from '~/components/ui/Button'; + +// Incorrect - never use relative paths +// import { Button } from '../components/ui/Button'; // Type definition export type CreateInterview = { @@ -106,6 +111,7 @@ const dbUrl = env.DATABASE_URL; - `no-console` ESLint rule is enforced - Must disable ESLint for intentional logs: + ```typescript // eslint-disable-next-line no-console console.log('Debug info'); @@ -114,17 +120,18 @@ console.log('Debug info'); ### Server Actions Located in `/actions/`. Pattern: + - Mark with `'use server'` directive - Use `requireApiAuth()` for authentication - Return `{ error, data }` pattern -- Use `safeRevalidateTag()` for cache invalidation +- Use `safeUpdateTag()` for cache invalidation (read-your-own-writes) - Track events with `addEvent()` for activity feed ```typescript 'use server'; import { requireApiAuth } from '~/utils/auth'; -import { safeRevalidateTag } from '~/lib/cache'; +import { safeUpdateTag } from '~/lib/cache'; import { prisma } from '~/utils/db'; export async function deleteItem(id: string) { @@ -132,7 +139,7 @@ export async function deleteItem(id: string) { try { const result = await prisma.item.delete({ where: { id } }); - safeRevalidateTag('getItems'); + safeUpdateTag('getItems'); return { error: null, data: result }; } catch (error) { return { error: 'Failed to delete', data: null }; @@ -164,9 +171,11 @@ export default async function DashboardPage() { ### UI Components Using shadcn/ui with Tailwind. 
Follow the pattern: + - Use `cva` (class-variance-authority) for variants - Use `cn()` utility from `~/utils/shadcn` for class merging - Export component + variants + skeleton when applicable +- **Spread HTML props onto root element** - Components should accept all valid HTML attributes for their root element and spread them. This allows consumers to pass `data-testid`, `aria-*`, event handlers, etc. without the component needing explicit props for each. ```typescript import { cva, type VariantProps } from 'class-variance-authority'; @@ -186,8 +195,19 @@ const buttonVariants = cva('base-classes', { export type ButtonProps = { variant?: VariantProps['variant']; } & React.ButtonHTMLAttributes; + +// Example: spreading props onto root element +const Button = ({ variant, className, ...props }: ButtonProps) => ( + - ); -} diff --git a/app/(blobs)/(setup)/_components/OnboardSteps/ConfigureStorage.tsx b/app/(blobs)/(setup)/_components/OnboardSteps/ConfigureStorage.tsx new file mode 100644 index 000000000..22fd0c395 --- /dev/null +++ b/app/(blobs)/(setup)/_components/OnboardSteps/ConfigureStorage.tsx @@ -0,0 +1,19 @@ +import Heading from '~/components/typography/Heading'; +import Paragraph from '~/components/typography/Paragraph'; +import StorageProviderSelector from '../StorageProviderSelector'; + +export default function ConfigureStorage() { + return ( +
+
+ Configure Storage + + Fresco needs a storage provider for protocol assets and data exports. + Choose between UploadThing (managed service) or an S3-compatible + bucket (self-hosted or cloud). + + +
+
+ ); +} diff --git a/app/(blobs)/(setup)/_components/OnboardSteps/ConnectUploadThing.tsx b/app/(blobs)/(setup)/_components/OnboardSteps/ConnectUploadThing.tsx deleted file mode 100644 index 80cdccd3f..000000000 --- a/app/(blobs)/(setup)/_components/OnboardSteps/ConnectUploadThing.tsx +++ /dev/null @@ -1,59 +0,0 @@ -import { submitUploadThingForm } from '~/actions/appSettings'; -import Link from '~/components/Link'; -import { Alert, AlertDescription, AlertTitle } from '~/components/ui/Alert'; -import Heading from '~/components/ui/typography/Heading'; -import Paragraph from '~/components/ui/typography/Paragraph'; -import { UploadThingTokenForm } from '../UploadThingTokenForm'; - -function ConnectUploadThing() { - return ( -
-
- Connect UploadThing - - Fresco uses a third-party service called UploadThing to store media - files, including protocol assets. In order to use this service, you - need to create an account with UploadThing that will allow you to - generate a token that Fresco can use to securely communicate with it. - - - - Click here - {' '} - to visit UploadThing. Create an app and copy and paste your API key - below. - - - Good to know: - - Your UploadThing account is unique to you, meaning that no one else - will have access to the files stored in your instance of Fresco. For - more information about UploadThing, please review the{' '} - - UploadThing Docs - - . - - - - For help, please refer to the{' '} - - deployment guide - {' '} - in the Fresco documentation. - - -
-
- ); -} - -export default ConnectUploadThing; diff --git a/app/(blobs)/(setup)/_components/OnboardSteps/CreateAccount.tsx b/app/(blobs)/(setup)/_components/OnboardSteps/CreateAccount.tsx index 3a832143b..98fe55787 100644 --- a/app/(blobs)/(setup)/_components/OnboardSteps/CreateAccount.tsx +++ b/app/(blobs)/(setup)/_components/OnboardSteps/CreateAccount.tsx @@ -1,27 +1,18 @@ import { SignUpForm } from '~/app/(blobs)/(setup)/_components/SignUpForm'; -import { Alert, AlertDescription, AlertTitle } from '~/components/ui/Alert'; -import Heading from '~/components/ui/typography/Heading'; -import Paragraph from '~/components/ui/typography/Paragraph'; +import Heading from '~/components/typography/Heading'; function CreateAccount() { return ( -
-
- Create an Account - - To use Fresco, you need to set up an administrator account which will - enable to you access the protected parts of the app. Only one - administrator account can be created. - -
- +
+ Create an Admin Account + {/* Important It is not possible to recover the account details if they are lost. Make sure to store the account details in a safe place, such as a password manager. - + */}
); diff --git a/app/(blobs)/(setup)/_components/OnboardSteps/Documentation.tsx b/app/(blobs)/(setup)/_components/OnboardSteps/Documentation.tsx index 28aabc5df..05884cb53 100644 --- a/app/(blobs)/(setup)/_components/OnboardSteps/Documentation.tsx +++ b/app/(blobs)/(setup)/_components/OnboardSteps/Documentation.tsx @@ -1,89 +1,64 @@ -import { createId } from '@paralleldrive/cuid2'; -import { FileText } from 'lucide-react'; -import { redirect } from 'next/navigation'; -import { setAppSetting } from '~/actions/appSettings'; -import Section from '~/components/layout/Section'; -import { Button } from '~/components/ui/Button'; -import SubmitButton from '~/components/ui/SubmitButton'; -import Heading from '~/components/ui/typography/Heading'; -import Paragraph from '~/components/ui/typography/Paragraph'; -import trackEvent from '~/lib/analytics'; -import { getInstallationId } from '~/queries/appSettings'; - -function Documentation() { - const handleAppConfigured = async () => { - const installationId = await getInstallationId(); - if (!installationId) { - await setAppSetting('installationId', createId()); - } - await setAppSetting('configured', true); - void trackEvent({ - type: 'AppSetup', - metadata: { - installationId, - }, - }); +'use client'; - redirect('/dashboard'); - }; +import { FileText } from 'lucide-react'; +import { completeSetup } from '~/actions/appSettings'; +import Surface from '~/components/layout/Surface'; +import Heading from '~/components/typography/Heading'; +import Paragraph from '~/components/typography/Paragraph'; +import Button from '~/components/ui/Button'; +export default function Documentation() { return ( -
-
- Documentation +
+
+ Documentation This is the end of the onboarding process. You are now ready to use Fresco! For further help and information, consider using the resources below.
-
-
-
- - About Fresco - - Visit our documentation site to learn more about Fresco. -
- -
-
-
- - Using Fresco - - Read our guide on the basic workflow for using Fresco to conduct - your study. -
- -
+
-
- Go to the dashboard! -
+
); } - -export default Documentation; diff --git a/app/(blobs)/(setup)/_components/OnboardSteps/ManageParticipants.tsx b/app/(blobs)/(setup)/_components/OnboardSteps/ManageParticipants.tsx deleted file mode 100644 index 6d29ec2e0..000000000 --- a/app/(blobs)/(setup)/_components/OnboardSteps/ManageParticipants.tsx +++ /dev/null @@ -1,65 +0,0 @@ -import ImportCSVModal from '~/app/dashboard/participants/_components/ImportCSVModal'; -import AnonymousRecruitmentSwitchClient from '~/components/AnonymousRecruitmentSwitchClient'; -import SettingsSection from '~/components/layout/SettingsSection'; -import LimitInterviewsSwitchClient from '~/components/LimitInterviewsSwitchClient'; -import Heading from '~/components/ui/typography/Heading'; -import Paragraph from '~/components/ui/typography/Paragraph'; -import OnboardContinue from '../OnboardContinue'; - -function ManageParticipants({ - allowAnonymousRecruitment, - limitInterviews, -}: { - allowAnonymousRecruitment: boolean; - limitInterviews: boolean; -}) { - return ( -
-
- Configure Participation - - You can now optionally upload a CSV file containing the details of - participants you wish to recruit for your study. You can also choose - to allow anonymous recruitment of participants. Both options can be - configured later from the dashboard. - -
-
- } - > - Upload a CSV file of participants. - - - } - > - - Allow participants to join your study by visiting a URL. - - - - } - > - - Limit each participant to being allowed to complete one interview - per protocol. - - -
-
- -
-
- ); -} - -export default ManageParticipants; diff --git a/app/(blobs)/(setup)/_components/OnboardSteps/UploadProtocol.tsx b/app/(blobs)/(setup)/_components/OnboardSteps/UploadProtocol.tsx index 653d40db6..3d0ff60cb 100644 --- a/app/(blobs)/(setup)/_components/OnboardSteps/UploadProtocol.tsx +++ b/app/(blobs)/(setup)/_components/OnboardSteps/UploadProtocol.tsx @@ -1,9 +1,10 @@ 'use client'; import { parseAsInteger, useQueryState } from 'nuqs'; -import ProtocolUploader from '~/app/dashboard/_components/ProtocolUploader'; +import ProtocolImportDropzone from '~/components/ProtocolImport/ProtocolImportDropzone'; +import Heading from '~/components/typography/Heading'; +import Paragraph from '~/components/typography/Paragraph'; import { Button } from '~/components/ui/Button'; -import Heading from '~/components/ui/typography/Heading'; -import Paragraph from '~/components/ui/typography/Paragraph'; +import { useProtocolImport } from '~/hooks/useProtocolImport'; function ConfigureStudy() { const [currentStep, setCurrentStep] = useQueryState( @@ -11,31 +12,28 @@ function ConfigureStudy() { parseAsInteger.withDefault(1), ); + const { importProtocols } = useProtocolImport(); + const handleNextStep = () => { void setCurrentStep(currentStep + 1); }; return ( -
-
- Import Protocols - - If you have already created a Network Canvas protocol ( - .netcanvas) you can import it now. - - - If you don't have a protocol yet, you can upload one later from - the dashboard. - - -
-
- +
+ Import Protocols + + If you have already created a Network Canvas protocol ( + .netcanvas) you can import it now. + + + If you don't have a protocol yet, you can upload one later from the + dashboard. + + +
+
); diff --git a/app/(blobs)/(setup)/_components/S3ConfigForm.tsx b/app/(blobs)/(setup)/_components/S3ConfigForm.tsx new file mode 100644 index 000000000..e8f254711 --- /dev/null +++ b/app/(blobs)/(setup)/_components/S3ConfigForm.tsx @@ -0,0 +1,90 @@ +'use client'; + +import { useRouter } from 'next/navigation'; +import { saveS3Config } from '~/actions/storageProvider'; +import Field from '~/lib/form/components/Field/Field'; +import Form from '~/lib/form/components/Form'; +import SubmitButton from '~/lib/form/components/SubmitButton'; +import InputField from '~/lib/form/components/fields/InputField'; +import { s3ConfigSchema } from '~/schemas/s3Settings'; + +export const S3ConfigForm = () => { + const router = useRouter(); + + const handleSubmit = async (rawData: unknown) => { + try { + const result = await saveS3Config(rawData); + + if (!result.success) { + return { + success: false as const, + fieldErrors: result.fieldErrors ?? {}, + formErrors: 'error' in result && result.error ? [result.error] : [], + }; + } + + router.push('/setup?step=3'); + return { success: true as const }; + } catch (error) { + const message = + error instanceof Error ? error.message : 'An unexpected error occurred'; + return { + success: false as const, + formErrors: [message], + }; + } + }; + + return ( +
+ + + + + + Save and continue + + ); +}; diff --git a/app/(blobs)/(setup)/_components/SandboxCredentials.tsx b/app/(blobs)/(setup)/_components/SandboxCredentials.tsx index ff24262bc..02f690f02 100644 --- a/app/(blobs)/(setup)/_components/SandboxCredentials.tsx +++ b/app/(blobs)/(setup)/_components/SandboxCredentials.tsx @@ -1,4 +1,3 @@ -import { KeyRound } from 'lucide-react'; import { Alert, AlertDescription, AlertTitle } from '~/components/ui/Alert'; import { env } from '~/env'; @@ -6,7 +5,6 @@ export default function SandboxCredentials() { if (!env.SANDBOX_MODE) return null; return ( - Sandbox Credentials
diff --git a/app/(blobs)/(setup)/_components/Sidebar.tsx b/app/(blobs)/(setup)/_components/Sidebar.tsx index bbf8ed3f4..4b4246b92 100644 --- a/app/(blobs)/(setup)/_components/Sidebar.tsx +++ b/app/(blobs)/(setup)/_components/Sidebar.tsx @@ -2,8 +2,9 @@ import { Check } from 'lucide-react'; import { parseAsInteger, useQueryState } from 'nuqs'; -import Heading from '~/components/ui/typography/Heading'; -import { cn } from '~/utils/shadcn'; +import Surface from '~/components/layout/Surface'; +import Heading from '~/components/typography/Heading'; +import { cx } from '~/utils/cva'; function OnboardSteps({ steps }: { steps: string[] }) { const [currentStep, setCurrentStep] = useQueryState( @@ -12,11 +13,11 @@ function OnboardSteps({ steps }: { steps: string[] }) { ); return ( -
+ {steps.map((step, index) => (
index && 'pointer-events-auto cursor-pointer', @@ -24,10 +25,9 @@ function OnboardSteps({ steps }: { steps: string[] }) { onClick={() => void setCurrentStep(index + 1)} >
- + {step}
))} -
+
); } diff --git a/app/(blobs)/(setup)/_components/SignInForm.tsx b/app/(blobs)/(setup)/_components/SignInForm.tsx index b6c163a70..2e46d17f5 100644 --- a/app/(blobs)/(setup)/_components/SignInForm.tsx +++ b/app/(blobs)/(setup)/_components/SignInForm.tsx @@ -1,93 +1,364 @@ 'use client'; -import { Loader2 } from 'lucide-react'; +import { + browserSupportsWebAuthn, + startAuthentication, +} from '@simplewebauthn/browser'; +import { ArrowLeft, KeyRound, LockIcon, User2 } from 'lucide-react'; import { useRouter } from 'next/navigation'; -import { login } from '~/actions/auth'; +import { useEffect, useState } from 'react'; +import { login, recoveryCodeLogin, type LoginResult } from '~/actions/auth'; +import { verifyTwoFactor } from '~/actions/twoFactor'; +import { + generateAuthenticationOptions, + verifyAuthentication, +} from '~/actions/webauthn'; +import Paragraph from '~/components/typography/Paragraph'; import { Button } from '~/components/ui/Button'; -import { Input } from '~/components/ui/Input'; -import UnorderedList from '~/components/ui/typography/UnorderedList'; -import { useToast } from '~/components/ui/use-toast'; -import useZodForm from '~/hooks/useZodForm'; +import { DialogFooter } from '~/lib/dialogs/Dialog'; +import Field from '~/lib/form/components/Field/Field'; +import Form from '~/lib/form/components/Form'; +import SubmitButton from '~/lib/form/components/SubmitButton'; +import InputField from '~/lib/form/components/fields/InputField'; +import PasswordField from '~/lib/form/components/fields/PasswordField'; +import SegmentedCodeField from '~/lib/form/components/fields/SegmentedCodeField'; +import { type FormSubmitHandler } from '~/lib/form/store/types'; import { loginSchema } from '~/schemas/auth'; +function isRateLimited( + result: LoginResult, +): result is { success: false; rateLimited: true; retryAfter: number } { + return 'rateLimited' in result; +} + +function isTwoFactorRequired(result: LoginResult): result is { + success: false; + 
requiresTwoFactor: true; + twoFactorToken: string; +} { + return 'requiresTwoFactor' in result; +} + export const SignInForm = () => { - const { - register, - handleSubmit, - setError, - formState: { errors, isSubmitting }, - } = useZodForm({ - schema: loginSchema, - }); - - const { toast } = useToast(); const router = useRouter(); - const onSubmit = async (data: unknown) => { + const [twoFactorRequired, setTwoFactorRequired] = useState(false); + const [twoFactorToken, setTwoFactorToken] = useState(null); + const [retryAfter, setRetryAfter] = useState(null); + const [useRecovery, setUseRecovery] = useState(false); + + const [webauthnSupported, setWebauthnSupported] = useState(false); + const [passkeyLoading, setPasskeyLoading] = useState(false); + const [passkeyError, setPasskeyError] = useState(null); + const [showRecovery, setShowRecovery] = useState(false); + + useEffect(() => { + setWebauthnSupported(browserSupportsWebAuthn()); + }, []); + + useEffect(() => { + if (retryAfter === null || retryAfter <= 0) { + return; + } + + const interval = setInterval(() => { + setRetryAfter((prev) => { + if (prev === null || prev <= 1) { + return null; + } + return prev - 1; + }); + }, 1000); + + return () => clearInterval(interval); + }, [retryAfter]); + + const handleSubmit: FormSubmitHandler = async (data) => { const result = await login(data); + if (isRateLimited(result)) { + const secondsRemaining = Math.ceil( + (result.retryAfter - Date.now()) / 1000, + ); + setRetryAfter(Math.max(secondsRemaining, 1)); + return { + success: false, + formErrors: [ + `Too many attempts. 
Try again in ${String(Math.max(secondsRemaining, 1))} seconds.`, + ], + }; + } + + if (isTwoFactorRequired(result)) { + setTwoFactorToken(result.twoFactorToken); + setTwoFactorRequired(true); + return { success: false }; + } + if (result.success === true) { router.push('/dashboard'); - return; } - // Handle formErrors - if (result.formErrors.length > 0) { - toast({ - variant: 'destructive', - title: 'Login failed', - description: ( - <> - - {result.formErrors.map((error) => ( -
  • {error}
  • - ))} -
    - - ), - }); + return result; + }; + + const handleTwoFactorSubmit: FormSubmitHandler = async (data) => { + const values = data as Record; + const code = values.code; + if (!code) { + return { success: false, fieldErrors: { code: ['Code is required'] } }; + } + + const result = await verifyTwoFactor({ twoFactorToken, code }); + + if (!result.success) { + const error = + 'formErrors' in result && result.formErrors + ? (result.formErrors[0] ?? 'Verification failed') + : 'Verification failed'; + return { success: false, formErrors: [error] }; } - // Handle field errors - if (result.fieldErrors) { - for (const [field, message] of Object.entries(result.fieldErrors)) { - setError(`root.${field}`, { types: { type: 'manual', message } }); + router.push('/dashboard'); + return { success: true }; + }; + + const handlePasskeySignIn = async () => { + setPasskeyError(null); + setPasskeyLoading(true); + + try { + const { error, data } = await generateAuthenticationOptions(); + if (error || !data) { + setPasskeyError(error ?? 
'Failed to start passkey authentication'); + return; + } + + // IMMEDIATELY call startAuthentication — preserves Safari user gesture + const credential = await startAuthentication({ + optionsJSON: data.options, + }); + + const result = await verifyAuthentication({ credential }); + if (result.error) { + setPasskeyError(result.error); + return; + } + + router.push('/dashboard'); + } catch (e) { + if (e instanceof Error && e.name === 'NotAllowedError') { + return; } + setPasskeyError('Passkey authentication failed'); + } finally { + setPasskeyLoading(false); + } + }; + + const handleRecoveryLogin: FormSubmitHandler = async (data) => { + const values = data as Record; + const username = values.username; + const recoveryCode = values.recoveryCode; + + if (!username || !recoveryCode) { + return { + success: false, + formErrors: ['Username and recovery code are required'], + }; } + + const result = await recoveryCodeLogin({ username, recoveryCode }); + + if (result.success) { + router.push('/dashboard'); + } + + return result; + }; + + const handleBackToSignIn = () => { + setTwoFactorRequired(false); + setTwoFactorToken(null); + setUseRecovery(false); + setShowRecovery(false); + setPasskeyError(null); }; + if (showRecovery) { + return ( +
    + } + /> + +
    + + + Sign in + +
    + + ); + } + + if (twoFactorRequired) { + return ( +
    + {useRecovery ? ( + + ) : ( + + )} + + + + + + Verify + + + + ); + } + return ( -
    void handleSubmit(onSubmit)(event)} - className="flex w-full flex-col" - > -
    - + + } /> -
    -
    - } /> -
    -
    - -
    -
    +
    + 0} + > + {retryAfter !== null && retryAfter > 0 + ? `Try again in ${String(retryAfter)}s` + : 'Sign in'} + +
    + + + {webauthnSupported && ( + <> +
    +
    + or +
    +
    + + + + {passkeyError && ( + + {passkeyError} + + )} + + )} + + + ); }; diff --git a/app/(blobs)/(setup)/_components/SignUpForm.tsx b/app/(blobs)/(setup)/_components/SignUpForm.tsx index ab16b5c87..b9b0890e8 100644 --- a/app/(blobs)/(setup)/_components/SignUpForm.tsx +++ b/app/(blobs)/(setup)/_components/SignUpForm.tsx @@ -1,80 +1,204 @@ 'use client'; -import { Loader2 } from 'lucide-react'; +import { + browserSupportsWebAuthn, + startRegistration, +} from '@simplewebauthn/browser'; +import { useRouter } from 'next/navigation'; +import { useEffect, useState } from 'react'; +import { useMediaQuery } from 'usehooks-ts'; import { signup } from '~/actions/auth'; -import { Button } from '~/components/ui/Button'; -import { Input } from '~/components/ui/Input'; -import useZodForm from '~/hooks/useZodForm'; +import { + generateSignupRegistrationOptions, + signupWithPasskey, +} from '~/actions/webauthn'; +import Field from '~/lib/form/components/Field/Field'; +import FieldGroup from '~/lib/form/components/FieldGroup'; +import InputField from '~/lib/form/components/fields/InputField'; +import PasswordField from '~/lib/form/components/fields/PasswordField'; +import RichSelectGroupField from '~/lib/form/components/fields/RichSelectGroup'; +import Form from '~/lib/form/components/Form'; +import SubmitButton from '~/lib/form/components/SubmitButton'; +import { + type FormSubmissionResult, + type FormSubmitHandler, +} from '~/lib/form/store/types'; import { createUserSchema } from '~/schemas/auth'; -export const SignUpForm = () => { - const { - register, - handleSubmit, - watch, - trigger, - formState: { errors, isValid, isSubmitting }, - } = useZodForm({ - schema: createUserSchema, - mode: 'onTouched', - }); - - const onSubmit = async (data: unknown) => { - await signup(data); +type SignUpFormProps = { + sandboxMode?: boolean; +}; + +export const SignUpForm = ({ sandboxMode = false }: SignUpFormProps) => { + const router = useRouter(); + const [webauthnSupported, 
setWebauthnSupported] = useState(false); + const [passkeyLoading, setPasskeyLoading] = useState(false); + const [passkeyError, setPasskeyError] = useState(null); + + useEffect(() => { + setWebauthnSupported(browserSupportsWebAuthn()); + }, []); + + const showAuthMethodChoice = webauthnSupported && !sandboxMode; + + const handleSubmit: FormSubmitHandler = async (data) => { + const values = data as Record; + const authMethod = + typeof values?.authMethod === 'string' ? values.authMethod : 'password'; + const username = + typeof values?.username === 'string' ? values.username : ''; + + if (authMethod === 'passkey') { + return handlePasskeySignup(username); + } + + return handlePasswordSignup(data); }; - const password = watch('password'); + const handlePasswordSignup: FormSubmitHandler = async (data) => { + const result = await signup(data); + + return { + success: false, + formErrors: result.error ? [result.error] : [], + }; + }; + + const handlePasskeySignup = async ( + username: string, + ): Promise => { + if (!username) { + return { + success: false, + formErrors: ['Username is required'], + }; + } + + setPasskeyError(null); + setPasskeyLoading(true); + + try { + // Step 1: Generate registration options (no session created yet) + const { error: genError, data: regData } = + await generateSignupRegistrationOptions(username); + if (genError || !regData) { + setPasskeyLoading(false); + return { + success: false, + formErrors: [genError ?? 
'Failed to start passkey registration'], + }; + } + + // Step 2: OS passkey popup (still no session) + const credential = await startRegistration({ + optionsJSON: regData.options, + }); + + // Step 3: Atomic signup — creates user + stores passkey + session + const result = await signupWithPasskey({ username, credential }); + + if (result.error) { + setPasskeyLoading(false); + return { + success: false, + formErrors: [result.error], + }; + } + + // Session now exists — navigate to next step + router.refresh(); + router.push('/setup?step=2'); + return { success: true }; + } catch (e) { + if (e instanceof Error && e.name === 'NotAllowedError') { + setPasskeyLoading(false); + return { success: false }; + } + setPasskeyLoading(false); + return { + success: false, + formErrors: ['Passkey registration failed'], + }; + } + }; + + const isSmallScreen = useMediaQuery('(max-width: 640px)'); return ( -
    void handleSubmit(onSubmit)(event)} - autoComplete="do-not-autofill" - > -
    - + + {showAuthMethodChoice && ( + -
    -
    - values.authMethod !== 'passkey'} + > + trigger('password'), - })} + showValidationHints /> - {password && password.length > 0 && ( - !!values.password} + > + trigger('confirmPassword'), - })} /> - )} -
    -
    - -
    -
    + + + {passkeyError && ( +

    {passkeyError}

    + )} + + Create account + + ); }; diff --git a/app/(blobs)/(setup)/_components/StorageProviderSelector.tsx b/app/(blobs)/(setup)/_components/StorageProviderSelector.tsx new file mode 100644 index 000000000..f552ca11f --- /dev/null +++ b/app/(blobs)/(setup)/_components/StorageProviderSelector.tsx @@ -0,0 +1,43 @@ +'use client'; + +import { useState } from 'react'; +import type { RichSelectOption } from '~/lib/form/components/fields/RichSelectGroup'; +import RichSelectGroupField from '~/lib/form/components/fields/RichSelectGroup'; +import { S3ConfigForm } from './S3ConfigForm'; +import { UploadThingTokenForm } from './UploadThingTokenForm'; + +type Provider = 'uploadthing' | 's3'; + +const providerOptions: RichSelectOption[] = [ + { + value: 'uploadthing', + label: 'UploadThing', + description: + 'Third-party managed storage. Easy to set up — just paste your API token.', + }, + { + value: 's3', + label: 'S3 / S3-Compatible', + description: + 'Self-hosted or cloud object storage (AWS S3, MinIO, Cloudflare R2, Backblaze B2).', + }, +]; + +export default function StorageProviderSelector() { + const [selected, setSelected] = useState('uploadthing'); + + return ( +
    + setSelected(value as Provider)} + orientation="horizontal" + size="md" + /> + + {selected === 'uploadthing' && } + {selected === 's3' && } +
    + ); +} diff --git a/app/(blobs)/(setup)/_components/UploadThingTokenForm.tsx b/app/(blobs)/(setup)/_components/UploadThingTokenForm.tsx index a1dc979b7..f93b52fa8 100644 --- a/app/(blobs)/(setup)/_components/UploadThingTokenForm.tsx +++ b/app/(blobs)/(setup)/_components/UploadThingTokenForm.tsx @@ -1,55 +1,71 @@ -import { Loader2 } from 'lucide-react'; -import { z } from 'zod'; -import { Button } from '~/components/ui/Button'; -import { Input } from '~/components/ui/Input'; -import useZodForm from '~/hooks/useZodForm'; +'use client'; + +import { useRouter } from 'next/navigation'; +import { setUploadThingToken } from '~/actions/appSettings'; +import { setStorageProvider } from '~/actions/storageProvider'; +import Field from '~/lib/form/components/Field/Field'; +import Form from '~/lib/form/components/Form'; +import SubmitButton from '~/lib/form/components/SubmitButton'; +import InputField from '~/lib/form/components/fields/InputField'; import { createUploadThingTokenSchema } from '~/schemas/appSettings'; -export const UploadThingTokenForm = ({ - action, -}: { - action: (token: string) => Promise; -}) => { - const { - register, - handleSubmit, - formState: { errors, isValid, isSubmitting }, - } = useZodForm({ - schema: z.object({ - uploadThingToken: createUploadThingTokenSchema, - }), - }); - - const onSubmit = async ({ - uploadThingToken, - }: { - uploadThingToken: string; - }) => { - await action(uploadThingToken); +export const UploadThingTokenForm = () => { + const router = useRouter(); + + const handleSubmit = async (rawData: unknown) => { + try { + const result = await setUploadThingToken(rawData); + + if (!result.success) { + return { + success: false as const, + fieldErrors: result.fieldErrors, + }; + } + + const providerResult = await setStorageProvider('uploadthing'); + if (!providerResult.success) { + return { + success: false as const, + formErrors: [ + providerResult.error ?? 
'Failed to set storage provider.', + ], + }; + } + + router.push('/setup?step=3'); + + return { + success: true as const, + }; + } catch (error) { + const message = + error instanceof Error ? error.message : 'An unexpected error occurred'; + return { + success: false as const, + formErrors: [message], + }; + } }; return ( -
    void handleSubmit(onSubmit)(event)} - > -
    - -
    -
    - -
    -
    +
    + + + Save and continue + + ); }; diff --git a/app/(blobs)/(setup)/layout.tsx b/app/(blobs)/(setup)/layout.tsx index 1faf603c1..c76c1f543 100644 --- a/app/(blobs)/(setup)/layout.tsx +++ b/app/(blobs)/(setup)/layout.tsx @@ -1,7 +1,18 @@ -import type { ReactNode } from 'react'; +import { Loader2 } from 'lucide-react'; +import { type ReactNode, Suspense } from 'react'; import { requireAppNotExpired } from '~/queries/appSettings'; -export default async function Layout({ children }: { children: ReactNode }) { +export default function Layout({ children }: { children: ReactNode }) { + return ( + } + > + {children} + + ); +} + +async function SetupLayoutContent({ children }: { children: ReactNode }) { await requireAppNotExpired(true); return children; } diff --git a/app/(blobs)/(setup)/setup/Setup.tsx b/app/(blobs)/(setup)/setup/Setup.tsx index 1408e0788..398184d8f 100644 --- a/app/(blobs)/(setup)/setup/Setup.tsx +++ b/app/(blobs)/(setup)/setup/Setup.tsx @@ -1,14 +1,13 @@ 'use client'; -import { motion } from 'motion/react'; import { parseAsInteger, useQueryState } from 'nuqs'; import { useEffect } from 'react'; import { containerClasses } from '~/components/ContainerClasses'; -import { cn } from '~/utils/shadcn'; -import ConnectUploadThing from '../_components/OnboardSteps/ConnectUploadThing'; +import Surface from '~/components/layout/Surface'; +import { cx } from '~/utils/cva'; +import ConfigureStorage from '../_components/OnboardSteps/ConfigureStorage'; import CreateAccount from '../_components/OnboardSteps/CreateAccount'; import Documentation from '../_components/OnboardSteps/Documentation'; -import ManageParticipants from '../_components/OnboardSteps/ManageParticipants'; import UploadProtocol from '../_components/OnboardSteps/UploadProtocol'; import OnboardSteps from '../_components/Sidebar'; import type { SetupData } from './page'; @@ -22,68 +21,46 @@ export default function Setup({ setupData }: { setupData: SetupData }) { component: CreateAccount, }, { - label: 
'Connect UploadThing', - component: ConnectUploadThing, + label: 'Configure Storage', + component: ConfigureStorage, }, { label: 'Upload Protocol', component: UploadProtocol, }, - { - label: 'Configure Participation', - component: () => ( - - ), - }, { label: 'Documentation', component: Documentation, }, ]; - const cardClasses = cn(containerClasses, 'flex-row bg-transparent p-0 gap-6'); - const mainClasses = cn('bg-white flex w-full p-12 rounded-xl'); + const cardClasses = cx( + containerClasses, + 'tablet-portrait:flex-row tablet-portrait:gap-6 flex flex-col gap-4', + ); useEffect(() => { + // Redirect to step 1 if we aren't authenticated if (!setupData.hasAuth && step > 1) { void setStep(1); return; } + // Don't show the user creation step if we _are_ authenticated if (setupData.hasAuth && step === 1) { void setStep(2); return; } - - if (setupData.hasAuth && step === 2 && setupData.hasUploadThingToken) { - void setStep(3); - return; - } - - // if we're past step 2 but we still have null values, go back to step 2 - if (setupData.hasAuth && step > 2) { - if ( - !setupData.hasUploadThingToken || - setupData.allowAnonymousRecruitment === null || - setupData.limitInterviews === null - ) { - void setStep(2); - return; - } - } }, [step, setStep, setupData]); const StepComponent = steps[step - 1]!.component; return ( - +
    step.label)} /> -
    + -
    - + +
    ); } diff --git a/app/(blobs)/(setup)/setup/page.tsx b/app/(blobs)/(setup)/setup/page.tsx index 44bab7dff..be2ecf382 100644 --- a/app/(blobs)/(setup)/setup/page.tsx +++ b/app/(blobs)/(setup)/setup/page.tsx @@ -1,12 +1,12 @@ import { Loader2 } from 'lucide-react'; import { Suspense } from 'react'; +import { getServerSession } from '~/lib/auth/guards'; +import { prisma } from '~/lib/db'; import { getAppSetting, requireAppNotConfigured, requireAppNotExpired, } from '~/queries/appSettings'; -import { getServerSession } from '~/utils/auth'; -import { prisma } from '~/lib/db'; import Setup from './Setup'; async function getSetupData() { @@ -15,7 +15,7 @@ async function getSetupData() { 'allowAnonymousRecruitment', ); const limitInterviews = await getAppSetting('limitInterviews'); - const otherData = await prisma.$transaction([ + const otherData = await Promise.all([ prisma.protocol.count(), prisma.participant.count(), ]); @@ -34,19 +34,19 @@ async function getSetupData() { export type SetupData = Awaited>; -export const dynamic = 'force-dynamic'; - -export default async function Page() { - await requireAppNotExpired(true); - await requireAppNotConfigured(); - - const setupData = await getSetupData(); - +export default function Page() { return ( } + fallback={} > - + ); } + +async function SetupContent() { + await requireAppNotExpired(true); + await requireAppNotConfigured(); + const setupData = await getSetupData(); + return ; +} diff --git a/app/(blobs)/(setup)/signin/page.tsx b/app/(blobs)/(setup)/signin/page.tsx index d4319aec1..fa774966d 100644 --- a/app/(blobs)/(setup)/signin/page.tsx +++ b/app/(blobs)/(setup)/signin/page.tsx @@ -1,30 +1,36 @@ +import { type Metadata } from 'next'; import { redirect } from 'next/navigation'; +import { connection } from 'next/server'; import { containerClasses } from '~/components/ContainerClasses'; -import { getServerSession } from '~/utils/auth'; -import { cn } from '~/utils/shadcn'; +import { MotionSurface } from 
'~/components/layout/Surface'; +import Heading from '~/components/typography/Heading'; +import { getServerSession } from '~/lib/auth/guards'; +import { cx } from '~/utils/cva'; import SandboxCredentials from '../_components/SandboxCredentials'; import { SignInForm } from '../_components/SignInForm'; -export const metadata = { +export const metadata: Metadata = { title: 'Fresco - Sign In', description: 'Sign in to Fresco.', }; -export const dynamic = 'force-dynamic'; - export default async function Page() { + await connection(); const session = await getServerSession(); - - if (session) { - // If the user is already signed in, redirect to the dashboard - redirect('/dashboard'); - } - + if (session) redirect('/dashboard'); return ( -
    -

    Sign In To Fresco

    + + Sign In To Fresco -
    + ); } diff --git a/app/(blobs)/expired/page.tsx b/app/(blobs)/expired/page.tsx index e93b2aa8e..9a95dc655 100644 --- a/app/(blobs)/expired/page.tsx +++ b/app/(blobs)/expired/page.tsx @@ -1,34 +1,30 @@ -import { redirect } from 'next/navigation'; import { resetAppSettings } from '~/actions/reset'; import { containerClasses } from '~/components/ContainerClasses'; +import Surface from '~/components/layout/Surface'; +import Heading from '~/components/typography/Heading'; +import Paragraph from '~/components/typography/Paragraph'; import SubmitButton from '~/components/ui/SubmitButton'; import { env } from '~/env'; -import { isAppExpired } from '~/queries/appSettings'; - -export default async function Page() { - const isExpired = await isAppExpired(); - - if (!isExpired) { - redirect('/'); - } +import { cx } from '~/utils/cva'; +export default function Page() { return ( -
    -

    Installation expired

    -

    + + Installation expired + You did not configure this deployment of Fresco in time, and it has now been locked down for your security. -

    -

    + + Please redeploy a new instance of Fresco to continue using the software. -

    + {env.NODE_ENV === 'development' && ( -
    void resetAppSettings()}> - + + Dev mode: Reset Configuration )} -
    + ); } diff --git a/app/(blobs)/layout.tsx b/app/(blobs)/layout.tsx index d291a4300..2532a59f2 100644 --- a/app/(blobs)/layout.tsx +++ b/app/(blobs)/layout.tsx @@ -1,33 +1,37 @@ import Image from 'next/image'; import Link from 'next/link'; -import type { PropsWithChildren } from 'react'; +import { type PropsWithChildren, Suspense } from 'react'; import BackgroundBlobs from '~/components/BackgroundBlobs/BackgroundBlobs'; import NetlifyBadge from '~/components/NetlifyBadge'; export default function Layout({ children }: PropsWithChildren) { return ( <> -
    -
    +
    + + + +
    + +
    +
    Network Canvas -
    -
    - {children} -
    + +
    {children}
    -
    - -
    ); } diff --git a/app/(interview)/interview/[interviewId]/layout.tsx b/app/(interview)/interview/[interviewId]/layout.tsx new file mode 100644 index 000000000..5fe2377ca --- /dev/null +++ b/app/(interview)/interview/[interviewId]/layout.tsx @@ -0,0 +1,17 @@ +import { type ReactNode, Suspense } from 'react'; +import SmallScreenOverlay from '../_components/SmallScreenOverlay'; + +export default function InterviewSessionLayout({ + children, +}: { + children: ReactNode; +}) { + return ( + <> + + + + {children} + + ); +} diff --git a/app/(interview)/interview/[interviewId]/loading.tsx b/app/(interview)/interview/[interviewId]/loading.tsx deleted file mode 100644 index fe350aa90..000000000 --- a/app/(interview)/interview/[interviewId]/loading.tsx +++ /dev/null @@ -1,9 +0,0 @@ -import { Loader2 } from 'lucide-react'; - -export default function Loading() { - return ( -
    - -
    - ); -} diff --git a/app/(interview)/interview/[interviewId]/page.tsx b/app/(interview)/interview/[interviewId]/page.tsx index e1e03afc6..cdace63e1 100644 --- a/app/(interview)/interview/[interviewId]/page.tsx +++ b/app/(interview)/interview/[interviewId]/page.tsx @@ -1,49 +1,113 @@ +import dynamic from 'next/dynamic'; import { cookies } from 'next/headers'; import { notFound, redirect } from 'next/navigation'; -import { syncInterview } from '~/actions/interviews'; +import { after, connection } from 'next/server'; +import { Suspense } from 'react'; +import SuperJSON from 'superjson'; +import { type ActivityType } from '~/app/dashboard/_components/ActivityFeed/types'; +import Spinner from '~/components/Spinner'; +import { getServerSession } from '~/lib/auth/guards'; +import { safeRevalidateTag } from '~/lib/cache'; +import { prisma } from '~/lib/db'; +import { captureEvent, shutdownPostHog } from '~/lib/posthog-server'; import { getAppSetting } from '~/queries/appSettings'; -import { getInterviewById } from '~/queries/interviews'; -import { getServerSession } from '~/utils/auth'; -import InterviewShell from '../_components/InterviewShell'; +import { + getInterviewById, + type GetInterviewByIdQuery, +} from '~/queries/interviews'; -export const dynamic = 'force-dynamic'; // Force dynamic rendering for this page +const InterviewShell = dynamic( + () => import('~/lib/interviewer/InterviewShell'), + { + loading: () => ( +
    + +
    + ), + }, +); -export default async function Page({ - params, +export default function Page(props: { + params: Promise<{ interviewId: string }>; +}) { + return ( + + +
    + } + > + + + ); +} + +async function InterviewContent({ + params: paramsPromise, }: { - params: { interviewId: string }; + params: Promise<{ interviewId: string }>; }) { - const { interviewId } = params; + await connection(); + const { interviewId } = await paramsPromise; if (!interviewId) { return 'No interview id found'; } - const interview = await getInterviewById(interviewId); - const session = await getServerSession(); + const rawInterview = await getInterviewById(interviewId); - // If the interview is not found, redirect to the 404 page - if (!interview) { + if (!rawInterview) { notFound(); } - // if limitInterviews is enabled - // Check cookies for interview already completed for this user for this protocol - // and redirect to finished page + const interview = + SuperJSON.parse>(rawInterview); + const session = await getServerSession(); + const limitInterviews = await getAppSetting('limitInterviews'); - if (limitInterviews && cookies().get(interview?.protocol?.id ?? '')) { + if (limitInterviews && (await cookies()).get(interview.protocol.id)) { redirect('/interview/finished'); } - // If the interview is finished and there is no session, redirect to the finish page - if (interview?.finishTime && !session) { + if (!session && interview?.finishTime) { redirect('/interview/finished'); } - return ( - <> - - - ); + after(async () => { + try { + const message = session + ? 
`Interview "${interviewId}" was opened by user "${session.user.username}"` + : `Interview "${interviewId}" was opened`; + + const thirtyMinutesAgo = new Date(Date.now() - 30 * 60 * 1000); + + const recentEvent = await prisma.events.findFirst({ + where: { + type: 'Interview Opened', + message, + timestamp: { gte: thirtyMinutesAgo }, + }, + }); + + if (recentEvent) return; + + await prisma.events.create({ + data: { + type: 'Interview Opened' satisfies ActivityType, + message, + }, + }); + + safeRevalidateTag('activityFeed'); + + await captureEvent('Interview Opened', { message }); + await shutdownPostHog(); + } catch { + // Non-critical — don't block the interview + } + }); + + return ; } diff --git a/app/(interview)/interview/[interviewId]/sync/route.ts b/app/(interview)/interview/[interviewId]/sync/route.ts new file mode 100644 index 000000000..7c9088205 --- /dev/null +++ b/app/(interview)/interview/[interviewId]/sync/route.ts @@ -0,0 +1,89 @@ +import { NcNetworkSchema } from '@codaco/shared-consts'; +import { after, NextResponse, type NextRequest } from 'next/server'; +import { z } from 'zod'; +import { z as zm } from 'zod/mini'; +import { prisma } from '~/lib/db'; +import { StageMetadataSchema } from '~/lib/interviewer/ducks/modules/session'; +import { captureException, shutdownPostHog } from '~/lib/posthog-server'; +import { getAppSetting } from '~/queries/appSettings'; +import { ensureError } from '~/utils/ensureError'; + +/** + * Handle post requests from the client to store the current interview state. 
+ */ +const routeHandler = async ( + request: NextRequest, + { params }: { params: Promise<{ interviewId: string }> }, +) => { + const { interviewId } = await params; + + const rawPayload = await request.json(); + + const Schema = z.object({ + id: z.string(), + network: NcNetworkSchema, + currentStep: z.number(), + stageMetadata: zm.optional(StageMetadataSchema), + lastUpdated: z.string(), + }); + + const validatedRequest = Schema.safeParse(rawPayload); + + if (!validatedRequest.success) { + after(async () => { + await captureException(validatedRequest.error, { + interviewId, + }); + await shutdownPostHog(); + }); + + return NextResponse.json( + { + error: validatedRequest.error, + }, + { status: 400 }, + ); + } + + const { network, currentStep, stageMetadata, lastUpdated } = + validatedRequest.data; + + const freezeEnabled = await getAppSetting('freezeInterviewsAfterCompletion'); + + if (freezeEnabled) { + const interview = await prisma.interview.findUnique({ + where: { id: interviewId }, + select: { finishTime: true }, + }); + + if (interview?.finishTime) { + return NextResponse.json({ success: true }); + } + } + + try { + await prisma.interview.update({ + where: { + id: interviewId, + }, + data: { + network, + currentStep, + stageMetadata: stageMetadata ?? 
undefined, + lastUpdated: new Date(lastUpdated), + }, + }); + + return NextResponse.json({ success: true }); + } catch (e) { + const error = ensureError(e); + return NextResponse.json( + { + error: error.message, + }, + { status: 500 }, + ); + } +}; + +export { routeHandler as POST }; diff --git a/app/(interview)/interview/_components/ErrorMessage.tsx b/app/(interview)/interview/_components/ErrorMessage.tsx index 459fd9927..e2b739e9f 100644 --- a/app/(interview)/interview/_components/ErrorMessage.tsx +++ b/app/(interview)/interview/_components/ErrorMessage.tsx @@ -1,3 +1,6 @@ +import Heading from '~/components/typography/Heading'; +import Paragraph from '~/components/typography/Paragraph'; + type ErrorMessageProps = { title: string; message: string; @@ -7,8 +10,8 @@ export const ErrorMessage = ({ title, message }: ErrorMessageProps) => { return (
    -
    {title}
    -

    {message}

    + {title} + {message}
    ); diff --git a/app/(interview)/interview/_components/InterviewShell.tsx b/app/(interview)/interview/_components/InterviewShell.tsx deleted file mode 100644 index a1685c2c7..000000000 --- a/app/(interview)/interview/_components/InterviewShell.tsx +++ /dev/null @@ -1,74 +0,0 @@ -'use client'; - -import { Provider } from 'react-redux'; -import DialogManager from '~/lib/interviewer/components/DialogManager'; -import ProtocolScreen from '~/lib/interviewer/containers/ProtocolScreen'; -import { - SET_SERVER_SESSION, - type SetServerSessionAction, -} from '~/lib/interviewer/ducks/modules/setServerSession'; -import { store } from '~/lib/interviewer/store'; -import ServerSync from './ServerSync'; -import { useEffect, useState } from 'react'; -import { parseAsInteger, useQueryState } from 'nuqs'; -import type { SyncInterviewType } from '~/actions/interviews'; -import type { getInterviewById } from '~/queries/interviews'; - -// The job of interview shell is to receive the server-side session and protocol -// and create a redux store with that data. -// Eventually it will handle syncing this data back. -const InterviewShell = ({ - interview, - syncInterview, -}: { - interview: Awaited>; - syncInterview: SyncInterviewType; -}) => { - const [initialized, setInitialized] = useState(false); - const [currentStage, setCurrentStage] = useQueryState('step', parseAsInteger); - - useEffect(() => { - if (initialized || !interview) { - return; - } - - const { protocol, ...serverSession } = interview; - - // If we have a current stage in the URL bar, and it is different from the - // server session, set the server session to the current stage. - // - // If we don't have a current stage in the URL bar, set it to the server - // session, and set the URL bar to the server session. 
- if (currentStage === null) { - void setCurrentStage(serverSession.currentStep); - } else if (currentStage !== serverSession.currentStep) { - serverSession.currentStep = currentStage; - } - - // If there's no current stage in the URL bar, set it. - store.dispatch({ - type: SET_SERVER_SESSION, - payload: { - protocol, - session: serverSession, - }, - }); - - setInitialized(true); - }, [initialized, setInitialized, currentStage, setCurrentStage, interview]); - - if (!initialized || !interview) { - return null; - } - - return ( - - - - - - - ); -}; - -export default InterviewShell; diff --git a/app/(interview)/interview/_components/ServerSync.tsx b/app/(interview)/interview/_components/ServerSync.tsx deleted file mode 100644 index f067b57bb..000000000 --- a/app/(interview)/interview/_components/ServerSync.tsx +++ /dev/null @@ -1,65 +0,0 @@ -'use client'; - -import { debounce, isEqual } from 'es-toolkit'; -import { type ReactNode, useCallback, useEffect, useState } from 'react'; -import { useSelector } from 'react-redux'; -import type { SyncInterviewType } from '~/actions/interviews'; -import usePrevious from '~/hooks/usePrevious'; -import { getActiveSession } from '~/lib/interviewer/selectors/shared'; - -// The job of ServerSync is to listen to actions in the redux store, and to sync -// data with the server. 
-const ServerSync = ({ - interviewId, - children, - serverSync, -}: { - interviewId: string; - children: ReactNode; - serverSync: SyncInterviewType; -}) => { - const [init, setInit] = useState(false); - // Current stage - const currentSession = useSelector(getActiveSession); - const prevCurrentSession = usePrevious(currentSession); - - // eslint-disable-next-line react-hooks/exhaustive-deps - const debouncedSessionSync = useCallback( - debounce(serverSync, 2000, { - edges: ['trailing', 'leading'], - }), - [serverSync], - ); - - useEffect(() => { - if (!init) { - setInit(true); - return; - } - - if ( - isEqual(currentSession, prevCurrentSession) || - !currentSession || - !prevCurrentSession - ) { - return; - } - - void debouncedSessionSync({ - id: interviewId, - network: currentSession.network, - currentStep: currentSession.currentStep ?? 0, - stageMetadata: currentSession.stageMetadata, // Temporary storage used by tiestrengthcensus/dyadcensus to store negative responses - }); - }, [ - currentSession, - prevCurrentSession, - interviewId, - init, - debouncedSessionSync, - ]); - - return children; -}; - -export default ServerSync; diff --git a/app/(interview)/interview/_components/SmallScreenOverlay.tsx b/app/(interview)/interview/_components/SmallScreenOverlay.tsx index 637fb26d9..b2968e99b 100644 --- a/app/(interview)/interview/_components/SmallScreenOverlay.tsx +++ b/app/(interview)/interview/_components/SmallScreenOverlay.tsx @@ -1,10 +1,12 @@ import Image from 'next/image'; +import { connection } from 'next/server'; import { env } from 'node:process'; -import Heading from '~/components/ui/typography/Heading'; -import Paragraph from '~/components/ui/typography/Paragraph'; +import Heading from '~/components/typography/Heading'; +import Paragraph from '~/components/typography/Paragraph'; import { getAppSetting } from '~/queries/appSettings'; const SmallScreenOverlay = async () => { + await connection(); const disableSmallScreenOverlay = await getAppSetting( 
'disableSmallScreenOverlay', ); @@ -13,7 +15,7 @@ const SmallScreenOverlay = async () => { } return ( -
    +
    { height={300} alt="Screen too small" /> - Screen Size Too Small - - + Screen Size Too Small + To complete this interview, please use a device with a larger screen, or maximize your browser window. - + Note: it is not possible to complete this interview using a mobile phone. diff --git a/app/(interview)/interview/finished/page.tsx b/app/(interview)/interview/finished/page.tsx index ced3d6e59..45ff65548 100644 --- a/app/(interview)/interview/finished/page.tsx +++ b/app/(interview)/interview/finished/page.tsx @@ -1,15 +1,14 @@ import { BadgeCheck } from 'lucide-react'; -import Heading from '~/components/ui/typography/Heading'; -import Paragraph from '~/components/ui/typography/Paragraph'; - -export const dynamic = 'force-dynamic'; +import Surface from '~/components/layout/Surface'; +import Heading from '~/components/typography/Heading'; +import Paragraph from '~/components/typography/Paragraph'; export default function InterviewCompleted() { return ( -
    - - Thank you for participating! + + + Thank you for participating! Your interview has been successfully completed. -
    + ); } diff --git a/app/(interview)/interview/layout.tsx b/app/(interview)/interview/layout.tsx deleted file mode 100644 index b36dbda6d..000000000 --- a/app/(interview)/interview/layout.tsx +++ /dev/null @@ -1,18 +0,0 @@ -import SmallScreenOverlay from '~/app/(interview)/interview/_components/SmallScreenOverlay'; -import '~/styles/interview.scss'; - -export const metadata = { - title: 'Network Canvas Fresco - Interview', - description: 'Interview', -}; - -function RootLayout({ children }: { children: React.ReactNode }) { - return ( -
    - - {children} -
    - ); -} - -export default RootLayout; diff --git a/app/(interview)/layout.tsx b/app/(interview)/layout.tsx new file mode 100644 index 000000000..1ea52eb1c --- /dev/null +++ b/app/(interview)/layout.tsx @@ -0,0 +1,20 @@ +import { type Metadata } from 'next'; +import '~/styles/themes/interview.css'; + +export const metadata: Metadata = { + title: 'Network Canvas Fresco - Interview', + description: 'Interview', +}; + +function RootLayout({ children }: { children: React.ReactNode }) { + return ( +
    + {children} +
    + ); +} + +export default RootLayout; diff --git a/app/(interview)/onboard/[protocolId]/__tests__/route.test.ts b/app/(interview)/onboard/[protocolId]/__tests__/route.test.ts new file mode 100644 index 000000000..caa8f6d56 --- /dev/null +++ b/app/(interview)/onboard/[protocolId]/__tests__/route.test.ts @@ -0,0 +1,432 @@ +import { NextRequest } from 'next/server'; +import { beforeEach, describe, expect, it, vi } from 'vitest'; + +// Mock dependencies before importing the handler +vi.mock('~/actions/interviews', () => ({ + createInterview: vi.fn(), +})); + +vi.mock('~/queries/appSettings', () => ({ + getAppSetting: vi.fn(), +})); + +vi.mock('next/headers', () => ({ + cookies: vi.fn(() => ({ + get: vi.fn(), + })), +})); + +vi.mock('~/env', () => ({ + env: { + PUBLIC_URL: 'http://localhost:3000', + }, +})); + +vi.mock('next/server', async (importOriginal) => { + const actual = await importOriginal>(); + return { + ...actual, + after: vi.fn(), + }; +}); + +vi.mock('~/lib/posthog-server', () => ({ + captureEvent: vi.fn(), + captureException: vi.fn(), + shutdownPostHog: vi.fn(), +})); + +// Import after mocks are set up +import { createInterview } from '~/actions/interviews'; +import { getAppSetting } from '~/queries/appSettings'; +import { cookies } from 'next/headers'; + +// Import the handlers +import { GET, POST } from '../route'; + +const mockCreateInterview = vi.mocked(createInterview); +const mockGetAppSetting = vi.mocked(getAppSetting); +const mockCookies = vi.mocked(cookies); + +describe('Onboard Route Handler', () => { + beforeEach(() => { + vi.clearAllMocks(); + + // Default mock implementations + mockGetAppSetting.mockResolvedValue(false); + mockCookies.mockResolvedValue({ + get: vi.fn().mockReturnValue(undefined), + has: vi.fn().mockReturnValue(false), + getAll: vi.fn().mockReturnValue([]), + set: vi.fn(), + delete: vi.fn(), + size: 0, + [Symbol.iterator]: vi.fn(), + } as Awaited>); + }); + + describe('GET handler', () => { + it('should redirect to error 
page when no protocolId is provided', async () => { + const request = new NextRequest( + 'http://localhost:3000/onboard/undefined', + ); + const params = Promise.resolve({ protocolId: 'undefined' }); + + const response = await GET(request, { params }); + + expect(response.status).toBe(307); + expect(response.headers.get('location')).toBe( + 'http://localhost:3000/onboard/error', + ); + }); + + it('should extract participantIdentifier from query string', async () => { + const protocolId = 'test-protocol-id'; + const participantIdentifier = 'TEST-PARTICIPANT-001'; + const createdInterviewId = 'interview-123'; + + mockCreateInterview.mockResolvedValue({ + createdInterviewId, + error: null, + errorType: null, + }); + + const request = new NextRequest( + `http://localhost:3000/onboard/${protocolId}?participantIdentifier=${participantIdentifier}`, + ); + const params = Promise.resolve({ protocolId }); + + const response = await GET(request, { params }); + + expect(mockCreateInterview).toHaveBeenCalledWith({ + participantIdentifier, + protocolId, + }); + expect(response.status).toBe(307); + expect(response.headers.get('location')).toBe( + `http://localhost:3000/interview/${createdInterviewId}`, + ); + }); + + it('should pass undefined when no participantIdentifier is provided', async () => { + const protocolId = 'test-protocol-id'; + const createdInterviewId = 'interview-456'; + + mockCreateInterview.mockResolvedValue({ + createdInterviewId, + error: null, + errorType: null, + }); + + const request = new NextRequest( + `http://localhost:3000/onboard/${protocolId}`, + ); + const params = Promise.resolve({ protocolId }); + + await GET(request, { params }); + + expect(mockCreateInterview).toHaveBeenCalledWith({ + participantIdentifier: undefined, + protocolId, + }); + }); + + it('should redirect to finished page when limitInterviews is enabled and cookie exists', async () => { + const protocolId = 'test-protocol-id'; + + mockGetAppSetting.mockResolvedValue(true); + 
mockCookies.mockResolvedValue({ + get: vi.fn().mockReturnValue({ value: 'completed' }), + } as unknown as Awaited>); + + const request = new NextRequest( + `http://localhost:3000/onboard/${protocolId}`, + ); + const params = Promise.resolve({ protocolId }); + + const response = await GET(request, { params }); + + expect(mockCreateInterview).not.toHaveBeenCalled(); + expect(response.status).toBe(307); + expect(response.headers.get('location')).toBe( + 'http://localhost:3000/interview/finished', + ); + }); + + it('should allow new interview when limitInterviews is enabled but no cookie exists', async () => { + const protocolId = 'test-protocol-id'; + const createdInterviewId = 'interview-789'; + + mockGetAppSetting.mockResolvedValue(true); + mockCookies.mockResolvedValue({ + get: vi.fn().mockReturnValue(undefined), + } as unknown as Awaited>); + mockCreateInterview.mockResolvedValue({ + createdInterviewId, + error: null, + errorType: null, + }); + + const request = new NextRequest( + `http://localhost:3000/onboard/${protocolId}`, + ); + const params = Promise.resolve({ protocolId }); + + const response = await GET(request, { params }); + + expect(mockCreateInterview).toHaveBeenCalled(); + expect(response.headers.get('location')).toBe( + `http://localhost:3000/interview/${createdInterviewId}`, + ); + }); + + it('should redirect to error page when createInterview returns an error', async () => { + const protocolId = 'test-protocol-id'; + + mockCreateInterview.mockResolvedValue({ + createdInterviewId: null, + error: 'Failed to create interview', + errorType: 'unknown-error', + }); + + const request = new NextRequest( + `http://localhost:3000/onboard/${protocolId}`, + ); + const params = Promise.resolve({ protocolId }); + + const response = await GET(request, { params }); + + expect(response.status).toBe(307); + expect(response.headers.get('location')).toBe( + 'http://localhost:3000/onboard/error', + ); + }); + + it('should redirect to no-anonymous-recruitment page when 
anonymous recruitment is disabled', async () => { + const protocolId = 'test-protocol-id'; + + mockCreateInterview.mockResolvedValue({ + createdInterviewId: null, + error: 'Anonymous recruitment is not enabled', + errorType: 'no-anonymous-recruitment', + }); + + const request = new NextRequest( + `http://localhost:3000/onboard/${protocolId}`, + ); + const params = Promise.resolve({ protocolId }); + + const response = await GET(request, { params }); + + expect(response.status).toBe(307); + expect(response.headers.get('location')).toBe( + 'http://localhost:3000/onboard/no-anonymous-recruitment', + ); + }); + }); + + describe('POST handler', () => { + it('should extract participantIdentifier from JSON body', async () => { + const protocolId = 'test-protocol-id'; + const participantIdentifier = 'POST-PARTICIPANT-001'; + const createdInterviewId = 'interview-post-123'; + + mockCreateInterview.mockResolvedValue({ + createdInterviewId, + error: null, + errorType: null, + }); + + const request = new NextRequest( + `http://localhost:3000/onboard/${protocolId}`, + { + method: 'POST', + body: JSON.stringify({ participantIdentifier }), + headers: { + 'Content-Type': 'application/json', + }, + }, + ); + const params = Promise.resolve({ protocolId }); + + const response = await POST(request, { params }); + + expect(mockCreateInterview).toHaveBeenCalledWith({ + participantIdentifier, + protocolId, + }); + expect(response.status).toBe(307); + expect(response.headers.get('location')).toBe( + `http://localhost:3000/interview/${createdInterviewId}`, + ); + }); + + it('should handle POST with empty body gracefully', async () => { + const protocolId = 'test-protocol-id'; + const createdInterviewId = 'interview-post-456'; + + mockCreateInterview.mockResolvedValue({ + createdInterviewId, + error: null, + errorType: null, + }); + + const request = new NextRequest( + `http://localhost:3000/onboard/${protocolId}`, + { + method: 'POST', + body: JSON.stringify({}), + headers: { + 
'Content-Type': 'application/json', + }, + }, + ); + const params = Promise.resolve({ protocolId }); + + await POST(request, { params }); + + expect(mockCreateInterview).toHaveBeenCalledWith({ + participantIdentifier: undefined, + protocolId, + }); + }); + + it('should redirect to error page when POST body parsing fails', async () => { + const protocolId = 'test-protocol-id'; + + mockCreateInterview.mockResolvedValue({ + createdInterviewId: null, + error: 'Failed to create interview', + errorType: 'parse-error', + }); + + const request = new NextRequest( + `http://localhost:3000/onboard/${protocolId}`, + { + method: 'POST', + body: JSON.stringify(null), + headers: { + 'Content-Type': 'application/json', + }, + }, + ); + const params = Promise.resolve({ protocolId }); + + await POST(request, { params }); + + expect(mockCreateInterview).toHaveBeenCalledWith({ + participantIdentifier: undefined, + protocolId, + }); + }); + + it('should redirect to no-anonymous-recruitment page when anonymous recruitment is disabled', async () => { + const protocolId = 'test-protocol-id'; + + mockCreateInterview.mockResolvedValue({ + createdInterviewId: null, + error: 'Anonymous recruitment is not enabled', + errorType: 'no-anonymous-recruitment', + }); + + const request = new NextRequest( + `http://localhost:3000/onboard/${protocolId}`, + { + method: 'POST', + body: JSON.stringify({}), + headers: { + 'Content-Type': 'application/json', + }, + }, + ); + const params = Promise.resolve({ protocolId }); + + const response = await POST(request, { params }); + + expect(response.status).toBe(307); + expect(response.headers.get('location')).toBe( + 'http://localhost:3000/onboard/no-anonymous-recruitment', + ); + }); + + it('should check limitInterviews for POST requests too', async () => { + const protocolId = 'test-protocol-id'; + + mockGetAppSetting.mockResolvedValue(true); + mockCookies.mockResolvedValue({ + get: vi.fn().mockReturnValue({ value: 'completed' }), + } as unknown as Awaited>); 
+ + const request = new NextRequest( + `http://localhost:3000/onboard/${protocolId}`, + { + method: 'POST', + body: JSON.stringify({ participantIdentifier: 'test' }), + headers: { + 'Content-Type': 'application/json', + }, + }, + ); + const params = Promise.resolve({ protocolId }); + + const response = await POST(request, { params }); + + expect(mockCreateInterview).not.toHaveBeenCalled(); + expect(response.headers.get('location')).toBe( + 'http://localhost:3000/interview/finished', + ); + }); + }); + + describe('Edge cases', () => { + it('should handle protocolId with special characters', async () => { + const protocolId = 'test-protocol-123_abc'; + const createdInterviewId = 'interview-special'; + + mockCreateInterview.mockResolvedValue({ + createdInterviewId, + error: null, + errorType: null, + }); + + const request = new NextRequest( + `http://localhost:3000/onboard/${protocolId}`, + ); + const params = Promise.resolve({ protocolId }); + + const response = await GET(request, { params }); + + expect(mockCreateInterview).toHaveBeenCalledWith({ + participantIdentifier: undefined, + protocolId, + }); + expect(response.headers.get('location')).toBe( + `http://localhost:3000/interview/${createdInterviewId}`, + ); + }); + + it('should handle URL-encoded participantIdentifier', async () => { + const protocolId = 'test-protocol-id'; + const participantIdentifier = 'user@example.com'; + const createdInterviewId = 'interview-encoded'; + + mockCreateInterview.mockResolvedValue({ + createdInterviewId, + error: null, + errorType: null, + }); + + const request = new NextRequest( + `http://localhost:3000/onboard/${protocolId}?participantIdentifier=${encodeURIComponent(participantIdentifier)}`, + ); + const params = Promise.resolve({ protocolId }); + + await GET(request, { params }); + + expect(mockCreateInterview).toHaveBeenCalledWith({ + participantIdentifier, + protocolId, + }); + }); + }); +}); diff --git a/app/(interview)/onboard/[protocolId]/route.ts 
b/app/(interview)/onboard/[protocolId]/route.ts index 0cadd1f92..1cd33e8f7 100644 --- a/app/(interview)/onboard/[protocolId]/route.ts +++ b/app/(interview)/onboard/[protocolId]/route.ts @@ -1,17 +1,15 @@ import { cookies } from 'next/headers'; -import { NextResponse, type NextRequest } from 'next/server'; +import { after, NextResponse, type NextRequest } from 'next/server'; import { createInterview } from '~/actions/interviews'; import { env } from '~/env'; -import trackEvent from '~/lib/analytics'; +import { captureEvent, shutdownPostHog } from '~/lib/posthog-server'; import { getAppSetting } from '~/queries/appSettings'; -export const dynamic = 'force-dynamic'; - const handler = async ( req: NextRequest, - { params }: { params: { protocolId: string } }, + { params }: { params: Promise<{ protocolId: string }> }, ) => { - const protocolId = params.protocolId; // From route segment + const { protocolId } = await params; // when deployed via docker `req.url` and `req.nextUrl` // shows Docker Container ID instead of real host @@ -30,7 +28,7 @@ const handler = async ( // if limitInterviews is enabled // Check cookies for interview already completed for this user for this protocol // and redirect to finished page - if (limitInterviews && cookies().get(protocolId)) { + if (limitInterviews && (await cookies()).get(protocolId)) { url.pathname = '/interview/finished'; return NextResponse.redirect(url); } @@ -51,21 +49,26 @@ const handler = async ( } // Create a new interview given the protocolId and participantId - const { createdInterviewId, error } = await createInterview({ + const { createdInterviewId, error, errorType } = await createInterview({ participantIdentifier, protocolId, }); if (error) { - void trackEvent({ - type: 'Error', - name: error, - message: 'Failed to create interview', - metadata: { + after(async () => { + await captureEvent('Error', { + name: error, + message: 'Failed to create interview', path: '/onboard/[protocolId]/route.ts', - }, + }); + await 
shutdownPostHog(); }); + if (errorType === 'no-anonymous-recruitment') { + url.pathname = '/onboard/no-anonymous-recruitment'; + return NextResponse.redirect(url); + } + url.pathname = '/onboard/error'; return NextResponse.redirect(url); } @@ -77,16 +80,23 @@ const handler = async ( }...`, ); - void trackEvent({ - type: 'InterviewStarted', - metadata: { + after(async () => { + await captureEvent('InterviewStarted', { usingAnonymousParticipant: !participantIdentifier, - }, + }); + await shutdownPostHog(); }); // Redirect to the interview + // Explicitly disable caching to prevent Netlify from caching this redirect + // (Netlify adds max-age=86400 by default, causing all users to get the same interview) + // See: https://github.com/opennextjs/opennextjs-netlify/issues/3460 url.pathname = `/interview/${createdInterviewId}`; - return NextResponse.redirect(url); + return NextResponse.redirect(url, { + headers: { + 'Cache-Control': 'no-cache, no-store, must-revalidate', + }, + }); }; export { handler as GET, handler as POST }; diff --git a/app/(interview)/preview/[protocolId]/interview/page.tsx b/app/(interview)/preview/[protocolId]/interview/page.tsx new file mode 100644 index 000000000..1780bdef8 --- /dev/null +++ b/app/(interview)/preview/[protocolId]/interview/page.tsx @@ -0,0 +1,89 @@ +import { + entityAttributesProperty, + entityPrimaryKeyProperty, +} from '@codaco/shared-consts'; +import { Loader2 } from 'lucide-react'; +import { notFound } from 'next/navigation'; +import { connection } from 'next/server'; +import { Suspense } from 'react'; +import SuperJSON from 'superjson'; +import { v4 as uuid } from 'uuid'; +import InterviewShell from '~/lib/interviewer/InterviewShell'; +import { prisma } from '~/lib/db'; +import { getPreviewMode } from '~/queries/appSettings'; +import { getProtocolForPreview } from '~/queries/protocols'; + +export default function PreviewInterviewPage(props: { + params: Promise<{ protocolId: string }>; +}) { + return ( + + +
    + } + > + + + ); +} + +async function PreviewContent({ + params: paramsPromise, +}: { + params: Promise<{ protocolId: string }>; +}) { + await connection(); + const { protocolId } = await paramsPromise; + + const previewMode = await getPreviewMode(); + if (!previewMode) { + notFound(); + } + + if (!protocolId) { + notFound(); + } + + const protocol = await getProtocolForPreview(protocolId); + + if (!protocol) { + notFound(); + } + + // Don't allow pending protocols (still uploading assets) + if (protocol.isPending) { + notFound(); + } + + // Update timestamp to prevent premature pruning + await prisma.previewProtocol.update({ + where: { id: protocolId }, + data: { importedAt: new Date() }, + }); + + const now = new Date(); + const previewInterview = { + id: `preview-${uuid()}`, + startTime: now, + finishTime: null, + exportTime: null, + lastUpdated: now, + currentStep: 0, + stageMetadata: null, + network: { + ego: { + [entityPrimaryKeyProperty]: uuid(), + [entityAttributesProperty]: {}, + }, + nodes: [], + edges: [], + }, + protocol, + }; + + const rawPayload = SuperJSON.stringify(previewInterview); + + return ; +} diff --git a/app/(interview)/preview/[protocolId]/route.ts b/app/(interview)/preview/[protocolId]/route.ts new file mode 100644 index 000000000..40c5112b5 --- /dev/null +++ b/app/(interview)/preview/[protocolId]/route.ts @@ -0,0 +1,70 @@ +import { after, NextResponse, type NextRequest } from 'next/server'; +import { env } from '~/env'; +import { prisma } from '~/lib/db'; +import { captureEvent, shutdownPostHog } from '~/lib/posthog-server'; +import { getPreviewMode } from '~/queries/appSettings'; + +const handler = async ( + req: NextRequest, + { params }: { params: Promise<{ protocolId: string }> }, +) => { + const { protocolId } = await params; + + // Check if preview mode is enabled + const previewMode = await getPreviewMode(); + if (!previewMode) { + const url = new URL(env.PUBLIC_URL ?? 
req.nextUrl.clone()); + url.pathname = '/onboard/error'; + return NextResponse.redirect(url); + } + + const url = new URL(env.PUBLIC_URL ?? req.nextUrl.clone()); + + // Validate protocol ID + if (!protocolId || protocolId === 'undefined') { + url.pathname = '/onboard/error'; + return NextResponse.redirect(url); + } + + // Verify that this is a preview protocol + const protocol = await prisma.previewProtocol.findUnique({ + where: { id: protocolId }, + select: { isPending: true, name: true }, + }); + + if (!protocol) { + url.pathname = '/onboard/error'; + return NextResponse.redirect(url); + } + + if (protocol.isPending) { + // Protocol assets are still being uploaded + url.pathname = '/onboard/error'; + return NextResponse.redirect(url); + } + + // eslint-disable-next-line no-console + console.log( + `🎨 Starting preview interview using preview protocol ${protocol.name}...`, + ); + + after(async () => { + await captureEvent('InterviewStarted', { + protocolId, + isPreview: true, + }); + await shutdownPostHog(); + }); + + // Redirect to the preview interview page (no database persistence) + // Explicitly disable caching to prevent Netlify from caching this redirect + // See: https://github.com/opennextjs/opennextjs-netlify/issues/3460 + url.pathname = `/preview/${protocolId}/interview`; + return NextResponse.redirect(url, { + headers: { + 'Cache-Control': 'no-cache, no-store, must-revalidate', + }, + }); +}; + +export { handler as GET, handler as POST }; diff --git a/app/(interview)/preview/layout.tsx b/app/(interview)/preview/layout.tsx new file mode 100644 index 000000000..5f4ecf3ac --- /dev/null +++ b/app/(interview)/preview/layout.tsx @@ -0,0 +1,13 @@ +import { type ReactNode, Suspense } from 'react'; +import SmallScreenOverlay from '../interview/_components/SmallScreenOverlay'; + +export default function PreviewLayout({ children }: { children: ReactNode }) { + return ( + <> + + + + {children} + + ); +} diff --git 
a/app/api/[version]/interview/[interviewId]/route.ts b/app/api/[version]/interview/[interviewId]/route.ts new file mode 100644 index 000000000..8c325d495 --- /dev/null +++ b/app/api/[version]/interview/[interviewId]/route.ts @@ -0,0 +1,92 @@ +import { after, type NextRequest, NextResponse } from 'next/server'; +import { + createCorsHeaders, + requireApiTokenAuth, +} from '~/app/api/_helpers/auth'; +import { prisma } from '~/lib/db'; +import { captureException, shutdownPostHog } from '~/lib/posthog-server'; +import { getAppSetting } from '~/queries/appSettings'; +import { ensureError } from '~/utils/ensureError'; + +const corsHeaders = createCorsHeaders('GET, OPTIONS'); + +export function OPTIONS() { + return new NextResponse(null, { + status: 204, + headers: corsHeaders, + }); +} + +export async function GET( + request: NextRequest, + { params }: { params: Promise<{ version: string; interviewId: string }> }, +) { + const { version, interviewId } = await params; + + if (version !== 'v1') { + return NextResponse.json( + { error: `Unsupported API version: ${version}` }, + { status: 404, headers: corsHeaders }, + ); + } + + const enabled = await getAppSetting('enableInterviewDataApi'); + if (!enabled) { + return NextResponse.json( + { error: 'Interview Data API is not enabled' }, + { status: 403, headers: corsHeaders }, + ); + } + + const authResult = await requireApiTokenAuth(request); + if ('error' in authResult) { + return NextResponse.json( + { error: 'Authentication required. Provide a Bearer token.' 
}, + { status: 401, headers: corsHeaders }, + ); + } + + try { + const interview = await prisma.interview.findUnique({ + where: { id: interviewId }, + include: { + participant: { + select: { + id: true, + identifier: true, + label: true, + }, + }, + protocol: { + select: { + id: true, + name: true, + schemaVersion: true, + description: true, + codebook: true, + }, + }, + }, + }); + + if (!interview) { + return NextResponse.json( + { error: 'Interview not found' }, + { status: 404, headers: corsHeaders }, + ); + } + + return NextResponse.json({ data: interview }, { headers: corsHeaders }); + } catch (e) { + const error = ensureError(e); + await captureException(error); + after(async () => { + await shutdownPostHog(); + }); + + return NextResponse.json( + { error: 'Failed to fetch interview' }, + { status: 500, headers: corsHeaders }, + ); + } +} diff --git a/app/api/[version]/interview/route.ts b/app/api/[version]/interview/route.ts new file mode 100644 index 000000000..783ea22e3 --- /dev/null +++ b/app/api/[version]/interview/route.ts @@ -0,0 +1,128 @@ +import { after, type NextRequest, NextResponse } from 'next/server'; +import { + createCorsHeaders, + requireApiTokenAuth, +} from '~/app/api/_helpers/auth'; +import { createVersionedHandler } from '~/app/api/_helpers/versioning'; +import { prisma } from '~/lib/db'; +import { type Prisma } from '~/lib/db/generated/client'; +import { captureException, shutdownPostHog } from '~/lib/posthog-server'; +import { getAppSetting } from '~/queries/appSettings'; +import { ensureError } from '~/utils/ensureError'; + +const corsHeaders = createCorsHeaders('GET, OPTIONS'); + +export function OPTIONS() { + return new NextResponse(null, { + status: 204, + headers: corsHeaders, + }); +} + +async function v1(request: NextRequest) { + const enabled = await getAppSetting('enableInterviewDataApi'); + if (!enabled) { + return NextResponse.json( + { error: 'Interview Data API is not enabled' }, + { status: 403, headers: corsHeaders }, + 
); + } + + const authResult = await requireApiTokenAuth(request); + if ('error' in authResult) { + return NextResponse.json( + { error: 'Authentication required. Provide a Bearer token.' }, + { status: 401, headers: corsHeaders }, + ); + } + + try { + const { searchParams } = request.nextUrl; + const page = Math.max(1, Number(searchParams.get('page') ?? '1')); + const perPage = Math.min( + 100, + Math.max(1, Number(searchParams.get('perPage') ?? '10')), + ); + const protocolId = searchParams.get('protocolId'); + const participantId = searchParams.get('participantId'); + const status = searchParams.get('status'); + + const where: Prisma.InterviewWhereInput = {}; + + if (protocolId) { + where.protocolId = protocolId; + } + + if (participantId) { + where.participantId = participantId; + } + + if (status === 'completed') { + where.finishTime = { not: null }; + } else if (status === 'in-progress') { + where.finishTime = null; + } + + const [interviews, total] = await Promise.all([ + prisma.interview.findMany({ + where, + select: { + id: true, + startTime: true, + finishTime: true, + lastUpdated: true, + currentStep: true, + protocolId: true, + participantId: true, + participant: { + select: { + id: true, + identifier: true, + label: true, + }, + }, + protocol: { + select: { + id: true, + name: true, + }, + }, + }, + orderBy: { lastUpdated: 'desc' }, + skip: (page - 1) * perPage, + take: perPage, + }), + prisma.interview.count({ where }), + ]); + + return NextResponse.json( + { + data: interviews, + meta: { + page, + perPage, + pageCount: Math.ceil(total / perPage), + total, + }, + }, + { headers: corsHeaders }, + ); + } catch (e) { + const error = ensureError(e); + await captureException(error); + after(async () => { + await shutdownPostHog(); + }); + + return NextResponse.json( + { error: 'Failed to fetch interviews' }, + { status: 500, headers: corsHeaders }, + ); + } +} + +const handlers = { + v1: { GET: v1 }, +}; + +export const GET = 
createVersionedHandler(handlers, 'GET'); diff --git a/app/api/[version]/preview/_handlers/v1/__tests__/handler.test.ts b/app/api/[version]/preview/_handlers/v1/__tests__/handler.test.ts new file mode 100644 index 000000000..e42773823 --- /dev/null +++ b/app/api/[version]/preview/_handlers/v1/__tests__/handler.test.ts @@ -0,0 +1,611 @@ +import { Effect, Layer } from 'effect'; +import { NextRequest } from 'next/server'; +import { beforeEach, describe, expect, it, vi } from 'vitest'; +import { + AssetStorage, + type PresignedUploadUrl, +} from '~/lib/storage/services/AssetStorage'; + +// Hoisted mocks +const { + mockCheckPreviewAuth, + mockValidateAndMigrateProtocol, + mockPrunePreviewProtocols, + mockPrisma, + mockGetExistingAssets, + mockDeleteAssets, + mockGeneratePresignedUploadUrls, + mockGetStorageProvider, + mockAddEvent, + mockCaptureException, + mockExtractApikeyAssetsFromManifest, + mockParseUploadThingToken, + mockGeneratePresignedUploadUrl, + mockRegisterUploadWithUploadThing, + mockGetBaseUrl, +} = vi.hoisted(() => ({ + mockCheckPreviewAuth: vi.fn(), + mockValidateAndMigrateProtocol: vi.fn(), + mockPrunePreviewProtocols: vi.fn(), + mockPrisma: { + previewProtocol: { + findFirst: vi.fn(), + findUnique: vi.fn(), + create: vi.fn(), + update: vi.fn(), + delete: vi.fn(), + }, + asset: { + findMany: vi.fn(), + deleteMany: vi.fn(), + }, + }, + mockGetExistingAssets: vi.fn(), + mockDeleteAssets: vi.fn(), + mockGeneratePresignedUploadUrls: + vi.fn<(files: { name: string; size: number }[]) => PresignedUploadUrl[]>(), + mockGetStorageProvider: vi.fn<() => Promise<'s3' | 'uploadthing'>>(), + mockAddEvent: vi.fn(), + mockCaptureException: vi.fn(), + mockExtractApikeyAssetsFromManifest: vi.fn(), + mockParseUploadThingToken: vi.fn(), + mockGeneratePresignedUploadUrl: vi.fn(), + mockRegisterUploadWithUploadThing: vi.fn(), + mockGetBaseUrl: vi.fn(), +})); + +vi.mock('next/server', async (importOriginal) => { + const actual = await importOriginal>(); + return { ...actual, 
after: vi.fn() }; +}); + +vi.mock('../helpers', async () => { + const { NextResponse } = await import('next/server'); + return { + checkPreviewAuth: mockCheckPreviewAuth, + jsonResponse: (data: unknown, status = 200) => + NextResponse.json(data, { status }), + corsHeaders: {}, + }; +}); + +vi.mock('~/actions/activityFeed', () => ({ + addEvent: mockAddEvent, +})); + +vi.mock('~/actions/preview-protocol-pruning', () => ({ + prunePreviewProtocols: mockPrunePreviewProtocols, +})); + +vi.mock('~/env', () => ({ + env: { PUBLIC_URL: 'http://localhost:3000' }, +})); + +vi.mock('~/lib/db', () => ({ + prisma: mockPrisma, +})); + +vi.mock('~/lib/db/generated/client', () => ({ + Prisma: { JsonNull: null }, +})); + +vi.mock('~/lib/posthog-server', () => ({ + captureException: mockCaptureException, + shutdownPostHog: vi.fn(), +})); + +vi.mock('~/lib/protocol/validateAndMigrateProtocol', () => ({ + validateAndMigrateProtocol: mockValidateAndMigrateProtocol, +})); + +vi.mock('~/lib/storage/layers/StorageLayer', () => ({ + getStorageLayer: () => { + const mockAssetStorageLayer = Layer.succeed( + AssetStorage, + AssetStorage.of({ + generatePresignedUploadUrls: ( + files: { name: string; size: number }[], + ) => Effect.succeed(mockGeneratePresignedUploadUrls(files)), + deleteAssets: (keys: string[]) => { + mockDeleteAssets(keys); + return Effect.void; + }, + }), + ); + return Promise.resolve(mockAssetStorageLayer); + }, +})); + +vi.mock('~/queries/protocols', () => ({ + getExistingAssets: mockGetExistingAssets, +})); + +vi.mock('~/queries/storageProvider', () => ({ + getStorageProvider: mockGetStorageProvider, +})); + +vi.mock('~/utils/ensureError', () => ({ + ensureError: (e: unknown) => (e instanceof Error ? 
e : new Error(String(e))), +})); + +vi.mock('~/utils/protocolImport', () => ({ + extractApikeyAssetsFromManifest: mockExtractApikeyAssetsFromManifest, +})); + +vi.mock('~/lib/uploadthing/token', () => ({ + parseUploadThingToken: mockParseUploadThingToken, +})); + +vi.mock('~/lib/uploadthing/presigned', () => ({ + generatePresignedUploadUrl: mockGeneratePresignedUploadUrl, + registerUploadWithUploadThing: mockRegisterUploadWithUploadThing, +})); + +vi.mock('~/utils/getBaseUrl', () => ({ + getBaseUrl: mockGetBaseUrl, +})); + +import { v1 } from '../handler'; + +type JsonBody = { + status: string; + message?: string; + previewUrl?: string; + protocolId?: string; + presignedUrls?: { + assetId: string; + url: string; + headers: Record; + bodyFormat: 'raw' | 'formdata'; + }[]; +}; + +function createPostRequest( + body: unknown, + extraHeaders?: Record, +): NextRequest { + return new NextRequest('http://localhost:3000/api/v1/preview', { + method: 'POST', + body: JSON.stringify(body), + headers: { 'Content-Type': 'application/json', ...extraHeaders }, + }); +} + +const validProtocol = { + schemaVersion: 7, + stages: [], + codebook: {}, + description: 'Test protocol', + lastModified: '2024-01-01T00:00:00.000Z', +}; + +describe('Preview API v1 handler', () => { + beforeEach(() => { + vi.clearAllMocks(); + mockCheckPreviewAuth.mockResolvedValue(null); + mockPrunePreviewProtocols.mockResolvedValue({ deletedCount: 0 }); + mockExtractApikeyAssetsFromManifest.mockReturnValue([]); + mockGetExistingAssets.mockResolvedValue([]); + mockAddEvent.mockResolvedValue({ success: true, error: null }); + mockGetStorageProvider.mockResolvedValue('s3'); + mockParseUploadThingToken.mockResolvedValue({ + apiKey: 'sk_test', + appId: 'app-id', + regions: ['sea1'], + ingestHost: 'ingest.uploadthing.com', + }); + mockRegisterUploadWithUploadThing.mockResolvedValue(undefined); + mockGetBaseUrl.mockReturnValue('http://localhost:3000'); + }); + + describe('authentication', () => { + it('should return 
auth error when preview auth fails', async () => { + mockCheckPreviewAuth.mockResolvedValue({ + response: { status: 'error', message: 'Preview mode is not enabled' }, + status: 403, + }); + + const request = createPostRequest({ + type: 'initialize-preview', + protocol: validProtocol, + assetMeta: [], + }); + + const response = await v1(request); + const body = (await response.json()) as JsonBody; + + expect(response.status).toBe(403); + expect(body.status).toBe('error'); + expect(body.message).toBe('Preview mode is not enabled'); + }); + + it('should return 401 when auth requires token and none provided', async () => { + mockCheckPreviewAuth.mockResolvedValue({ + response: { + status: 'error', + message: 'Authentication required. Provide session or API token.', + }, + status: 401, + }); + + const request = createPostRequest({ + type: 'initialize-preview', + protocol: validProtocol, + assetMeta: [], + }); + + const response = await v1(request); + + expect(response.status).toBe(401); + }); + }); + + describe('initialize-preview', () => { + it('should reject invalid protocols', async () => { + mockValidateAndMigrateProtocol.mockResolvedValue({ success: false }); + + const request = createPostRequest({ + type: 'initialize-preview', + protocol: { invalid: true }, + assetMeta: [], + }); + + const response = await v1(request); + const body = (await response.json()) as JsonBody; + + expect(response.status).toBe(400); + expect(body.status).toBe('rejected'); + }); + + it('should return ready when protocol already exists', async () => { + mockValidateAndMigrateProtocol.mockResolvedValue({ + success: true, + protocol: validProtocol, + }); + mockPrisma.previewProtocol.findFirst.mockResolvedValue({ + id: 'existing-id', + }); + + const request = createPostRequest({ + type: 'initialize-preview', + protocol: validProtocol, + assetMeta: [], + }); + + const response = await v1(request); + const body = (await response.json()) as JsonBody; + + expect(response.status).toBe(200); + 
expect(body.status).toBe('ready'); + expect(body.previewUrl).toContain('/preview/existing-id'); + }); + + it('should create protocol and return ready when no assets needed', async () => { + mockValidateAndMigrateProtocol.mockResolvedValue({ + success: true, + protocol: validProtocol, + }); + mockPrisma.previewProtocol.findFirst.mockResolvedValue(null); + mockPrisma.previewProtocol.create.mockResolvedValue({ + id: 'new-protocol-id', + }); + + const request = createPostRequest({ + type: 'initialize-preview', + protocol: validProtocol, + assetMeta: [], + }); + + const response = await v1(request); + const body = (await response.json()) as JsonBody; + + expect(response.status).toBe(200); + expect(body.status).toBe('ready'); + expect(body.previewUrl).toContain('/preview/new-protocol-id'); + expect(mockPrisma.previewProtocol.create).toHaveBeenCalled(); + expect(mockAddEvent).toHaveBeenCalledWith( + 'Preview Mode', + 'Preview protocol upload initiated', + ); + }); + + it('should return job-created with presigned URLs when assets need uploading', async () => { + mockValidateAndMigrateProtocol.mockResolvedValue({ + success: true, + protocol: validProtocol, + }); + mockPrisma.previewProtocol.findFirst.mockResolvedValue(null); + mockPrisma.previewProtocol.create.mockResolvedValue({ + id: 'new-protocol-id', + }); + mockGeneratePresignedUploadUrls.mockReturnValue([ + { + uploadUrl: 'https://upload.example.com/file', + fileKey: 'file-key-1', + publicUrl: 'https://cdn.example.com/file', + }, + ]); + + const request = createPostRequest({ + type: 'initialize-preview', + protocol: validProtocol, + assetMeta: [{ assetId: 'asset-1', name: 'image.png', size: 1024 }], + }); + + const response = await v1(request); + const body = (await response.json()) as JsonBody; + + expect(response.status).toBe(200); + expect(body.status).toBe('job-created'); + expect(body.protocolId).toBe('new-protocol-id'); + expect(body.presignedUrls).toHaveLength(1); + 
expect(body.presignedUrls![0]!.assetId).toBe('asset-1'); + expect(body.presignedUrls![0]!.headers).toEqual({}); + expect(body.presignedUrls![0]!.bodyFormat).toBe('raw'); + }); + + it('should generate direct UploadThing presigned URLs and register with UT for uploadthing provider', async () => { + mockGetStorageProvider.mockResolvedValue('uploadthing'); + mockValidateAndMigrateProtocol.mockResolvedValue({ + success: true, + protocol: validProtocol, + }); + mockPrisma.previewProtocol.findFirst.mockResolvedValue(null); + mockPrisma.previewProtocol.create.mockResolvedValue({ + id: 'new-protocol-id', + }); + mockGeneratePresignedUploadUrl.mockReturnValue({ + uploadUrl: 'https://sea1.ingest.uploadthing.com/file-key-1?signature=abc', + fileKey: 'file-key-1', + publicUrl: 'https://app-id.ufs.sh/f/file-key-1', + }); + + const request = createPostRequest({ + type: 'initialize-preview', + protocol: validProtocol, + assetMeta: [{ assetId: 'asset-1', name: 'image.png', size: 1024 }], + }); + + const response = await v1(request); + const body = (await response.json()) as JsonBody; + + expect(response.status).toBe(200); + expect(body.status).toBe('job-created'); + expect(body.presignedUrls).toHaveLength(1); + expect(body.presignedUrls![0]!.assetId).toBe('asset-1'); + expect(body.presignedUrls![0]!.url).toMatch( + /^https:\/\/sea1\.ingest\.uploadthing\.com\/file-key-1/, + ); + expect(body.presignedUrls![0]!.headers).toEqual({}); + expect(body.presignedUrls![0]!.bodyFormat).toBe('formdata'); + + expect(mockParseUploadThingToken).toHaveBeenCalled(); + expect(mockGeneratePresignedUploadUrl).toHaveBeenCalledWith( + expect.objectContaining({ + fileName: 'image.png', + fileSize: 1024, + }), + ); + expect(mockRegisterUploadWithUploadThing).toHaveBeenCalledWith( + expect.objectContaining({ + fileKeys: ['file-key-1'], + callbackUrl: 'http://localhost:3000/api/uploadthing', + }), + ); + expect(mockGeneratePresignedUploadUrls).not.toHaveBeenCalled(); + }); + + it('should return 500 when 
uploadthing provider is selected but token is not configured', async () => { + mockGetStorageProvider.mockResolvedValue('uploadthing'); + mockParseUploadThingToken.mockResolvedValue(null); + mockValidateAndMigrateProtocol.mockResolvedValue({ + success: true, + protocol: validProtocol, + }); + mockPrisma.previewProtocol.findFirst.mockResolvedValue(null); + + const request = createPostRequest({ + type: 'initialize-preview', + protocol: validProtocol, + assetMeta: [{ assetId: 'asset-1', name: 'image.png', size: 1024 }], + }); + + const response = await v1(request); + const body = (await response.json()) as JsonBody; + + expect(response.status).toBe(500); + expect(body.status).toBe('error'); + expect(body.message).toBe('UploadThing is not configured'); + expect(mockGeneratePresignedUploadUrl).not.toHaveBeenCalled(); + expect(mockRegisterUploadWithUploadThing).not.toHaveBeenCalled(); + expect(mockPrisma.previewProtocol.create).not.toHaveBeenCalled(); + }); + + it('should prune old preview protocols before creating new ones', async () => { + mockValidateAndMigrateProtocol.mockResolvedValue({ + success: true, + protocol: validProtocol, + }); + mockPrisma.previewProtocol.findFirst.mockResolvedValue(null); + mockPrisma.previewProtocol.create.mockResolvedValue({ + id: 'new-id', + }); + + const request = createPostRequest({ + type: 'initialize-preview', + protocol: validProtocol, + assetMeta: [], + }); + + await v1(request); + + expect(mockPrunePreviewProtocols).toHaveBeenCalled(); + }); + + it('should skip uploading for already-existing assets', async () => { + mockValidateAndMigrateProtocol.mockResolvedValue({ + success: true, + protocol: validProtocol, + }); + mockPrisma.previewProtocol.findFirst.mockResolvedValue(null); + mockPrisma.previewProtocol.create.mockResolvedValue({ + id: 'new-id', + }); + mockGetExistingAssets.mockResolvedValue([{ assetId: 'asset-1' }]); + + const request = createPostRequest({ + type: 'initialize-preview', + protocol: validProtocol, + assetMeta: 
[{ assetId: 'asset-1', name: 'image.png', size: 1024 }], + }); + + const response = await v1(request); + const body = (await response.json()) as JsonBody; + + expect(response.status).toBe(200); + expect(body.status).toBe('ready'); + expect(mockGeneratePresignedUploadUrls).not.toHaveBeenCalled(); + }); + }); + + describe('complete-preview', () => { + it('should mark protocol as complete and return preview URL', async () => { + mockPrisma.previewProtocol.findUnique.mockResolvedValue({ + id: 'protocol-1', + name: 'Test Protocol', + }); + mockPrisma.previewProtocol.update.mockResolvedValue({}); + + const request = createPostRequest({ + type: 'complete-preview', + protocolId: 'protocol-1', + }); + + const response = await v1(request); + const body = (await response.json()) as JsonBody; + + expect(response.status).toBe(200); + expect(body.status).toBe('ready'); + expect(body.previewUrl).toContain('/preview/protocol-1'); + expect(mockPrisma.previewProtocol.update).toHaveBeenCalledWith({ + where: { id: 'protocol-1' }, + data: { importedAt: expect.any(Date) as Date, isPending: false }, + }); + expect(mockAddEvent).toHaveBeenCalledWith( + 'Preview Mode', + 'Preview protocol upload completed', + ); + }); + + it('should return 404 when protocol not found', async () => { + mockPrisma.previewProtocol.findUnique.mockResolvedValue(null); + + const request = createPostRequest({ + type: 'complete-preview', + protocolId: 'nonexistent', + }); + + const response = await v1(request); + const body = (await response.json()) as JsonBody; + + expect(response.status).toBe(404); + expect(body.status).toBe('error'); + expect(body.message).toBe('Preview job not found'); + }); + }); + + describe('abort-preview', () => { + it('should delete protocol and return removed status', async () => { + mockPrisma.previewProtocol.findUnique.mockResolvedValue({ + id: 'protocol-1', + name: 'Test Protocol', + }); + mockPrisma.asset.findMany.mockResolvedValue([]); + 
mockPrisma.previewProtocol.delete.mockResolvedValue({}); + + const request = createPostRequest({ + type: 'abort-preview', + protocolId: 'protocol-1', + }); + + const response = await v1(request); + const body = (await response.json()) as JsonBody; + + expect(response.status).toBe(200); + expect(body.status).toBe('removed'); + expect(mockPrisma.previewProtocol.delete).toHaveBeenCalledWith({ + where: { id: 'protocol-1' }, + }); + expect(mockAddEvent).toHaveBeenCalledWith( + 'Protocol Uninstalled', + 'Preview protocol "Test Protocol" was aborted and removed', + ); + }); + + it('should return 404 when protocol not found', async () => { + mockPrisma.previewProtocol.findUnique.mockResolvedValue(null); + + const request = createPostRequest({ + type: 'abort-preview', + protocolId: 'nonexistent', + }); + + const response = await v1(request); + const body = (await response.json()) as JsonBody; + + expect(response.status).toBe(404); + expect(body.status).toBe('error'); + }); + + it('should delete assets from storage and database', async () => { + mockPrisma.previewProtocol.findUnique.mockResolvedValue({ + id: 'protocol-1', + name: 'Test Protocol', + }); + mockPrisma.asset.findMany.mockResolvedValue([ + { key: 'ut-key-1' }, + { key: 'ut-key-2' }, + ]); + mockPrisma.asset.deleteMany.mockResolvedValue({ count: 2 }); + mockPrisma.previewProtocol.delete.mockResolvedValue({}); + + const request = createPostRequest({ + type: 'abort-preview', + protocolId: 'protocol-1', + }); + + await v1(request); + + expect(mockDeleteAssets).toHaveBeenCalledWith(['ut-key-1', 'ut-key-2']); + expect(mockPrisma.asset.deleteMany).toHaveBeenCalledWith({ + where: { key: { in: ['ut-key-1', 'ut-key-2'] } }, + }); + }); + }); + + describe('error handling', () => { + it('should return 500 and capture exception on unexpected errors', async () => { + mockValidateAndMigrateProtocol.mockResolvedValue({ + success: true, + protocol: validProtocol, + }); + mockPrisma.previewProtocol.findFirst.mockRejectedValue( + 
new Error('Database connection lost'), + ); + + const request = createPostRequest({ + type: 'initialize-preview', + protocol: validProtocol, + assetMeta: [], + }); + + const response = await v1(request); + const body = (await response.json()) as JsonBody; + + expect(response.status).toBe(500); + expect(body.status).toBe('error'); + expect(body.message).toBe('Failed to process preview request'); + expect(mockCaptureException).toHaveBeenCalled(); + }); + }); +}); diff --git a/app/api/[version]/preview/_handlers/v1/__tests__/helpers.test.ts b/app/api/[version]/preview/_handlers/v1/__tests__/helpers.test.ts new file mode 100644 index 000000000..52554292c --- /dev/null +++ b/app/api/[version]/preview/_handlers/v1/__tests__/helpers.test.ts @@ -0,0 +1,99 @@ +import { NextRequest } from 'next/server'; +import { beforeEach, describe, expect, it, vi } from 'vitest'; + +const { + mockGetPreviewMode, + mockGetAppSetting, + mockGetServerSession, + mockRequireApiTokenAuth, +} = vi.hoisted(() => ({ + mockGetPreviewMode: vi.fn(), + mockGetAppSetting: vi.fn(), + mockGetServerSession: vi.fn(), + mockRequireApiTokenAuth: vi.fn(), +})); + +vi.mock('~/queries/appSettings', () => ({ + getPreviewMode: mockGetPreviewMode, + getAppSetting: mockGetAppSetting, +})); + +vi.mock('~/lib/auth/guards', () => ({ + getServerSession: mockGetServerSession, +})); + +vi.mock('~/app/api/_helpers/auth', () => ({ + requireApiTokenAuth: mockRequireApiTokenAuth, +})); + +import { checkPreviewAuth } from '../helpers'; + +describe('checkPreviewAuth', () => { + beforeEach(() => { + vi.clearAllMocks(); + }); + + it('should return 403 when preview mode is disabled', async () => { + mockGetPreviewMode.mockResolvedValue(false); + + const request = new NextRequest('http://localhost:3000/api/v1/preview'); + const result = await checkPreviewAuth(request); + + expect(result).not.toBeNull(); + expect(result!.status).toBe(403); + expect(result!.response.message).toBe('Preview mode is not enabled'); + }); + + it('should 
return null when preview mode is enabled and auth not required', async () => { + mockGetPreviewMode.mockResolvedValue(true); + mockGetAppSetting.mockResolvedValue(false); + + const request = new NextRequest('http://localhost:3000/api/v1/preview'); + const result = await checkPreviewAuth(request); + + expect(result).toBeNull(); + }); + + it('should return null when auth required and session exists', async () => { + mockGetPreviewMode.mockResolvedValue(true); + mockGetAppSetting.mockResolvedValue(true); + mockGetServerSession.mockResolvedValue({ user: { id: 'user-1' } }); + + const request = new NextRequest('http://localhost:3000/api/v1/preview'); + const result = await checkPreviewAuth(request); + + expect(result).toBeNull(); + expect(mockRequireApiTokenAuth).not.toHaveBeenCalled(); + }); + + it('should fall back to token auth when no session exists', async () => { + mockGetPreviewMode.mockResolvedValue(true); + mockGetAppSetting.mockResolvedValue(true); + mockGetServerSession.mockResolvedValue(null); + mockRequireApiTokenAuth.mockResolvedValue({ valid: true }); + + const request = new NextRequest('http://localhost:3000/api/v1/preview', { + headers: { Authorization: 'Bearer valid-token' }, + }); + const result = await checkPreviewAuth(request); + + expect(result).toBeNull(); + expect(mockRequireApiTokenAuth).toHaveBeenCalledWith(request); + }); + + it('should return 401 when no session and token auth fails', async () => { + mockGetPreviewMode.mockResolvedValue(true); + mockGetAppSetting.mockResolvedValue(true); + mockGetServerSession.mockResolvedValue(null); + mockRequireApiTokenAuth.mockResolvedValue({ + error: new Response(null, { status: 401 }), + }); + + const request = new NextRequest('http://localhost:3000/api/v1/preview'); + const result = await checkPreviewAuth(request); + + expect(result).not.toBeNull(); + expect(result!.status).toBe(401); + expect(result!.response.message).toContain('Authentication required'); + }); +}); diff --git 
a/app/api/[version]/preview/_handlers/v1/handler.ts b/app/api/[version]/preview/_handlers/v1/handler.ts new file mode 100644 index 000000000..3daae811e --- /dev/null +++ b/app/api/[version]/preview/_handlers/v1/handler.ts @@ -0,0 +1,351 @@ +import { Effect } from 'effect'; +import { after, type NextRequest } from 'next/server'; +import { hash } from 'ohash'; +import { addEvent } from '~/actions/activityFeed'; +import { prunePreviewProtocols } from '~/actions/preview-protocol-pruning'; +import { env } from '~/env'; +import { prisma } from '~/lib/db'; +import { Prisma } from '~/lib/db/generated/client'; +import { captureException, shutdownPostHog } from '~/lib/posthog-server'; +import { validateAndMigrateProtocol } from '~/lib/protocol/validateAndMigrateProtocol'; +import { getStorageLayer } from '~/lib/storage/layers/StorageLayer'; +import { + AssetStorage, + type PresignedUploadUrl, +} from '~/lib/storage/services/AssetStorage'; +import { + generatePresignedUploadUrl, + registerUploadWithUploadThing, +} from '~/lib/uploadthing/presigned'; +import { parseUploadThingToken } from '~/lib/uploadthing/token'; +import { getExistingAssets } from '~/queries/protocols'; +import { getStorageProvider } from '~/queries/storageProvider'; +import { ensureError } from '~/utils/ensureError'; +import { getBaseUrl } from '~/utils/getBaseUrl'; +import { extractApikeyAssetsFromManifest } from '~/utils/protocolImport'; +import { checkPreviewAuth, jsonResponse } from './helpers'; +import { + type AbortResponse, + type CompleteResponse, + type InitializeResponse, + type PresignedUrlWithAssetId, + type PreviewRequest, + type ReadyResponse, + type RejectedResponse, +} from './types'; + +export async function v1(request: NextRequest) { + const authError = await checkPreviewAuth(request); + + if (authError) { + return jsonResponse(authError.response, authError.status); + } + + const REJECTED_RESPONSE: RejectedResponse = { + status: 'rejected', + message: 'Invalid protocol', + }; + + try { + 
const body = (await request.json()) as PreviewRequest; + const { type } = body; + + switch (type) { + case 'initialize-preview': { + const { protocol: protocolJson, assetMeta } = body; + + // Validate and migrate protocol + const validationResult = await validateAndMigrateProtocol(protocolJson); + if (!validationResult.success) { + return jsonResponse(REJECTED_RESPONSE, 400); + } + + const protocolToValidate = validationResult.protocol; + + // Calculate protocol hash + const protocolHash = hash(protocolJson); + + // Prune existing preview protocols based on age limit + // - Pending protocols (abandoned uploads) are deleted after 15 minutes + // - Completed protocols are deleted after 24 hours + // Ensures that we don't accumulate old preview protocols + await prunePreviewProtocols(); + + // Check if this exact preview protocol already exists + const existingPreview = await prisma.previewProtocol.findFirst({ + where: { + hash: protocolHash, + }, + }); + + // If protocol exists, return ready immediately + if (existingPreview) { + const url = new URL(env.PUBLIC_URL ?? request.nextUrl.clone()); + url.pathname = `/preview/${existingPreview.id}`; + + const response: ReadyResponse = { + status: 'ready', + previewUrl: url.toString(), + }; + return jsonResponse(response); + } + const assetManifest = protocolToValidate.assetManifest ??
{}; + + // Extract apikey assets from the manifest — they store a value + const apikeyAssets = extractApikeyAssetsFromManifest(assetManifest); + + const allAssetIds = [ + ...assetMeta.map((a) => a.assetId), + ...apikeyAssets.map((a) => a.assetId), + ]; + const existingDbAssets = await getExistingAssets(allAssetIds); + const existingAssetIdSet = new Set( + existingDbAssets.map((a) => a.assetId), + ); + + const existingAssetIds = allAssetIds.filter((id) => + existingAssetIdSet.has(id), + ); + const newApikeyAssets = apikeyAssets.filter( + (a) => !existingAssetIdSet.has(a.assetId), + ); + const newAssets = assetMeta.filter( + (a) => !existingAssetIdSet.has(a.assetId), + ); + + // Both storage providers pre-compute a presigned upload URL per + // asset. The client (Architect) PUTs directly to the storage + // provider, bypassing our own server — this keeps us off the + // Netlify 4.5MB function-payload cap that made the old proxy model + // unusable for large videos. + const provider = + newAssets.length > 0 ? await getStorageProvider() : null; + + let presigned: PresignedUploadUrl[] = []; + + if (provider === 's3') { + const storageLayer = await getStorageLayer(); + presigned = await Effect.gen(function* () { + const assetStorage = yield* AssetStorage; + return yield* assetStorage.generatePresignedUploadUrls( + newAssets.map((a) => ({ name: a.name, size: a.size })), + ); + }).pipe(Effect.provide(storageLayer), Effect.runPromise); + } else if (provider === 'uploadthing') { + const tokenData = await parseUploadThingToken(); + if (!tokenData) { + const errorResponse: InitializeResponse = { + status: 'error', + message: 'UploadThing is not configured', + }; + return jsonResponse(errorResponse, 500); + } + + presigned = newAssets.map((asset) => + generatePresignedUploadUrl({ + fileName: asset.name, + fileSize: asset.size, + tokenData, + }), + ); + + // Required for UploadThing to accept CORS preflight on the PUT + // from external clients. 
No-op for the actual storage result. + await registerUploadWithUploadThing({ + fileKeys: presigned.map((p) => p.fileKey), + tokenData, + callbackUrl: `${getBaseUrl()}/api/uploadthing`, + }); + } + + const assetsToCreate = presigned.map((entry, i) => { + const asset = newAssets[i]!; + const manifestEntry = assetManifest[asset.assetId]; + return { + assetId: asset.assetId, + key: entry.fileKey, + name: asset.name, + type: manifestEntry?.type ?? 'file', + url: entry.publicUrl, + size: asset.size, + }; + }); + + const protocol = await prisma.previewProtocol.create({ + data: { + hash: protocolHash, + name: `preview-${Date.now()}`, + schemaVersion: protocolToValidate.schemaVersion, + description: protocolToValidate.description, + lastModified: protocolToValidate.lastModified + ? new Date(protocolToValidate.lastModified) + : new Date(), + stages: protocolToValidate.stages, + codebook: protocolToValidate.codebook, + experiments: protocolToValidate.experiments ?? Prisma.JsonNull, + isPending: newAssets.length > 0, + assets: { + create: [...assetsToCreate, ...newApikeyAssets], + connect: existingAssetIds.map((assetId) => ({ assetId })), + }, + }, + }); + + void addEvent('Preview Mode', `Preview protocol upload initiated`); + + if (newAssets.length === 0) { + const url = new URL(env.PUBLIC_URL ?? request.nextUrl.clone()); + url.pathname = `/preview/${protocol.id}`; + + const response: InitializeResponse = { + status: 'ready', + previewUrl: url.toString(), + }; + return jsonResponse(response); + } + + // UploadThing's ingest endpoint only accepts multipart/form-data; + // S3 presigned PUTs take the file as the raw request body. + const bodyFormat: 'raw' | 'formdata' = + provider === 'uploadthing' ? 
'formdata' : 'raw'; + + const presignedUrls: PresignedUrlWithAssetId[] = newAssets.map( + (asset, i) => ({ + assetId: asset.assetId, + url: presigned[i]!.uploadUrl, + headers: {}, + bodyFormat, + }), + ); + + const response: InitializeResponse = { + status: 'job-created', + protocolId: protocol.id, + presignedUrls, + }; + return jsonResponse(response); + } + + case 'complete-preview': { + const { protocolId } = body; + + // Find the preview protocol + const protocol = await prisma.previewProtocol.findUnique({ + where: { id: protocolId }, + }); + + if (!protocol) { + const response: CompleteResponse = { + status: 'error', + message: 'Preview job not found', + }; + return jsonResponse(response, 404); + } + + // Update timestamp and clear pending flag to mark completion + await prisma.previewProtocol.update({ + where: { id: protocol.id }, + data: { importedAt: new Date(), isPending: false }, + }); + + void addEvent('Preview Mode', `Preview protocol upload completed`); + + const url = new URL(env.PUBLIC_URL ?? 
request.nextUrl.clone()); + url.pathname = `/preview/${protocol.id}`; + + const response: CompleteResponse = { + status: 'ready', + previewUrl: url.toString(), + }; + return jsonResponse(response); + } + + case 'abort-preview': { + const { protocolId } = body; + + // Find the preview protocol + const protocol = await prisma.previewProtocol.findUnique({ + where: { id: protocolId }, + }); + + if (!protocol) { + const response: AbortResponse = { + status: 'error', + message: 'Preview job not found', + }; + return jsonResponse(response, 404); + } + + // Find assets that are ONLY associated with this preview protocol + // (not shared with any regular protocols or other preview protocols) + const assetsToDelete = await prisma.asset.findMany({ + where: { + AND: [ + { previewProtocols: { some: { id: protocolId } } }, + { previewProtocols: { every: { id: protocolId } } }, + { protocols: { none: {} } }, + ], + }, + select: { key: true }, + }); + + // Delete assets from UploadThing (best effort) + if (assetsToDelete.length > 0) { + try { + const storageLayer = await getStorageLayer(); + await Effect.gen(function* () { + const assetStorage = yield* AssetStorage; + yield* assetStorage.deleteAssets( + assetsToDelete.map((a) => a.key), + ); + }).pipe(Effect.provide(storageLayer), Effect.runPromise); + } catch (error) { + // eslint-disable-next-line no-console + console.error('Error deleting preview protocol assets:', error); + } + } + + // Delete assets from database + if (assetsToDelete.length > 0) { + await prisma.asset.deleteMany({ + where: { + key: { + in: assetsToDelete.map((a) => a.key), + }, + }, + }); + } + + // Delete the preview protocol + await prisma.previewProtocol.delete({ + where: { id: protocolId }, + }); + + void addEvent( + 'Protocol Uninstalled', + `Preview protocol "${protocol.name}" was aborted and removed`, + ); + + const response: AbortResponse = { + status: 'removed', + protocolId: protocolId, + }; + return jsonResponse(response); + } + } + } catch (e) { + 
const error = ensureError(e);
+    await captureException(error);
+    after(async () => {
+      await shutdownPostHog();
+    });
+
+    return jsonResponse(
+      {
+        status: 'error',
+        message: 'Failed to process preview request',
+      },
+      500,
+    );
+  }
+}
diff --git a/app/api/[version]/preview/_handlers/v1/helpers.ts b/app/api/[version]/preview/_handlers/v1/helpers.ts
new file mode 100644
index 000000000..6378dec4d
--- /dev/null
+++ b/app/api/[version]/preview/_handlers/v1/helpers.ts
@@ -0,0 +1,52 @@
+import { type NextRequest, NextResponse } from 'next/server';
+import { requireApiTokenAuth } from '~/app/api/_helpers/auth';
+import { getServerSession } from '~/lib/auth/guards';
+import { getAppSetting, getPreviewMode } from '~/queries/appSettings';
+import type { AuthError, PreviewResponse } from './types';
+
+export const corsHeaders = {
+  'Access-Control-Allow-Origin': '*',
+  'Access-Control-Allow-Methods': 'POST, OPTIONS',
+  'Access-Control-Allow-Headers': 'Content-Type, Authorization',
+};
+
+export function jsonResponse(data: PreviewResponse, status = 200) {
+  return NextResponse.json(data, { status, headers: corsHeaders });
+}
+
+export async function checkPreviewAuth(
+  req: NextRequest,
+): Promise<AuthError | null> {
+  const previewMode = await getPreviewMode();
+  if (!previewMode) {
+    return {
+      response: {
+        status: 'error',
+        message: 'Preview mode is not enabled',
+      },
+      status: 403,
+    };
+  }
+
+  const requireAuth = await getAppSetting('previewModeRequireAuth');
+
+  if (requireAuth) {
+    const session = await getServerSession();
+
+    if (!session) {
+      const result = await requireApiTokenAuth(req);
+
+      if ('error' in result) {
+        return {
+          response: {
+            status: 'error',
+            message: 'Authentication required. Provide session or API token.',
+          },
+          status: 401,
+        };
+      }
+    }
+  }
+
+  return null;
+}
diff --git a/app/api/[version]/preview/_handlers/v1/types.ts b/app/api/[version]/preview/_handlers/v1/types.ts
new file mode 100644
index 000000000..8490e8277
--- /dev/null
+++ b/app/api/[version]/preview/_handlers/v1/types.ts
@@ -0,0 +1,97 @@
+/*
+  Types that cover the preview message exchange
+  Lives here and in Architect (/architect-vite/src/utils/preview/types.ts).
+  This must be kept in sync and updated in both places.
+  TODO: Move to shared package when in the monorepo
+*/
+
+import { type VersionedProtocol } from '@codaco/protocol-validation';
+
+// REQUEST TYPES
+type AssetMetadata = {
+  assetId: string;
+  name: string;
+  size: number;
+};
+
+type InitializePreviewRequest = {
+  type: 'initialize-preview';
+  protocol: VersionedProtocol;
+  assetMeta: AssetMetadata[];
+};
+
+type CompletePreviewRequest = {
+  type: 'complete-preview';
+  protocolId: string;
+};
+
+type AbortPreviewRequest = {
+  type: 'abort-preview';
+  protocolId: string;
+};
+
+export type PreviewRequest =
+  | InitializePreviewRequest
+  | CompletePreviewRequest
+  | AbortPreviewRequest;
+
+// RESPONSE TYPES
+
+export type PresignedUrlWithAssetId = {
+  assetId: string;
+  url: string;
+  /** Headers the client must include on the upload PUT request. */
+  headers: Record<string, string>;
+  /**
+   * How to wrap the file bytes when PUTting to `url`:
+   * - `raw`: PUT the file as the request body (S3 presigned URLs).
+   * - `formdata`: wrap the file in a `FormData` with field name `file`
+   *   (UploadThing ingest endpoint requires multipart/form-data).
+ */ + bodyFormat: 'raw' | 'formdata'; +}; + +type JobCreatedResponse = { + status: 'job-created'; + protocolId: string; + presignedUrls: PresignedUrlWithAssetId[]; +}; + +// No assets to upload +export type ReadyResponse = { + status: 'ready'; + previewUrl: string; +}; + +export type RejectedResponse = { + status: 'rejected'; + message: 'Invalid protocol'; +}; + +export type ErrorResponse = { + status: 'error'; + message: string; +}; + +type RemovedResponse = { + status: 'removed'; + protocolId: string; +}; + +export type InitializeResponse = + | JobCreatedResponse + | RejectedResponse + | ErrorResponse + | ReadyResponse; +export type CompleteResponse = ReadyResponse | ErrorResponse; +export type AbortResponse = RemovedResponse | ErrorResponse; + +export type PreviewResponse = + | InitializeResponse + | CompleteResponse + | AbortResponse; + +export type AuthError = { + response: ErrorResponse; + status: number; +}; diff --git a/app/api/[version]/preview/route.ts b/app/api/[version]/preview/route.ts new file mode 100644 index 000000000..99013ce6f --- /dev/null +++ b/app/api/[version]/preview/route.ts @@ -0,0 +1,17 @@ +import { NextResponse } from 'next/server'; +import { createVersionedHandler } from '~/app/api/_helpers/versioning'; +import { v1 } from './_handlers/v1/handler'; +import { corsHeaders } from './_handlers/v1/helpers'; + +export function OPTIONS() { + return new NextResponse(null, { + status: 204, + headers: corsHeaders, + }); +} + +const handlers = { + v1: { POST: v1 }, +}; + +export const POST = createVersionedHandler(handlers, 'POST'); diff --git a/app/api/_helpers/__tests__/auth.test.ts b/app/api/_helpers/__tests__/auth.test.ts new file mode 100644 index 000000000..a8d0f297f --- /dev/null +++ b/app/api/_helpers/__tests__/auth.test.ts @@ -0,0 +1,83 @@ +import { NextRequest, NextResponse } from 'next/server'; +import { beforeEach, describe, expect, it, vi } from 'vitest'; + +const { mockVerifyApiToken } = vi.hoisted(() => ({ + mockVerifyApiToken: 
vi.fn(), +})); + +vi.mock('~/actions/apiTokens', () => ({ + verifyApiToken: mockVerifyApiToken, +})); + +import { createCorsHeaders, requireApiTokenAuth } from '../auth'; + +describe('API auth helpers', () => { + beforeEach(() => { + vi.clearAllMocks(); + }); + + describe('createCorsHeaders', () => { + it('should return headers with specified methods', () => { + const headers = createCorsHeaders('GET, POST'); + + expect(headers['Access-Control-Allow-Origin']).toBe('*'); + expect(headers['Access-Control-Allow-Methods']).toBe('GET, POST'); + expect(headers['Access-Control-Allow-Headers']).toBe( + 'Content-Type, Authorization', + ); + }); + }); + + describe('requireApiTokenAuth', () => { + it('should return error when no authorization header is present', async () => { + const request = new NextRequest('http://localhost:3000/api/v1/test'); + + const result = await requireApiTokenAuth(request); + + expect('error' in result).toBe(true); + if ('error' in result) { + expect(result.error).toBeInstanceOf(NextResponse); + const body = (await result.error.json()) as { error: string }; + expect(body.error).toContain('Authentication required'); + } + }); + + it('should return error when token is invalid', async () => { + mockVerifyApiToken.mockResolvedValue({ valid: false }); + + const request = new NextRequest('http://localhost:3000/api/v1/test', { + headers: { Authorization: 'Bearer invalid-token' }, + }); + + const result = await requireApiTokenAuth(request); + + expect('error' in result).toBe(true); + expect(mockVerifyApiToken).toHaveBeenCalledWith('invalid-token'); + }); + + it('should return valid when token is valid', async () => { + mockVerifyApiToken.mockResolvedValue({ valid: true }); + + const request = new NextRequest('http://localhost:3000/api/v1/test', { + headers: { Authorization: 'Bearer valid-token' }, + }); + + const result = await requireApiTokenAuth(request); + + expect(result).toEqual({ valid: true }); + 
expect(mockVerifyApiToken).toHaveBeenCalledWith('valid-token'); + }); + + it('should extract token from Bearer prefix', async () => { + mockVerifyApiToken.mockResolvedValue({ valid: true }); + + const request = new NextRequest('http://localhost:3000/api/v1/test', { + headers: { Authorization: 'Bearer my-secret-token' }, + }); + + await requireApiTokenAuth(request); + + expect(mockVerifyApiToken).toHaveBeenCalledWith('my-secret-token'); + }); + }); +}); diff --git a/app/api/_helpers/__tests__/versioning.test.ts b/app/api/_helpers/__tests__/versioning.test.ts new file mode 100644 index 000000000..f45a771d0 --- /dev/null +++ b/app/api/_helpers/__tests__/versioning.test.ts @@ -0,0 +1,64 @@ +import { NextRequest } from 'next/server'; +import { beforeEach, describe, expect, it, vi } from 'vitest'; +import { createVersionedHandler } from '../versioning'; + +describe('createVersionedHandler', () => { + const mockV1Handler = vi.fn(); + const mockV2Handler = vi.fn(); + + const handlers = { + v1: { GET: mockV1Handler, POST: mockV1Handler }, + v2: { GET: mockV2Handler }, + }; + + beforeEach(() => { + vi.clearAllMocks(); + mockV1Handler.mockResolvedValue(Response.json({ ok: true })); + mockV2Handler.mockResolvedValue(Response.json({ ok: true })); + }); + + it('should route to the correct version handler', async () => { + const handler = createVersionedHandler(handlers, 'GET'); + const request = new NextRequest('http://localhost:3000/api/v1/test'); + + await handler(request, { params: Promise.resolve({ version: 'v1' }) }); + + expect(mockV1Handler).toHaveBeenCalledWith(request); + }); + + it('should return 404 for unsupported versions', async () => { + const handler = createVersionedHandler(handlers, 'GET'); + const request = new NextRequest('http://localhost:3000/api/v99/test'); + + const response = await handler(request, { + params: Promise.resolve({ version: 'v99' }), + }); + + expect(response.status).toBe(404); + const body = (await response.json()) as { error: string }; + 
expect(body.error).toContain('Unsupported API version'); + }); + + it('should return 405 for unsupported methods', async () => { + const handler = createVersionedHandler(handlers, 'DELETE'); + const request = new NextRequest('http://localhost:3000/api/v1/test'); + + const response = await handler(request, { + params: Promise.resolve({ version: 'v1' }), + }); + + expect(response.status).toBe(405); + const body = (await response.json()) as { error: string }; + expect(body.error).toContain('DELETE not supported'); + }); + + it('should support multiple versions', async () => { + const handler = createVersionedHandler(handlers, 'GET'); + const request = new NextRequest('http://localhost:3000/api/v2/test'); + + await handler(request, { params: Promise.resolve({ version: 'v2' }) }); + + expect(mockV2Handler).toHaveBeenCalledWith(request); + expect(mockV1Handler).not.toHaveBeenCalled(); + }); +}); diff --git a/app/api/_helpers/auth.ts b/app/api/_helpers/auth.ts new file mode 100644 index 000000000..93b8b95ee --- /dev/null +++ b/app/api/_helpers/auth.ts @@ -0,0 +1,36 @@ +import { type NextRequest, NextResponse } from 'next/server'; +import { verifyApiToken } from '~/actions/apiTokens'; + +export function createCorsHeaders(methods: string) { + return { + 'Access-Control-Allow-Origin': '*', + 'Access-Control-Allow-Methods': methods, + 'Access-Control-Allow-Headers': 'Content-Type, Authorization', + }; +} + +export async function requireApiTokenAuth( + req: NextRequest, +): Promise<{ valid: true } | { error: NextResponse }> { + const authHeader = req.headers.get('authorization'); + const token = authHeader?.replace('Bearer ', ''); + + if (!token) { + return { + error: NextResponse.json( + { error: 'Authentication required. Provide a Bearer token.' 
},
+        { status: 401 },
+      ),
+    };
+  }
+
+  const { valid } = await verifyApiToken(token);
+
+  if (!valid) {
+    return {
+      error: NextResponse.json({ error: 'Invalid API token' }, { status: 401 }),
+    };
+  }
+
+  return { valid: true };
+}
diff --git a/app/api/_helpers/versioning.ts b/app/api/_helpers/versioning.ts
new file mode 100644
index 000000000..1dc54e501
--- /dev/null
+++ b/app/api/_helpers/versioning.ts
@@ -0,0 +1,34 @@
+import { type HTTP_METHOD } from 'next/dist/server/web/http';
+import { type NextRequest } from 'next/server';
+
+type Handler = (request: NextRequest) => Response | Promise<Response>;
+
+export function createVersionedHandler(
+  handlers: Record<string, Partial<Record<HTTP_METHOD, Handler>>>,
+  method: HTTP_METHOD,
+) {
+  return async (
+    request: NextRequest,
+    { params }: { params: Promise<{ version: string }> },
+  ) => {
+    const { version } = await params;
+
+    const versionHandlers = handlers[version];
+    if (!versionHandlers) {
+      return Response.json(
+        { error: `Unsupported API version: ${version}` },
+        { status: 404 },
+      );
+    }
+
+    const handler = versionHandlers[method];
+    if (!handler) {
+      return Response.json(
+        { error: `${method} not supported in ${version}` },
+        { status: 405 },
+      );
+    }
+
+    return handler(request);
+  };
+}
diff --git a/app/api/analytics/route.ts b/app/api/analytics/route.ts
deleted file mode 100644
index ba6cf4052..000000000
--- a/app/api/analytics/route.ts
+++ /dev/null
@@ -1,15 +0,0 @@
-import { createRouteHandler } from '@codaco/analytics';
-import { type NextRequest } from 'next/server';
-import { getDisableAnalytics, getInstallationId } from '~/queries/appSettings';
-
-const routeHandler = async (request: NextRequest) => {
-  const installationId = await getInstallationId();
-  const disableAnalytics = await getDisableAnalytics();
-
-  return createRouteHandler({
-    installationId: installationId ?? 
'Unknown Installation ID', - disableAnalytics, - })(request); -}; - -export { routeHandler as POST }; diff --git a/app/api/export-interviews/route.ts b/app/api/export-interviews/route.ts new file mode 100644 index 000000000..45d886bd5 --- /dev/null +++ b/app/api/export-interviews/route.ts @@ -0,0 +1,106 @@ +import { Effect, Queue, Stream } from 'effect'; +import { addEvent } from '~/actions/activityFeed'; +import { requireApiAuth } from '~/lib/auth/guards'; +import { safeRevalidateTag } from '~/lib/cache'; +import { type ExportEvent, formatSSE } from '~/lib/export/exportEvents'; +import { exportPipeline } from '~/lib/export/pipeline'; +import { + captureEvent, + captureException, + shutdownPostHog, +} from '~/lib/posthog-server'; +import { getStorageLayer } from '~/lib/storage/layers/StorageLayer'; +import { exportInterviewsSchema } from '~/schemas/export'; + +export async function POST(request: Request) { + let username: string; + try { + const session = await requireApiAuth(); + username = session.user.username; + } catch { + return new Response(JSON.stringify({ error: 'Unauthorized' }), { + status: 401, + }); + } + + let body: unknown; + try { + body = await request.json(); + } catch { + return new Response(JSON.stringify({ error: 'Invalid JSON body' }), { + status: 400, + }); + } + + const parsed = exportInterviewsSchema.safeParse(body); + + if (!parsed.success) { + return new Response(JSON.stringify({ error: 'Invalid request body' }), { + status: 400, + }); + } + + const { interviewIds, exportOptions } = parsed.data; + + const storageLayer = await getStorageLayer(); + + const program = Effect.gen(function* () { + const queue = yield* Queue.unbounded(); + + yield* exportPipeline(interviewIds, exportOptions, queue).pipe( + Effect.tap((result) => + Effect.sync(() => { + safeRevalidateTag(['getInterviews', 'activityFeed']); + void addEvent( + 'Data Exported', + `${username} exported data for ${String(interviewIds.length)} interview(s)`, + ); + void 
captureEvent('Data Exported', { + interviewCount: interviewIds.length, + }).then(() => shutdownPostHog()); + }).pipe( + Effect.andThen( + Queue.offer(queue, { + type: 'complete', + zipUrl: result.zipUrl ?? '', + zipKey: result.zipKey ?? '', + }), + ), + ), + ), + Effect.tapError((error) => + Effect.sync(() => { + void captureException(error).then(() => shutdownPostHog()); + }).pipe( + Effect.andThen( + Queue.offer(queue, { + type: 'error', + message: error.userMessage, + }), + ), + ), + ), + Effect.catchAll(() => Effect.void), + Effect.ensuring(Queue.shutdown(queue)), + Effect.provide(storageLayer), + Effect.forkDaemon, + ); + + const encoder = new TextEncoder(); + const sseStream = Stream.fromQueue(queue).pipe( + Stream.map((event) => encoder.encode(formatSSE(event))), + ); + + return Stream.toReadableStream(sseStream); + }); + + const readableStream = await Effect.runPromise(program); + + return new Response(readableStream, { + headers: { + 'Content-Type': 'text/event-stream', + 'Cache-Control': 'no-cache', + 'Connection': 'keep-alive', + }, + }); +} diff --git a/app/api/generate-test-interviews/route.ts b/app/api/generate-test-interviews/route.ts new file mode 100644 index 000000000..242d49549 --- /dev/null +++ b/app/api/generate-test-interviews/route.ts @@ -0,0 +1,181 @@ +import { createId } from '@paralleldrive/cuid2'; +import { addEvent } from '~/actions/activityFeed'; +import { requireApiAuth } from '~/lib/auth/guards'; +import { prisma } from '~/lib/db'; +import { generateNetwork } from '~/lib/synthetic-interviews/generateNetwork'; +import { generateSyntheticInterviewsSchema } from '~/schemas/synthetic-interviews'; + +export async function POST(request: Request) { + try { + await requireApiAuth(); + } catch { + return new Response(JSON.stringify({ error: 'Unauthorized' }), { + status: 401, + }); + } + + let body: unknown; + try { + body = await request.json(); + } catch { + return new Response(JSON.stringify({ error: 'Invalid JSON body' }), { + status: 400, 
+ }); + } + + const parsed = generateSyntheticInterviewsSchema.safeParse(body); + + if (!parsed.success) { + return new Response(JSON.stringify({ error: 'Invalid request body' }), { + status: 400, + }); + } + + const { protocolId, count, simulateDropOut, respectSkipLogicAndFiltering } = + parsed.data; + + const protocol = await prisma.protocol.findUnique({ + where: { id: protocolId }, + }); + + if (!protocol) { + return new Response(JSON.stringify({ error: 'Protocol not found' }), { + status: 404, + }); + } + + const stream = new ReadableStream({ + async start(controller) { + const encoder = new TextEncoder(); + const send = (data: Record) => { + controller.enqueue(encoder.encode(`data: ${JSON.stringify(data)}\n\n`)); + }; + + try { + const stages = protocol.stages as { id: string }[]; + const typedStages = stages as Parameters[1]; + const typedCodebook = protocol.codebook as Parameters< + typeof generateNetwork + >[0]; + + const genOptions = { simulateDropOut, respectSkipLogicAndFiltering }; + + let completedCount = 0; + const incompleteInterviewIds: string[] = []; + + for (let i = 0; i < count; i++) { + const { network, stageMetadata, currentStep, droppedOut } = + generateNetwork(typedCodebook, typedStages, undefined, genOptions); + + const isCompleted = !droppedOut; + if (isCompleted) { + completedCount++; + } + + const participantIdentifier = `test-${createId()}`; + const startTime = new Date( + Date.now() - Math.floor(Math.random() * 3600000), + ); + const finishTime = isCompleted + ? 
new Date( + startTime.getTime() + + Math.floor(Math.random() * 1800000) + + 300000, + ) + : null; + + const created = await prisma.interview.create({ + data: { + network: network as object, + currentStep, + startTime, + finishTime, + isSynthetic: true, + stageMetadata: stageMetadata as object | undefined, + participant: { + create: { + identifier: participantIdentifier, + label: participantIdentifier, + isSynthetic: true, + }, + }, + protocol: { + connect: { id: protocolId }, + }, + }, + }); + + if (!isCompleted) { + incompleteInterviewIds.push(created.id); + } + + send({ type: 'progress', current: i + 1, total: count }); + } + + // Enforce 10% minimum completion when drop-out is enabled. + // Regenerate incomplete interviews from this batch with drop-out + // disabled and update them in-place. + if (simulateDropOut) { + const minCompleted = Math.max(1, Math.ceil(count * 0.1)); + + if (completedCount < minCompleted) { + const deficit = minCompleted - completedCount; + const toFix = incompleteInterviewIds.slice(0, deficit); + + const incompleteInterviews = await prisma.interview.findMany({ + where: { id: { in: toFix } }, + select: { id: true, startTime: true }, + }); + + for (const interview of incompleteInterviews) { + const { network, stageMetadata, currentStep } = generateNetwork( + typedCodebook, + typedStages, + undefined, + { + ...genOptions, + simulateDropOut: false, + }, + ); + + await prisma.interview.update({ + where: { id: interview.id }, + data: { + network: network as object, + currentStep, + stageMetadata: stageMetadata as object | undefined, + finishTime: new Date( + interview.startTime.getTime() + + Math.floor(Math.random() * 1800000) + + 300000, + ), + }, + }); + } + } + } + + void addEvent( + 'Synthetic Data Generated', + `Generated ${String(count)} synthetic interviews for protocol "${protocol.name}"`, + ); + + send({ type: 'complete', created: count }); + } catch (error) { + const message = + error instanceof Error ? 
error.message : 'Unknown error'; + send({ type: 'error', message }); + } finally { + controller.close(); + } + }, + }); + + return new Response(stream, { + headers: { + 'Content-Type': 'text/event-stream', + 'Cache-Control': 'no-cache', + 'Connection': 'keep-alive', + }, + }); +} diff --git a/app/api/health/route.ts b/app/api/health/route.ts new file mode 100644 index 000000000..bd6b6aec9 --- /dev/null +++ b/app/api/health/route.ts @@ -0,0 +1,134 @@ +import { type NextRequest, NextResponse } from 'next/server'; + +import { env } from '~/env.js'; + +type HealthStatus = 'healthy' | 'degraded' | 'unhealthy'; + +type HealthCheck = { + name: string; + status: HealthStatus; + duration: number; + error?: string; + details?: Record; +}; + +type HealthResponse = { + status: HealthStatus; + timestamp: string; + uptime: number; + version?: string; + checks: HealthCheck[]; +}; + +function checkBasicHealth(): HealthCheck { + const start = performance.now(); + + try { + // Basic health check - just verify the service is running + const nodeVersion = process.version; + const duration = performance.now() - start; + + return { + name: 'basic', + status: 'healthy', + duration: Math.round(duration), + details: { + nodeVersion, + environment: env.NODE_ENV, + uptime: Math.round(process.uptime()), + }, + }; + } catch (error) { + const duration = performance.now() - start; + + return { + name: 'basic', + status: 'unhealthy', + duration: Math.round(duration), + error: + error instanceof Error ? 
error.message : 'Basic health check failed', + }; + } +} + +function getOverallStatus(checks: HealthCheck[]): HealthStatus { + const hasUnhealthy = checks.some((check) => check.status === 'unhealthy'); + const hasDegraded = checks.some((check) => check.status === 'degraded'); + + if (hasUnhealthy) return 'unhealthy'; + if (hasDegraded) return 'degraded'; + return 'healthy'; +} + +function getStatusCode(status: HealthStatus): number { + switch (status) { + case 'healthy': + return 200; + case 'degraded': + return 200; // Still operational + case 'unhealthy': + return 503; // Service Unavailable + } +} + +export function GET(_request: NextRequest): NextResponse { + const startTime = performance.now(); + + try { + // Run health checks + const basicCheck = checkBasicHealth(); + const checks = [basicCheck]; + + const overallStatus = getOverallStatus(checks); + const statusCode = getStatusCode(overallStatus); + + const response: HealthResponse = { + status: overallStatus, + timestamp: new Date().toISOString(), + uptime: Math.round(process.uptime()), + version: env.APP_VERSION ?? 'unknown', + checks, + }; + + const totalDuration = Math.round(performance.now() - startTime); + + return NextResponse.json( + { + ...response, + duration: totalDuration, + }, + { + status: statusCode, + headers: { + 'Content-Type': 'application/json', + 'Cache-Control': 'no-cache, no-store, must-revalidate', + 'X-Health-Check': 'true', + }, + }, + ); + } catch (error) { + // Fallback error response + const response: HealthResponse = { + status: 'unhealthy', + timestamp: new Date().toISOString(), + uptime: Math.round(process.uptime()), + checks: [ + { + name: 'health_check', + status: 'unhealthy', + duration: Math.round(performance.now() - startTime), + error: error instanceof Error ? 
error.message : 'Health check failed', + }, + ], + }; + + return NextResponse.json(response, { + status: 503, + headers: { + 'Content-Type': 'application/json', + 'Cache-Control': 'no-cache, no-store, must-revalidate', + 'X-Health-Check': 'true', + }, + }); + } +} diff --git a/app/api/interviews/[interviewId]/finish/route.ts b/app/api/interviews/[interviewId]/finish/route.ts new file mode 100644 index 000000000..5b0560a61 --- /dev/null +++ b/app/api/interviews/[interviewId]/finish/route.ts @@ -0,0 +1,54 @@ +import { cookies } from 'next/headers'; +import { after, NextResponse } from 'next/server'; +import { addEvent } from '~/actions/activityFeed'; +import { safeRevalidateTag } from '~/lib/cache'; +import { prisma } from '~/lib/db'; +import { captureException, shutdownPostHog } from '~/lib/posthog-server'; +import { ensureError } from '~/utils/ensureError'; + +export async function POST( + _request: Request, + { params }: { params: Promise<{ interviewId: string }> }, +) { + const { interviewId } = await params; + + try { + const updatedInterview = await prisma.interview.update({ + where: { id: interviewId }, + data: { finishTime: new Date() }, + include: { participant: true }, + }); + + const { label, identifier } = updatedInterview.participant; + const participantDisplay = label ? `${label} (${identifier})` : identifier; + + const network = updatedInterview.network; + + void addEvent( + 'Interview Completed', + `Participant "${participantDisplay}" completed an interview`, + { + nodeCount: network?.nodes?.length ?? 0, + edgeCount: network?.edges?.length ?? 
0, + }, + ); + + (await cookies()).set(updatedInterview.protocolId, 'completed'); + + safeRevalidateTag(['getInterviews', 'summaryStatistics', 'activityFeed']); + + return NextResponse.json({ success: true }); + } catch (e) { + const error = ensureError(e); + + after(async () => { + await captureException(error, { interviewId }); + await shutdownPostHog(); + }); + + return NextResponse.json( + { error: 'Failed to finish interview' }, + { status: 500 }, + ); + } +} diff --git a/app/api/storage/presign/route.ts b/app/api/storage/presign/route.ts new file mode 100644 index 000000000..2434dfc28 --- /dev/null +++ b/app/api/storage/presign/route.ts @@ -0,0 +1,62 @@ +import { Effect } from 'effect'; +import { z } from 'zod'; +import { requireApiAuth } from '~/lib/auth/guards'; +import { getStorageLayer } from '~/lib/storage/layers/StorageLayer'; +import { AssetStorage } from '~/lib/storage/services/AssetStorage'; +import { getStorageProvider } from '~/queries/storageProvider'; + +const requestSchema = z.object({ + files: z.array( + z.object({ + name: z.string(), + size: z.number().positive(), + }), + ), +}); + +export async function POST(request: Request) { + try { + await requireApiAuth(); + } catch { + return Response.json({ error: 'Unauthorized' }, { status: 401 }); + } + + let body: unknown; + try { + body = await request.json(); + } catch { + return Response.json({ error: 'Invalid JSON body' }, { status: 400 }); + } + + const parsed = requestSchema.safeParse(body); + if (!parsed.success) { + return Response.json({ error: 'Invalid request body' }, { status: 400 }); + } + + const provider = await getStorageProvider(); + + // UploadThing's ingest protocol is not a plain presigned-PUT; the client + // must use the UploadThing SDK's uploader directly, which hits + // /api/uploadthing. We only generate presigned URLs for S3. 
+ if (provider === 'uploadthing') { + return Response.json({ provider: 'uploadthing' }); + } + + try { + const storageLayer = await getStorageLayer(); + + const urls = await Effect.gen(function* () { + const assetStorage = yield* AssetStorage; + return yield* assetStorage.generatePresignedUploadUrls(parsed.data.files); + }).pipe(Effect.provide(storageLayer), Effect.runPromise); + + return Response.json({ provider: 's3', urls }); + } catch (error) { + // eslint-disable-next-line no-console + console.error('Failed to generate presigned URLs:', error); + return Response.json( + { error: 'Failed to generate upload URLs' }, + { status: 500 }, + ); + } +} diff --git a/app/api/test/exports/[filename]/route.ts b/app/api/test/exports/[filename]/route.ts new file mode 100644 index 000000000..098d63794 --- /dev/null +++ b/app/api/test/exports/[filename]/route.ts @@ -0,0 +1,37 @@ +import { readFile } from 'node:fs/promises'; +import { join } from 'node:path'; +import { NextResponse } from 'next/server'; +import { env } from '~/env.js'; +import { LOCAL_EXPORT_DIR } from '~/lib/storage/layers/LocalFileStorage'; + +// Intentionally unauthenticated: this route is only active when +// E2E_TEST is set (test environments only) and returns 404 otherwise. +// Auth is not needed for test-only file serving. 
+export async function GET(
+  _request: Request,
+  { params }: { params: Promise<{ filename: string }> },
+) {
+  if (!env.E2E_TEST) {
+    return new NextResponse('Not found', { status: 404 });
+  }
+
+  const { filename } = await params;
+
+  if (!/^networkCanvasExport-\d+\.zip$/.test(filename)) {
+    return new NextResponse('Invalid filename', { status: 400 });
+  }
+
+  try {
+    const filePath = join(LOCAL_EXPORT_DIR, filename);
+    const fileBuffer = await readFile(filePath);
+
+    return new NextResponse(fileBuffer, {
+      headers: {
+        'Content-Type': 'application/zip',
+        'Content-Disposition': `attachment; filename="${filename}"`,
+      },
+    });
+  } catch {
+    return new NextResponse('File not found', { status: 404 });
+  }
+}
diff --git a/app/api/uploadthing/core.ts b/app/api/uploadthing/core.ts
index bd9a91a73..f92f0670c 100644
--- a/app/api/uploadthing/core.ts
+++ b/app/api/uploadthing/core.ts
@@ -1,5 +1,5 @@
 import { createUploadthing } from 'uploadthing/next';
-import { getServerSession } from '~/utils/auth';
+import { getServerSession } from '~/lib/auth/guards';
 
 const f = createUploadthing();
 
diff --git a/app/api/uploadthing/route.ts b/app/api/uploadthing/route.ts
index d0f78da24..aa0176667 100644
--- a/app/api/uploadthing/route.ts
+++ b/app/api/uploadthing/route.ts
@@ -1,21 +1,23 @@
+import { invariant } from 'es-toolkit';
 import { type NextRequest } from 'next/server';
 import { createRouteHandler } from 'uploadthing/next';
-import { env } from '~/env';
 import { getAppSetting } from '~/queries/appSettings';
+import { getBaseUrl } from '~/utils/getBaseUrl';
 import { ourFileRouter } from './core';
 
 /**
- * Tricky problem here: getAppSetting uses unstable_cache, which can't be
- * called at the top level of a route handler, but _can_ be called inside
- * a function that is called by the route handler. So we need to wrap the
- * route handler in a function that calls getAppSetting.
- *
- * Better solutions welcome!
- * + * getAppSetting uses 'use cache', which can't be called at the top level of + * a route handler. We wrap the route handler in a function that calls + * getAppSetting to work around this limitation. */ const routeHandler = async () => { const uploadThingToken = await getAppSetting('uploadThingToken'); + invariant( + uploadThingToken, + 'UploadThing token is not set. Please set it in the app settings.', + ); + const handler = createRouteHandler({ router: ourFileRouter, config: { @@ -23,8 +25,8 @@ const routeHandler = async () => { // UploadThing attempts to automatically detect this value based on the request URL and headers // However, the automatic detection fails in docker deployments // docs: https://docs.uploadthing.com/api-reference/server#config - callbackUrl: env.PUBLIC_URL && `${env.PUBLIC_URL}/api/uploadthing`, - token: uploadThingToken, + callbackUrl: `${getBaseUrl()}/api/uploadthing`, + token: uploadThingToken ?? undefined, }, }); diff --git a/app/dashboard/_components/ActivityFeed/ActivityFeed.tsx b/app/dashboard/_components/ActivityFeed/ActivityFeed.tsx index 4737a017f..5c46936ec 100644 --- a/app/dashboard/_components/ActivityFeed/ActivityFeed.tsx +++ b/app/dashboard/_components/ActivityFeed/ActivityFeed.tsx @@ -1,17 +1,15 @@ -import { hash } from 'ohash'; import { Suspense } from 'react'; -import { DataTableSkeleton } from '~/components/data-table/data-table-skeleton'; -import { getActivities } from '~/queries/activityFeed'; +import { DataTableSkeleton } from '~/components/DataTable/DataTableSkeleton'; +import { type fetchActivities } from '~/queries/activityFeed'; import ActivityFeedTable from './ActivityFeedTable'; -import { searchParamsCache } from './SearchParams'; -export default function ActivityFeed() { - const searchParams = searchParamsCache.all(); - const activitiesPromise = getActivities(searchParams); +type ActivityFeedProps = { + activitiesPromise: ReturnType; +}; +export default function ActivityFeed({ activitiesPromise }: 
ActivityFeedProps) { return ( } > diff --git a/app/dashboard/_components/ActivityFeed/ActivityFeedRows.tsx b/app/dashboard/_components/ActivityFeed/ActivityFeedRows.tsx new file mode 100644 index 000000000..f422e9fd7 --- /dev/null +++ b/app/dashboard/_components/ActivityFeed/ActivityFeedRows.tsx @@ -0,0 +1,126 @@ +'use client'; + +import { + getCoreRowModel, + getPaginationRowModel, + getSortedRowModel, + useReactTable, + type ColumnDef, + type ColumnFiltersState, + type PaginationState, + type SortingState, +} from '@tanstack/react-table'; +import { + parseAsArrayOf, + parseAsInteger, + parseAsString, + parseAsStringLiteral, + useQueryStates, +} from 'nuqs'; +import { use, useMemo } from 'react'; +import { DataTable } from '~/components/DataTable/DataTable'; +import { useNuqsTable } from '~/components/DataTable/nuqs/NuqsTableProvider'; +import type { Events } from '~/lib/db/generated/client'; +import type { ActivitiesFeed } from '~/queries/activityFeed'; +import { fetchActivityFeedTableColumnDefs } from './ColumnDefinition'; +import { searchParamsUrlKeys } from './SearchParams'; +import { activityTypes, sortableFields, sortOrder } from './types'; + +export default function ActivityFeedRows({ + activitiesPromise, +}: { + activitiesPromise: ActivitiesFeed; +}) { + // TanStack Table returns a mutable ref with stable identity, defeating React Compiler memoization. + 'use no memo'; + const tableData = use(activitiesPromise); + const { startTransition } = useNuqsTable(); + + const columns = useMemo[]>( + () => fetchActivityFeedTableColumnDefs(), + [], + ); + + // Pagination + sort writes go through the shared transition so the table + // fades during the refetch. Filter keys (q, type) are read-only here — + // they're owned by the toolbar components, and we mirror them into the + // react-table `columnFilters` state below so the column highlight styling + // reflects the active filter. 
+ const [{ page, perPage, sort, sortField, q, type }, setTableState] = + useQueryStates( + { + page: parseAsInteger.withDefault(1), + perPage: parseAsInteger.withDefault(10), + sort: parseAsStringLiteral(sortOrder).withDefault('none'), + sortField: + parseAsStringLiteral(sortableFields).withDefault('timestamp'), + q: parseAsString, + type: parseAsArrayOf(parseAsStringLiteral(activityTypes)), + }, + { + urlKeys: searchParamsUrlKeys, + shallow: false, + clearOnDefault: true, + startTransition, + }, + ); + + const pagination: PaginationState = { + pageIndex: page - 1, + pageSize: perPage, + }; + + const sorting: SortingState = + sort === 'none' ? [] : [{ id: sortField, desc: sort === 'desc' }]; + + // Derived for column highlighting only. The server does the actual + // filtering via `manualFiltering: true`, and the toolbar owns writes to + // these URL params — this local state is feed-only. + const columnFilters = useMemo(() => { + const filters: ColumnFiltersState = []; + if (q) filters.push({ id: 'message', value: q }); + if (type && type.length > 0) filters.push({ id: 'type', value: type }); + return filters; + }, [q, type]); + + const table = useReactTable({ + data: tableData.events, + columns, + pageCount: tableData.pageCount, + state: { pagination, sorting, columnFilters }, + onPaginationChange: (updater) => { + const next = + typeof updater === 'function' ? updater(pagination) : updater; + void setTableState({ + page: next.pageIndex + 1, + perPage: next.pageSize, + }); + }, + onSortingChange: (updater) => { + const next = typeof updater === 'function' ? updater(sorting) : updater; + const first = next[0]; + if (!first) { + void setTableState({ sort: null, sortField: null }); + return; + } + if ( + first.id === 'timestamp' || + first.id === 'type' || + first.id === 'message' + ) { + void setTableState({ + sort: first.desc ? 
'desc' : 'asc', + sortField: first.id, + }); + } + }, + getCoreRowModel: getCoreRowModel(), + getPaginationRowModel: getPaginationRowModel(), + getSortedRowModel: getSortedRowModel(), + manualPagination: true, + manualSorting: true, + manualFiltering: true, + }); + + return ; +} diff --git a/app/dashboard/_components/ActivityFeed/ActivityFeedTable.tsx b/app/dashboard/_components/ActivityFeed/ActivityFeedTable.tsx index 6afa21383..d39923660 100644 --- a/app/dashboard/_components/ActivityFeed/ActivityFeedTable.tsx +++ b/app/dashboard/_components/ActivityFeed/ActivityFeedTable.tsx @@ -1,44 +1,54 @@ 'use client'; -import type { Events } from '~/lib/db/generated/client'; -import type { ColumnDef } from '@tanstack/react-table'; -import { use, useMemo } from 'react'; -import { DataTable } from '~/components/data-table/data-table'; -import { useDataTable } from '~/hooks/use-data-table'; -import type { ActivitiesFeed } from '~/queries/activityFeed'; +import { Suspense } from 'react'; +import { DataTableSkeleton } from '~/components/DataTable/DataTableSkeleton'; import { - fetchActivityFeedTableColumnDefs, - filterableColumns, - searchableColumns, -} from './ColumnDefinition'; + NuqsTableProvider, + useNuqsTable, +} from '~/components/DataTable/nuqs/NuqsTableProvider'; +import type { ActivitiesFeed } from '~/queries/activityFeed'; +import { cx } from '~/utils/cva'; +import ActivityFeedRows from './ActivityFeedRows'; +import ActivityFeedToolbar from './ActivityFeedToolbar'; +import { ACTIVITY_FEED_PREFIX } from './SearchParams'; export default function ActivityFeedTable({ activitiesPromise, }: { activitiesPromise: ActivitiesFeed; }) { - const tableData = use(activitiesPromise); - - // Memoize the columns so they don't re-render on every render - const columns = useMemo[]>( - () => fetchActivityFeedTableColumnDefs(), - [], + return ( + + + ); +} - const { dataTable } = useDataTable({ - data: tableData.events, - columns, - pageCount: tableData.pageCount, - searchableColumns, - 
filterableColumns, - }); +function ActivityFeedTableInner({ + activitiesPromise, +}: { + activitiesPromise: ActivitiesFeed; +}) { + const { isPending } = useNuqsTable(); return ( - +
    + + + } + > +
    + +
    +
    +
    ); } diff --git a/app/dashboard/_components/ActivityFeed/ActivityFeedToolbar.tsx b/app/dashboard/_components/ActivityFeed/ActivityFeedToolbar.tsx new file mode 100644 index 000000000..a86e31e61 --- /dev/null +++ b/app/dashboard/_components/ActivityFeed/ActivityFeedToolbar.tsx @@ -0,0 +1,28 @@ +'use client'; + +import NuqsClearFilters from '~/components/DataTable/nuqs/NuqsClearFilters'; +import NuqsFacetedFilter from '~/components/DataTable/nuqs/NuqsFacetedFilter'; +import NuqsSearchFilter from '~/components/DataTable/nuqs/NuqsSearchFilter'; +import { activityTypes } from './types'; + +const clearableFilters = ['q', 'type'] as const; + +export default function ActivityFeedToolbar() { + return ( +
    + + + +
    + ); +} diff --git a/app/dashboard/_components/ActivityFeed/ColumnDefinition.tsx b/app/dashboard/_components/ActivityFeed/ColumnDefinition.tsx index 1a4350beb..37d9e0e39 100644 --- a/app/dashboard/_components/ActivityFeed/ColumnDefinition.tsx +++ b/app/dashboard/_components/ActivityFeed/ColumnDefinition.tsx @@ -1,53 +1,37 @@ 'use client'; -import { type ColumnDef } from '@tanstack/react-table'; +import { type StrictColumnDef } from '~/components/DataTable/types'; +import { DataTableColumnHeader } from '~/components/DataTable/ColumnHeader'; import { Badge } from '~/components/ui/badge'; -import { - type ActivityType, - type DataTableFilterableColumn, - type DataTableSearchableColumn, - type Activity, - activityTypes, -} from '~/lib/data-table/types'; -import type { Events } from '~/lib/db/generated/client'; import TimeAgo from '~/components/ui/TimeAgo'; -import { DataTableColumnHeader } from '~/components/DataTable/ColumnHeader'; +import type { Events } from '~/lib/db/generated/client'; +import { type ActivityType } from './types'; import { getBadgeColorsForActivityType } from './utils'; -export function fetchActivityFeedTableColumnDefs(): ColumnDef< - Events, - unknown ->[] { +export function fetchActivityFeedTableColumnDefs(): StrictColumnDef[] { return [ { accessorKey: 'timestamp', + sortingFn: 'datetime', header: ({ column }) => ( ), cell: ({ row }) => { const timestamp: string = row.getValue('timestamp'); - return ( -
    - -
    - ); + return ; }, }, { accessorKey: 'type', + sortingFn: 'text', header: ({ column }) => ( ), cell: ({ row }) => { const activityType: ActivityType = row.getValue('type'); const color = getBadgeColorsForActivityType(activityType); - return ( -
    - {activityType} -
    - ); + return {activityType}; }, - enableSorting: false, enableHiding: false, }, { @@ -55,33 +39,9 @@ export function fetchActivityFeedTableColumnDefs(): ColumnDef< header: ({ column }) => ( ), - cell: ({ row }) => ( -
    - - {row.original.message} - -
    - ), + cell: ({ row }) => row.original.message, enableSorting: false, enableHiding: false, }, ]; } - -export const filterableColumns: DataTableFilterableColumn[] = [ - { - id: 'type', - title: 'Type', - options: activityTypes.map((status) => ({ - label: status, - value: status, - })), - }, -] as const; - -export const searchableColumns: DataTableSearchableColumn[] = [ - { - id: 'message', - title: 'by activity details', - }, -] as const; diff --git a/app/dashboard/_components/ActivityFeed/SearchParams.ts b/app/dashboard/_components/ActivityFeed/SearchParams.ts index 924413770..d894fdfdc 100644 --- a/app/dashboard/_components/ActivityFeed/SearchParams.ts +++ b/app/dashboard/_components/ActivityFeed/SearchParams.ts @@ -1,20 +1,42 @@ import { - createSearchParamsCache, - parseAsArrayOf, - parseAsInteger, - parseAsJson, - parseAsStringLiteral, + createSearchParamsCache, + parseAsArrayOf, + parseAsInteger, + parseAsString, + parseAsStringLiteral, } from 'nuqs/server'; -import { FilterParam, sortOrder, sortableFields } from '~/lib/data-table/types'; +import { activityTypes, sortableFields, sortOrder } from './types'; -export const searchParamsParsers = { +/** + * URL namespace prefix for this table. Used by the client provider to + * namespace URL params so multiple server-fetched tables can coexist on + * the same page without colliding. + */ +export const ACTIVITY_FEED_PREFIX = 'af'; + +/** + * Logical-name parsers — these are what the rest of the app sees. The + * actual URL keys are prefixed via `urlKeys` so the URL stays + * `?af_q=foo&af_type=...&af_page=2` etc. 
+ */ +const searchParamsParsers = { page: parseAsInteger.withDefault(1), perPage: parseAsInteger.withDefault(10), - sort: parseAsStringLiteral(sortOrder).withDefault('desc'), + sort: parseAsStringLiteral(sortOrder).withDefault('none'), sortField: parseAsStringLiteral(sortableFields).withDefault('timestamp'), - filterParams: parseAsArrayOf( - parseAsJson((value) => FilterParam.parse(value)), - ), + q: parseAsString, + type: parseAsArrayOf(parseAsStringLiteral(activityTypes)), +}; + +export const searchParamsUrlKeys = { + page: `${ACTIVITY_FEED_PREFIX}_page`, + perPage: `${ACTIVITY_FEED_PREFIX}_perPage`, + sort: `${ACTIVITY_FEED_PREFIX}_sort`, + sortField: `${ACTIVITY_FEED_PREFIX}_sortField`, + q: `${ACTIVITY_FEED_PREFIX}_q`, + type: `${ACTIVITY_FEED_PREFIX}_type`, }; -export const searchParamsCache = createSearchParamsCache(searchParamsParsers); \ No newline at end of file +export const searchParamsCache = createSearchParamsCache(searchParamsParsers, { + urlKeys: searchParamsUrlKeys, +}); diff --git a/app/dashboard/_components/ActivityFeed/types.ts b/app/dashboard/_components/ActivityFeed/types.ts new file mode 100644 index 000000000..fa441c675 --- /dev/null +++ b/app/dashboard/_components/ActivityFeed/types.ts @@ -0,0 +1,63 @@ +import { type Prisma } from '~/lib/db/generated/client'; + +export const activityTypes = [ + 'Protocol Installed', + 'Protocol Uninstalled', + 'Participant(s) Added', + 'Participant(s) Removed', + 'Interview Started', + 'Interview Completed', + 'Interview Opened', + 'Interview(s) Deleted', + 'Data Exported', + 'API Token Created', + 'API Token Updated', + 'API Token Deleted', + 'Preview Mode', + 'User Login', + 'User Created', + 'User Deleted', + 'Password Changed', + 'Two-Factor Enabled', + 'Two-Factor Disabled', + 'Two-Factor Reset', + 'Two-Factor Login', + 'Recovery Code Used', + 'Recovery Codes Regenerated', + 'Passkey Registered', + 'Passkey Removed', + 'Passkey Login', + 'Password Removed', + 'Password Set', + 'Auth Reset', + 'Recovery 
Code Login', + 'Switched to Passkey Mode', + 'Switched to Password Mode', + 'Setting Changed', + 'Synthetic Data Generated', + 'Synthetic Data Deleted', +] as const; + +export type ActivityType = (typeof activityTypes)[number]; + +export type Activity = Prisma.EventsGetPayload<{ + select: { + id: true; + timestamp: true; + type: true; + message: true; + }; +}>; + +export const sortOrder = ['asc', 'desc', 'none'] as const; + +export const sortableFields = ['timestamp', 'type', 'message'] as const; + +export type SearchParams = { + page: number; + perPage: number; + sort: (typeof sortOrder)[number]; + sortField: (typeof sortableFields)[number]; + q: string | null; + type: ActivityType[] | null; +}; diff --git a/app/dashboard/_components/ActivityFeed/useTableStateFromSearchParams.ts b/app/dashboard/_components/ActivityFeed/useTableStateFromSearchParams.ts deleted file mode 100644 index 30a29a400..000000000 --- a/app/dashboard/_components/ActivityFeed/useTableStateFromSearchParams.ts +++ /dev/null @@ -1,31 +0,0 @@ -'use client'; -import { useQueryStates } from 'nuqs'; -import { searchParamsParsers } from './SearchParams'; - -/** - * This hook implements the table state items required by the DataTable. - * - * Ultimately, we could abstract this further, and implement a generic - * useSearchParamsTableState hook so that the way the state is stored is an - * implementation detail. This would allow us to store table state in novel - * ways, such as in localStorage, in the URL, or even in a database. 
- * - */ -export const useTableStateFromSearchParams = () => { - const [{ page, perPage, sort, sortField, filterParams }, setSearchParams] = - useQueryStates(searchParamsParsers, { - clearOnDefault: true, - shallow: false, - }); - - return { - searchParams: { - page, - perPage, - sort, - sortField, - filterParams, - }, - setSearchParams, - }; -}; \ No newline at end of file diff --git a/app/dashboard/_components/ActivityFeed/utils.ts b/app/dashboard/_components/ActivityFeed/utils.ts index a8213d0ca..94f6170d0 100644 --- a/app/dashboard/_components/ActivityFeed/utils.ts +++ b/app/dashboard/_components/ActivityFeed/utils.ts @@ -1,22 +1,76 @@ -import { type ActivityType } from '~/lib/data-table/types'; +import { type ActivityType } from './types'; export const getBadgeColorsForActivityType = (type: ActivityType) => { - switch (type.toLowerCase()) { - case 'protocol installed': + switch (type) { + case 'Protocol Installed': return 'bg-slate-blue hover:bg-slate-blue-dark'; - case 'protocol uninstalled': + case 'Protocol Uninstalled': return 'bg-neon-carrot hover:bg-neon-carrot-dark'; - case 'participant(s) added': + case 'Participant(s) Added': return 'bg-sea-green hover:bg-sea-green'; - case 'participant(s) removed': + case 'Participant(s) Removed': return 'bg-tomato hover:bg-tomato-dark'; - case 'interview started': + case 'Interview Started': return 'bg-sea-serpent hover:bg-sea-serpent-dark'; - case 'interview completed': + case 'Interview Completed': return 'bg-purple-pizazz hover:bg-purple-pizazz-dark'; - case 'interview(s) deleted': + case 'Interview Opened': + return 'bg-cerulean-blue hover:bg-cerulean-blue-dark'; + case 'Interview(s) Deleted': return 'bg-paradise-pink hover:bg-paradise-pink-dark'; - case 'data exported': + case 'Data Exported': return 'bg-kiwi hover:bg-kiwi-dark'; + case 'API Token Created': + return 'bg-cerulean-blue hover:bg-cerulean-blue-dark'; + case 'API Token Updated': + return 'bg-kiwi hover:bg-kiwi-dark'; + case 'API Token Deleted': + 
return 'bg-cyber-grape hover:bg-cyber-grape-dark'; + case 'Password Changed': + return 'bg-mustard hover:bg-mustard-dark'; + case 'User Login': + return 'bg-neon-coral hover:bg-neon-coral-dark'; + case 'User Created': + return 'bg-sea-green hover:bg-sea-green-dark'; + case 'Preview Mode': + return 'bg-tomato hover:bg-tomato-dark'; + case 'User Deleted': + return 'bg-charcoal hover:bg-charcoal-dark'; + case 'Two-Factor Enabled': + return 'bg-sea-green hover:bg-sea-green-dark'; + case 'Two-Factor Disabled': + return 'bg-neon-carrot hover:bg-neon-carrot-dark'; + case 'Two-Factor Reset': + return 'bg-mustard hover:bg-mustard-dark'; + case 'Two-Factor Login': + return 'bg-neon-coral hover:bg-neon-coral-dark'; + case 'Recovery Code Used': + return 'bg-purple-pizazz hover:bg-purple-pizazz-dark'; + case 'Recovery Codes Regenerated': + return 'bg-cerulean-blue hover:bg-cerulean-blue-dark'; + case 'Passkey Registered': + return 'bg-sea-green hover:bg-sea-green-dark'; + case 'Passkey Removed': + return 'bg-neon-carrot hover:bg-neon-carrot-dark'; + case 'Passkey Login': + return 'bg-neon-coral hover:bg-neon-coral-dark'; + case 'Password Removed': + return 'bg-mustard hover:bg-mustard-dark'; + case 'Password Set': + return 'bg-sea-green hover:bg-sea-green-dark'; + case 'Auth Reset': + return 'bg-tomato hover:bg-tomato-dark'; + case 'Recovery Code Login': + return 'bg-purple-pizazz hover:bg-purple-pizazz-dark'; + case 'Switched to Passkey Mode': + return 'bg-sea-green hover:bg-sea-green-dark'; + case 'Switched to Password Mode': + return 'bg-mustard hover:bg-mustard-dark'; + case 'Setting Changed': + return 'bg-mustard hover:bg-mustard-dark'; + case 'Synthetic Data Generated': + return 'bg-sea-green hover:bg-sea-green'; + case 'Synthetic Data Deleted': + return 'bg-neon-carrot hover:bg-neon-carrot-dark'; } }; diff --git a/app/dashboard/_components/AnalyticsButton.tsx b/app/dashboard/_components/AnalyticsButton.tsx deleted file mode 100644 index 83898923a..000000000 --- 
a/app/dashboard/_components/AnalyticsButton.tsx +++ /dev/null @@ -1,42 +0,0 @@ -'use client'; -import { Button } from '~/components/ui/Button'; -import { useToast } from '~/components/ui/use-toast'; -import trackEvent from '~/lib/analytics'; -import { ensureError } from '~/utils/ensureError'; - -const AnalyticsButton = () => { - const { toast } = useToast(); - const sendEvent = () => - trackEvent({ - type: 'ProtocolInstalled', - metadata: { - protocol: 'ethereum', - version: '1.0.0', - }, - }) - .then(() => { - toast({ - title: 'Success', - description: 'Test event sent', - variant: 'success', - }); - }) - .catch((e) => { - const error = ensureError(e); - // eslint-disable-next-line no-console - console.log(error); - toast({ - title: 'Error', - description: 'Sending event failed', - variant: 'destructive', - }); - }); - - return ( - <> - - - ); -}; - -export default AnalyticsButton; diff --git a/app/dashboard/_components/InterviewsTable/ActionsDropdown.tsx b/app/dashboard/_components/InterviewsTable/ActionsDropdown.tsx index 378d6c9b9..ad8884e2e 100644 --- a/app/dashboard/_components/InterviewsTable/ActionsDropdown.tsx +++ b/app/dashboard/_components/InterviewsTable/ActionsDropdown.tsx @@ -1,33 +1,42 @@ 'use client'; -import type { Interview } from '~/lib/db/generated/client'; import type { Row } from '@tanstack/react-table'; -import { MoreHorizontal } from 'lucide-react'; +import { + DeleteIcon, + DoorOpenIcon, + FileIcon, + MoreHorizontal, +} from 'lucide-react'; import Link from 'next/link'; import { hash as objectHash } from 'ohash'; import { useState } from 'react'; import { DeleteInterviewsDialog } from '~/app/dashboard/interviews/_components/DeleteInterviewsDialog'; import { ExportInterviewsDialog } from '~/app/dashboard/interviews/_components/ExportInterviewsDialog'; -import { Button } from '~/components/ui/Button'; +import { IconButton } from '~/components/ui/Button'; import { DropdownMenu, DropdownMenuContent, + DropdownMenuGroup, DropdownMenuItem, 
DropdownMenuLabel, DropdownMenuTrigger, } from '~/components/ui/dropdown-menu'; +import type { GetInterviewsQuery } from '~/queries/interviews'; + +type InterviewRow = GetInterviewsQuery[number]; -export const ActionsDropdown = ({ row }: { row: Row }) => { +export const ActionsDropdown = ({ row }: { row: Row }) => { const [showDeleteModal, setShowDeleteModal] = useState(false); const [showExportModal, setShowExportModal] = useState(false); - const [selectedInterviews, setSelectedInterviews] = useState(); + const [selectedInterviews, setSelectedInterviews] = + useState(); - const handleDelete = (data: Interview) => { + const handleDelete = (data: InterviewRow) => { setSelectedInterviews([data]); setShowDeleteModal(true); }; - const handleExport = (data: Interview) => { + const handleExport = (data: InterviewRow) => { setSelectedInterviews([data]); setShowExportModal(true); }; @@ -51,22 +60,37 @@ export const ActionsDropdown = ({ row }: { row: Row }) => { interviewsToDelete={selectedInterviews ?? 
[]} /> - - - + } + size="sm" + /> + } + nativeButton + /> - Actions - handleDelete(row.original)}> - Delete - - handleExport(row.original)}> - Export - + + Actions + handleDelete(row.original)} + icon={} + > + Delete + + handleExport(row.original)} + icon={} + > + Export + + - Enter Interview + }> + Enter Interview + diff --git a/app/dashboard/_components/InterviewsTable/Columns.tsx b/app/dashboard/_components/InterviewsTable/Columns.tsx index 84f71cbc6..a12979d0f 100644 --- a/app/dashboard/_components/InterviewsTable/Columns.tsx +++ b/app/dashboard/_components/InterviewsTable/Columns.tsx @@ -1,28 +1,34 @@ 'use client'; -import type { Codebook, NcNetwork, Stage } from '@codaco/shared-consts'; -import { type ColumnDef } from '@tanstack/react-table'; +import { type FilterFn } from '@tanstack/react-table'; +import { type StrictColumnDef } from '~/components/DataTable/types'; import Image from 'next/image'; import { DataTableColumnHeader } from '~/components/DataTable/ColumnHeader'; +import { + booleanFilterFn, + dateFilterFn, + facetedFilterFn, + operatorFilterFn, + rangeFilterFn, +} from '~/components/DataTable/filters/filterFns'; +import { SelectAllHeader } from '~/components/DataTable/SelectAllHeader'; +import { type Option } from '~/components/DataTable/types'; import { Badge } from '~/components/ui/badge'; -import { Checkbox } from '~/components/ui/checkbox'; -import { Progress } from '~/components/ui/progress'; +import ProgressBar from '~/components/ui/ProgressBar'; import TimeAgo from '~/components/ui/TimeAgo'; -import type { GetInterviewsReturnType } from '~/queries/interviews'; +import Checkbox from '~/lib/form/components/fields/Checkbox'; +import type { GetInterviewsQuery } from '~/queries/interviews'; import NetworkSummary from './NetworkSummary'; -export const InterviewColumns = (): ColumnDef< - Awaited[0] ->[] => [ +type InterviewRow = GetInterviewsQuery[number]; + +export const InterviewColumns = (): StrictColumnDef[] => [ { id: 'select', - header: ({ 
table }) => ( - table.toggleAllPageRowsSelected(!!value)} - aria-label="Select all" - /> - ), + meta: { + className: 'sticky left-0', + }, + header: ({ table }) => , cell: ({ row }) => ( { return ( -
    - Participant icon - -
    + + Participant icon + Participant Identifier +
    + } + /> ); }, cell: ({ row }) => { @@ -59,10 +68,8 @@ export const InterviewColumns = (): ColumnDef< className="flex items-center gap-2" title={row.original.participant.identifier} > - - - {row.original.participant.identifier} - + + {row.original.participant.identifier}
    ); @@ -71,18 +78,40 @@ export const InterviewColumns = (): ColumnDef< { id: 'protocolName', accessorKey: 'protocol.name', - header: ({ column }) => { + sortingFn: 'text', + meta: { + filterType: 'faceted' as const, + filterConfig: { + type: 'faceted' as const, + options: (data: unknown[]) => { + const rows = data as GetInterviewsQuery; + const names = [...new Set(rows.map((r) => r.protocol.name))]; + return names.map((name) => ({ + label: name.replace(/\.netcanvas$/, ''), + value: name, + })); + }, + }, + }, + filterFn: facetedFilterFn, + header: ({ column, table }) => { return ( -
    - Protocol icon - -
    + + Protocol icon + Protocol Name +
    + } + /> ); }, cell: ({ row }) => { @@ -101,73 +130,156 @@ export const InterviewColumns = (): ColumnDef< { id: 'startTime', accessorKey: 'startTime', - header: ({ column }) => { - return ; + sortingFn: 'datetime', + meta: { + filterType: 'date' as const, + filterConfig: { type: 'date' as const }, + }, + filterFn: dateFilterFn, + header: ({ column, table }) => { + return ( + + ); }, cell: ({ row }) => { - const date = new Date(row.original.startTime); - return ; + return ; }, }, { id: 'lastUpdated', accessorKey: 'lastUpdated', - header: ({ column }) => { - return ; + sortingFn: 'datetime', + meta: { + filterType: 'date' as const, + filterConfig: { type: 'date' as const }, + }, + filterFn: dateFilterFn, + header: ({ column, table }) => { + return ( + + ); }, cell: ({ row }) => { - const date = new Date(row.original.lastUpdated); - return ; + return ; }, }, { id: 'progress', + sortingFn: 'basic', accessorFn: (row) => { - const stages = row.protocol.stages; - return Array.isArray(stages) - ? (row.currentStep / stages.length) * 100 - : 0; + const stageCount = row.protocol.stageCount; + return stageCount > 0 ? (row.currentStep / stageCount) * 100 : 0; }, - header: ({ column }) => { - return ; + meta: { + filterType: 'range' as const, + filterConfig: { + type: 'range' as const, + min: 0, + max: 100, + step: 1, + presets: [ + { label: 'Not Started', min: 0, max: 0 }, + { label: 'In Progress', min: 1, max: 99 }, + { label: 'Complete', min: 100, max: 100 }, + ], + formatLabel: (v: number) => `${String(v)}%`, + }, + }, + filterFn: rangeFilterFn, + header: ({ column, table }) => { + return ( + + ); }, cell: ({ row }) => { - const stages = row.original.protocol.stages! as unknown as Stage[]; - const progress = (row.original.currentStep / stages.length) * 100; + const stageCount = row.original.protocol.stageCount; + const progress = + stageCount > 0 ? (row.original.currentStep / stageCount) * 100 : 0; return ( -
    - -
    {progress.toFixed(0)}%
    +
    + +
    {progress.toFixed(0)}%
    ); }, }, { id: 'network', + enableSorting: false, accessorFn: (row) => { - const network = row.network as NcNetwork; - const nodeCount = network?.nodes?.length ?? 0; - const edgeCount = network?.edges?.length ?? 0; + const network = row.network; + const nodeCount = network.nodes.reduce((sum, n) => sum + n.count, 0); + const edgeCount = network.edges.reduce((sum, e) => sum + e.count, 0); return nodeCount + edgeCount; }, - header: ({ column }) => { - return ; + meta: { + filterType: 'operator' as const, + filterConfig: { + type: 'operator' as const, + operators: ['eq', 'gt', 'lt', 'gte', 'lte'] as const, + entitySelector: { + label: 'Entity Type', + getOptions: (data: unknown[]) => { + const rows = data as GetInterviewsQuery; + const types = new Map(); + for (const row of rows) { + for (const node of row.network.nodes) { + types.set(`nodes.${node.type}`, { + label: `${node.name} (nodes)`, + value: `nodes.${node.type}`, + }); + } + for (const edge of row.network.edges) { + types.set(`edges.${edge.type}`, { + label: `${edge.name} (edges)`, + value: `edges.${edge.type}`, + }); + } + } + return Array.from(types.values()); + }, + }, + }, + }, + filterFn: operatorFilterFn as FilterFn, + header: ({ column, table }) => { + return ( + + ); }, cell: ({ row }) => { - const network = row.original.network as NcNetwork; - const codebook = row.original.protocol.codebook as Codebook; - - return ; + return ; }, }, { + id: 'exportTime', accessorKey: 'exportTime', - header: ({ column }) => { - return ; + sortingFn: 'datetime', + meta: { + filterType: 'boolean' as const, + filterConfig: { + type: 'boolean' as const, + trueLabel: 'Exported', + falseLabel: 'Not Exported', + }, + }, + filterFn: booleanFilterFn, + header: ({ column, table }) => { + return ( + + ); }, cell: ({ row }) => { if (!row.original.exportTime) { - return Not exported; + return Not exported; } return ( diff --git a/app/dashboard/_components/InterviewsTable/InterviewsTable.tsx 
b/app/dashboard/_components/InterviewsTable/InterviewsTable.tsx index 27495aa29..fdef629a7 100644 --- a/app/dashboard/_components/InterviewsTable/InterviewsTable.tsx +++ b/app/dashboard/_components/InterviewsTable/InterviewsTable.tsx @@ -1,14 +1,17 @@ 'use client'; -import { HardDriveUpload } from 'lucide-react'; -import { hash as objectHash } from 'ohash'; +import { type ColumnDef, type Row } from '@tanstack/react-table'; +import { FileUp, HardDriveUpload, Trash } from 'lucide-react'; import { use, useMemo, useState } from 'react'; +import superjson from 'superjson'; import { ActionsDropdown } from '~/app/dashboard/_components/InterviewsTable/ActionsDropdown'; import { InterviewColumns } from '~/app/dashboard/_components/InterviewsTable/Columns'; import { DeleteInterviewsDialog } from '~/app/dashboard/interviews/_components/DeleteInterviewsDialog'; import { ExportInterviewsDialog } from '~/app/dashboard/interviews/_components/ExportInterviewsDialog'; import { GenerateInterviewURLs } from '~/app/dashboard/interviews/_components/GenerateInterviewURLs'; import { DataTable } from '~/components/DataTable/DataTable'; +import { DataTableFloatingBar } from '~/components/DataTable/DataTableFloatingBar'; +import { DataTableToolbar } from '~/components/DataTable/DataTableToolbar'; import { Button } from '~/components/ui/Button'; import { DropdownMenu, @@ -16,9 +19,15 @@ import { DropdownMenuItem, DropdownMenuTrigger, } from '~/components/ui/dropdown-menu'; -import type { GetInterviewsReturnType } from '~/queries/interviews'; +import { useClientDataTable } from '~/hooks/useClientDataTable'; +import type { + GetInterviewsQuery, + GetInterviewsReturnType, +} from '~/queries/interviews'; import type { GetProtocolsReturnType } from '~/queries/protocols'; +type InterviewRow = GetInterviewsQuery[number]; + export const InterviewsTable = ({ interviewsPromise, protocolsPromise, @@ -26,7 +35,13 @@ export const InterviewsTable = ({ interviewsPromise: GetInterviewsReturnType; 
protocolsPromise: GetProtocolsReturnType; }) => { - const interviews = use(interviewsPromise); + // TanStack Table: consumers must also opt out so React Compiler doesn't memoize JSX that depends on the table ref. + 'use no memo'; + const serializedInterviews = use(interviewsPromise); + const interviews = useMemo( + () => superjson.parse(serializedInterviews), + [serializedInterviews], + ); const [selectedInterviews, setSelectedInterviews] = useState(); @@ -68,10 +83,29 @@ export const InterviewsTable = ({ setShowExportModal(false); }; + const actionsColumn: ColumnDef = { + id: 'actions', + cell: ({ row }: { row: Row }) => ( + + ), + }; + + const columns = useMemo[]>( + () => [...InterviewColumns(), actionsColumn], + // eslint-disable-next-line react-hooks/exhaustive-deps + [], + ); + + const { table } = useClientDataTable({ + data: interviews, + columns, + defaultSortBy: { id: 'lastUpdated', desc: true }, + enableUrlFilters: true, + }); + return ( <> { - setSelectedInterviews(selected); - setShowExportModal(true); - }} - actions={ActionsDropdown} - defaultSortBy={{ id: 'lastUpdated', desc: true }} - headerItems={ + table={table} + toolbar={ <> - - - - - - - Export all interviews - - - Export all completed interviews - - + + } />} + disabled={interviews.length === 0} + nativeButton + data-testid="export-interviews-button" + className="tablet-landscape:w-auto w-full" > - Export all unexported interviews - - - - + Export Interview Data + + + + Export all interviews + + + Export all completed interviews + + + Export all unexported interviews + + + + + } + floatingBar={ + + + + + } /> ); diff --git a/app/dashboard/_components/InterviewsTable/InterviewsTableServer.tsx b/app/dashboard/_components/InterviewsTable/InterviewsTableServer.tsx index da04088cb..df00dac79 100644 --- a/app/dashboard/_components/InterviewsTable/InterviewsTableServer.tsx +++ b/app/dashboard/_components/InterviewsTable/InterviewsTableServer.tsx @@ -1,5 +1,5 @@ import { Suspense } from 'react'; 
-import { DataTableSkeleton } from '~/components/data-table/data-table-skeleton'; +import { DataTableSkeleton } from '~/components/DataTable/DataTableSkeleton'; import { getInterviews } from '~/queries/interviews'; import { getProtocols } from '~/queries/protocols'; import { InterviewsTable } from './InterviewsTable'; @@ -10,7 +10,13 @@ export default function InterviewsTableServer() { return ( } + fallback={ + + } > -
    - {count} -
    - {typeName} -
    - ); -} function EdgeSummary({ color, count, typeName }: EdgeSummaryProps) { - const lightColorClass = cn( - 'fill-[var(--edge-color-seq-1)]', - color === 'edge-color-seq-1' && 'fill-[var(--edge-color-seq-1)]', - color === 'edge-color-seq-2' && 'fill-[var(--edge-color-seq-2)]', - color === 'edge-color-seq-3' && 'fill-[var(--edge-color-seq-3)]', - color === 'edge-color-seq-4' && 'fill-[var(--edge-color-seq-4)]', - color === 'edge-color-seq-5' && 'fill-[var(--edge-color-seq-5)]', - color === 'edge-color-seq-6' && 'fill-[var(--edge-color-seq-6)]', - color === 'edge-color-seq-7' && 'fill-[var(--edge-color-seq-7)]', - color === 'edge-color-seq-8' && 'fill-[var(--edge-color-seq-8)]', - color === 'edge-color-seq-9' && 'fill-[var(--edge-color-seq-9)]', - ); - - const darkColorClass = cn( - 'fill-[var(--edge-color-seq-1-dark)]', - color === 'edge-color-seq-1' && 'fill-[var(--edge-color-seq-1-dark)]', - color === 'edge-color-seq-2' && 'fill-[var(--edge-color-seq-2-dark)]', - color === 'edge-color-seq-3' && 'fill-[var(--edge-color-seq-3-dark)]', - color === 'edge-color-seq-4' && 'fill-[var(--edge-color-seq-4-dark)]', - color === 'edge-color-seq-5' && 'fill-[var(--edge-color-seq-5-dark)]', - color === 'edge-color-seq-6' && 'fill-[var(--edge-color-seq-6-dark)]', - color === 'edge-color-seq-7' && 'fill-[var(--edge-color-seq-7-dark)]', - color === 'edge-color-seq-8' && 'fill-[var(--edge-color-seq-8-dark)]', - color === 'edge-color-seq-9' && 'fill-[var(--edge-color-seq-9-dark)]', + /** + * There is a bug in the suggestCanonicalClasses rule: https://github.com/tailwindlabs/tailwindcss-intellisense/issues/1542 + */ + const edgeColorClasses = cx( + color === 'edge-color-seq-1' && + // eslint-disable-next-line better-tailwindcss/enforce-canonical-classes + '[--fill-dark:oklch(from_var(--color-edge-1)_calc(l_-_var(--dark-mod))_c_h)] [--fill:var(--color-edge-1)]', + color === 'edge-color-seq-2' && + // eslint-disable-next-line better-tailwindcss/enforce-canonical-classes + 
'[--fill-dark:oklch(from_var(--color-edge-2)_calc(l_-_var(--dark-mod))_c_h)] [--fill:var(--color-edge-2)]', + color === 'edge-color-seq-3' && + // eslint-disable-next-line better-tailwindcss/enforce-canonical-classes + '[--fill-dark:oklch(from_var(--color-edge-3)_calc(l_-_var(--dark-mod))_c_h)] [--fill:var(--color-edge-3)]', + color === 'edge-color-seq-4' && + // eslint-disable-next-line better-tailwindcss/enforce-canonical-classes + '[--fill-dark:oklch(from_var(--color-edge-4)_calc(l_-_var(--dark-mod))_c_h)] [--fill:var(--color-edge-4)]', + color === 'edge-color-seq-5' && + // eslint-disable-next-line better-tailwindcss/enforce-canonical-classes + '[--fill-dark:oklch(from_var(--color-edge-5)_calc(l_-_var(--dark-mod))_c_h)] [--fill:var(--color-edge-5)]', + color === 'edge-color-seq-6' && + // eslint-disable-next-line better-tailwindcss/enforce-canonical-classes + '[--fill-dark:oklch(from_var(--color-edge-6)_calc(l_-_var(--dark-mod))_c_h)] [--fill:var(--color-edge-6)]', + color === 'edge-color-seq-7' && + // eslint-disable-next-line better-tailwindcss/enforce-canonical-classes + '[--fill-dark:oklch(from_var(--color-edge-7)_calc(l_-_var(--dark-mod))_c_h)] [--fill:var(--color-edge-7)]', + color === 'edge-color-seq-8' && + // eslint-disable-next-line better-tailwindcss/enforce-canonical-classes + '[--fill-dark:oklch(from_var(--color-edge-8)_calc(l_-_var(--dark-mod))_c_h)] [--fill:var(--color-edge-8)]', + color === 'edge-color-seq-9' && + // eslint-disable-next-line better-tailwindcss/enforce-canonical-classes + '[--fill-dark:oklch(from_var(--color-edge-9)_calc(l_-_var(--dark-mod))_c_h)] [--fill:var(--color-edge-9)]', ); return (
    -
    +
    - - - + + + @@ -145,58 +108,45 @@ function EdgeSummary({ color, count, typeName }: EdgeSummaryProps) { const NetworkSummary = ({ network, - codebook, }: { - network: NcNetwork | null; - codebook: Codebook | null; + network: GetInterviewsQuery[number]['network']; }) => { - if (!network || !codebook) { - return
    No interview data
    ; - } - const nodeSummaries = Object.entries( - network.nodes?.reduce>((acc, node) => { - acc[node.type] = (acc[node.type] ?? 0) + 1; - return acc; - }, {}) ?? {}, - ).map(([nodeType, count]) => { - // eslint-disable-next-line @typescript-eslint/no-non-null-asserted-optional-chain - const nodeInfo = codebook.node?.[nodeType]!; - return ( - - ); - }); + const nodeSummaries = network.nodes.map( + ({ type: nodeType, count, name, color }) => ( +
    + + {name} +
    + ), + ); + + const edgeSummaries = network.edges + .map(({ type: edgeType, count, name, color }) => { + if (!color) return null; - const edgeSummaries = Object.entries( - network.edges?.reduce>((acc, edge) => { - acc[edge.type] = (acc[edge.type] ?? 0) + 1; - return acc; - }, {}) ?? {}, - ).map(([edgeType, count]) => { - // eslint-disable-next-line @typescript-eslint/no-non-null-asserted-optional-chain - const edgeInfo = codebook.edge?.[edgeType]!; - return ( - - ); - }); + return ( + + ); + }) + .filter(Boolean); if (nodeSummaries.length === 0 && edgeSummaries.length === 0) { return
    No nodes or edges
    ; } return ( -
    -
    {nodeSummaries}
    -
    {edgeSummaries}
    +
    + {nodeSummaries} + {edgeSummaries}
    ); }; diff --git a/app/dashboard/_components/MobileNavDrawer.tsx b/app/dashboard/_components/MobileNavDrawer.tsx new file mode 100644 index 000000000..ab5afb0f6 --- /dev/null +++ b/app/dashboard/_components/MobileNavDrawer.tsx @@ -0,0 +1,138 @@ +'use client'; + +import { Menu, Settings, X } from 'lucide-react'; +import { motion } from 'motion/react'; +import type { Route } from 'next'; +import Link from 'next/link'; +import { usePathname } from 'next/navigation'; +import { useState } from 'react'; +import type { UrlObject } from 'url'; +import { logout } from '~/actions/auth'; +import Modal from '~/components/Modal/Modal'; +import ModalPopup from '~/components/Modal/ModalPopup'; +import SubmitButton from '~/components/ui/SubmitButton'; +import { cx } from '~/utils/cva'; + +type NavItem = { + label: string; + href: UrlObject | Route; + icon?: React.ReactNode; +}; + +const navItems: NavItem[] = [ + { label: 'Dashboard', href: '/dashboard' }, + { label: 'Protocols', href: '/dashboard/protocols' }, + { label: 'Participants', href: '/dashboard/participants' }, + { label: 'Interviews', href: '/dashboard/interviews' }, +]; + +const MobileNavLink = ({ + item, + isActive, + onClick, +}: { + item: NavItem; + isActive: boolean; + onClick: () => void; +}) => { + return ( + + {item.icon} + {item.label} + + ); +}; + +export function MobileNavDrawer() { + const [open, setOpen] = useState(false); + const pathname = usePathname(); + + const handleClose = () => setOpen(false); + + return ( + <> + + + + + + + + + ); +} diff --git a/app/dashboard/_components/NavigationBar.tsx b/app/dashboard/_components/NavigationBar.tsx index 7783aa907..1c6f34779 100644 --- a/app/dashboard/_components/NavigationBar.tsx +++ b/app/dashboard/_components/NavigationBar.tsx @@ -1,86 +1,148 @@ 'use client'; -import { motion } from 'motion/react'; +import { Settings } from 'lucide-react'; +import { motion, useReducedMotion, type Variants } from 'motion/react'; import type { Route } from 'next'; -import 
Image from 'next/image'; import Link from 'next/link'; import { usePathname } from 'next/navigation'; import type { UrlObject } from 'url'; -import Heading from '~/components/ui/typography/Heading'; -import { env } from '~/env'; -import { cn } from '~/utils/shadcn'; +import { MotionSurface } from '~/components/layout/Surface'; +import Spinner from '~/components/Spinner'; +import Heading from '~/components/typography/Heading'; +import { cx } from '~/utils/cva'; +import { MobileNavDrawer } from './MobileNavDrawer'; import UserMenu from './UserMenu'; +const containerVariants: Variants = { + hidden: { + y: '-150%', + }, + visible: { + y: 0, + transition: { + type: 'spring', + delayChildren: 0.5, + staggerChildren: 0.1, + }, + }, +}; + +const itemVariants: Variants = { + hidden: { opacity: 0, y: '-100%' }, + visible: { + opacity: 1, + y: 0, + transition: { + type: 'spring', + }, + }, +}; + const NavButton = ({ label, href, isActive = false, }: { - label: string; + label: string | React.ReactNode; href: UrlObject | Route; isActive?: boolean; }) => { return ( - + - {label} + {isActive && ( + + )} + {label} - {isActive && ( - - )} ); }; export function NavigationBar() { const pathname = usePathname(); + const shouldReduceMotion = useReducedMotion(); return ( - - - Fresco - - Fresco - {env.APP_VERSION} - - -
      - - - - - -
    - -
    +
    + + + + + Fresco + + +
      + + + + +
    +
    + + + Settings +
    + } + href="/dashboard/settings" + isActive={pathname === '/dashboard/settings'} + /> + + + + +
    + +
    + +
    + +
    ); } diff --git a/app/dashboard/_components/ParticipantsTable/ActionsDropdown.tsx b/app/dashboard/_components/ParticipantsTable/ActionsDropdown.tsx index bcf099ee2..db06e8cfc 100644 --- a/app/dashboard/_components/ParticipantsTable/ActionsDropdown.tsx +++ b/app/dashboard/_components/ParticipantsTable/ActionsDropdown.tsx @@ -1,62 +1,55 @@ -import { MoreHorizontal } from 'lucide-react'; -import { Button } from '~/components/ui/Button'; +import type { Row } from '@tanstack/react-table'; +import { DeleteIcon, MoreHorizontal, PencilIcon } from 'lucide-react'; +import { IconButton } from '~/components/ui/Button'; import { DropdownMenu, DropdownMenuContent, + DropdownMenuGroup, DropdownMenuItem, DropdownMenuLabel, DropdownMenuTrigger, } from '~/components/ui/dropdown-menu'; -import type { Row } from '@tanstack/react-table'; -import { useState } from 'react'; -import ParticipantModal from '~/app/dashboard/participants/_components/ParticipantModal'; -import type { ParticipantWithInterviews } from '~/types/types'; -import type { Participant } from '~/lib/db/generated/client'; +import type { ParticipantWithInterviews } from './ParticipantsTableClient'; -export const ActionsDropdown = ({ +export function ActionsDropdown({ row, - data, - deleteHandler, + onEdit, + onDelete, }: { row: Row; - data: ParticipantWithInterviews[]; - deleteHandler: (participant: ParticipantWithInterviews) => void; -}) => { - const [selectedParticipant, setSelectedParticipant] = - useState(null); - const [showParticipantModal, setShowParticipantModal] = useState(false); - - const editParticipant = (data: Participant) => { - setSelectedParticipant(data); - setShowParticipantModal(true); - }; - + onEdit: (participant: ParticipantWithInterviews) => void; + onDelete: (participant: ParticipantWithInterviews) => void; +}) { return ( - <> - + } + size="sm" + /> + } + nativeButton /> - - - - - + + Actions - editParticipant(row.original)}> + onEdit(row.original)} + icon={} + > Edit - 
deleteHandler(row.original)}> + onDelete(row.original)} + icon={} + > Delete - - - + + + ); -}; +} diff --git a/app/dashboard/_components/ParticipantsTable/Columns.tsx b/app/dashboard/_components/ParticipantsTable/Columns.tsx index bd96e707a..e479efbd2 100644 --- a/app/dashboard/_components/ParticipantsTable/Columns.tsx +++ b/app/dashboard/_components/ParticipantsTable/Columns.tsx @@ -1,30 +1,20 @@ -import { type ColumnDef } from '@tanstack/react-table'; -import { DataTableColumnHeader } from '~/components/DataTable/ColumnHeader'; -import { Checkbox } from '~/components/ui/checkbox'; -import { GenerateParticipationURLButton } from './GenerateParticipantURLButton'; -import { type ParticipantWithInterviews } from '~/types/types'; +import { type StrictColumnDef } from '~/components/DataTable/types'; import Image from 'next/image'; -import InfoTooltip from '~/components/InfoTooltip'; -import { InfoIcon } from 'lucide-react'; -import Heading from '~/components/ui/typography/Heading'; -import Paragraph from '~/components/ui/typography/Paragraph'; -import { buttonVariants } from '~/components/ui/Button'; +import Checkbox from '~/lib/form/components/fields/Checkbox'; +import { DataTableColumnHeader } from '~/components/DataTable/ColumnHeader'; +import { SelectAllHeader } from '~/components/DataTable/SelectAllHeader'; import { Badge } from '~/components/ui/badge'; -import type { GetProtocolsReturnType } from '~/queries/protocols'; +import type { ProtocolWithInterviews } from '../ProtocolsTable/ProtocolsTableClient'; +import { GenerateParticipationURLButton } from './GenerateParticipantURLButton'; +import type { ParticipantWithInterviews } from './ParticipantsTableClient'; export function getParticipantColumns( - protocols: Awaited, -): ColumnDef[] { + protocols: ProtocolWithInterviews[], +): StrictColumnDef[] { return [ { id: 'select', - header: ({ table }) => ( - table.toggleAllPageRowsSelected(!!value)} - aria-label="Select all" - /> - ), + header: ({ table }) => , cell: 
({ row }) => ( { return ; }, @@ -65,6 +56,7 @@ export function getParticipantColumns( }, { accessorKey: 'label', + sortingFn: 'text', header: ({ column }) => { return ; }, @@ -74,6 +66,8 @@ export function getParticipantColumns( }, { id: 'interviews', + accessorFn: (row) => row._count.interviews, + sortingFn: 'basic', header: ({ column }) => { return ; }, @@ -91,32 +85,12 @@ export function getParticipantColumns( }, { id: 'participant-url', - header: () => { + enableSorting: false, + header: ({ column }) => { return ( - - Unique Participant URL - -
    - } - content={ - <> - Unique Participant URL - - A unique participant URL allows a participant to take an - interview simply by visiting a URL. A participation URL is - specific to each participant, and should only be shared with - them. - - - } + ); }, diff --git a/app/dashboard/_components/ParticipantsTable/GenerateParticipantURLButton.tsx b/app/dashboard/_components/ParticipantsTable/GenerateParticipantURLButton.tsx index 62f7b2646..b3962b7bb 100644 --- a/app/dashboard/_components/ParticipantsTable/GenerateParticipantURLButton.tsx +++ b/app/dashboard/_components/ParticipantsTable/GenerateParticipantURLButton.tsx @@ -1,99 +1,88 @@ 'use client'; -import type { Participant, Protocol } from '~/lib/db/generated/client'; -import { useRef, useState } from 'react'; -import { - Select, - SelectContent, - SelectItem, - SelectTrigger, - SelectValue, -} from '~/components/ui/select'; - -import { PopoverTrigger } from '@radix-ui/react-popover'; -import { Check, Copy } from 'lucide-react'; +import { Copy } from 'lucide-react'; +import { memo, useState } from 'react'; +import Paragraph from '~/components/typography/Paragraph'; import { Button } from '~/components/ui/Button'; -import { Popover, PopoverContent } from '~/components/ui/popover'; -import Paragraph from '~/components/ui/typography/Paragraph'; -import { useToast } from '~/components/ui/use-toast'; -import type { GetProtocolsReturnType } from '~/queries/protocols'; +import { + Popover, + PopoverContent, + PopoverTrigger, +} from '~/components/ui/popover'; +import { useToast } from '~/components/ui/Toast'; +import type { Participant, Protocol } from '~/lib/db/generated/client'; +import SelectField from '~/lib/form/components/fields/Select/Native'; +import type { ProtocolWithInterviews } from '../ProtocolsTable/ProtocolsTableClient'; -export const GenerateParticipationURLButton = ({ - participant, - protocols, -}: { - participant: Participant; - protocols: Awaited; -}) => { - const [selectedProtocol, 
setSelectedProtocol] = useState(); +export const GenerateParticipationURLButton = memo( + function GenerateParticipationURLButton({ + participant, + protocols, + }: { + participant: Participant; + protocols: ProtocolWithInterviews[]; + }) { + const [open, setOpen] = useState(false); + const [selectedProtocol, setSelectedProtocol] = + useState | null>(); - const { toast } = useToast(); + const { promise } = useToast(); - const handleCopy = (url: string) => { - if (url) { - navigator.clipboard - .writeText(url) - .then(() => { - toast({ - title: 'Success!', - icon: , - description: 'Participation URL copied to clipboard', - variant: 'success', - }); - }) - .catch(() => { - toast({ - title: 'Error', - description: 'Could not copy text', - variant: 'destructive', - }); + const handleCopy = (url: string) => { + if (url) { + void promise(navigator.clipboard.writeText(url), { + loading: 'Copying URL to clipboard...', + success: 'URL copied to clipboard!', + error: 'Failed to copy URL to clipboard.', }); - } - }; + } + }; - const ref = useRef(null); - - return ( - - - - - - - Select a protocol, and the URL will be copied to your clipboard. - - - - - ); -}; + setSelectedProtocol(null); + }} + value={selectedProtocol?.id} + placeholder="Select a Protocol..." 
+ /> + + + ); + }, +); diff --git a/app/dashboard/_components/ParticipantsTable/ParticipantsTable.tsx b/app/dashboard/_components/ParticipantsTable/ParticipantsTable.tsx index a1e4023b3..79786e998 100644 --- a/app/dashboard/_components/ParticipantsTable/ParticipantsTable.tsx +++ b/app/dashboard/_components/ParticipantsTable/ParticipantsTable.tsx @@ -1,5 +1,5 @@ import { Suspense } from 'react'; -import { DataTableSkeleton } from '~/components/data-table/data-table-skeleton'; +import { DataTableSkeleton } from '~/components/DataTable/DataTableSkeleton'; import { getParticipants } from '~/queries/participants'; import { getProtocols } from '~/queries/protocols'; import { ParticipantsTableClient } from './ParticipantsTableClient'; @@ -10,7 +10,13 @@ export default function ParticipantsTable() { return ( } + fallback={ + + } > (raw: string): T { + const ref = useRef<{ raw: string; parsed: T } | null>(null); + if (ref.current?.raw !== raw) { + ref.current = { raw, parsed: SuperJSON.parse(raw) }; + } + return ref.current.parsed; +} export const ParticipantsTableClient = ({ participantsPromise, @@ -25,39 +50,31 @@ export const ParticipantsTableClient = ({ participantsPromise: GetParticipantsReturnType; protocolsPromise: GetProtocolsReturnType; }) => { - const participants = use(participantsPromise); - const protocols = use(protocolsPromise); - - // Memoize the columns so they don't re-render on every render - const columns = useMemo[]>( - () => getParticipantColumns(protocols), - [protocols], - ); + // TanStack Table: consumers must also opt out so React Compiler doesn't memoize JSX that depends on the table ref. 
+ 'use no memo'; + const rawParticipants = use(participantsPromise); + const rawProtocols = use(protocolsPromise); + const participants = useStableParse(rawParticipants); + const protocols = useStableParse(rawProtocols); const [participantsToDelete, setParticipantsToDelete] = useState< ParticipantWithInterviews[] | null >(null); const [showDeleteModal, setShowDeleteModal] = useState(false); - // Actual delete handler, which handles optimistic updates, etc. + const [editingParticipant, setEditingParticipant] = + useState(null); + const [showEditModal, setShowEditModal] = useState(false); + const doDelete = async () => { if (!participantsToDelete) { return; } - // Check if we are deleting all and call the appropriate function - if (participantsToDelete.length === participants.length) { - await deleteAllParticipants(); - resetDelete(); - return; - } - await deleteParticipants(participantsToDelete.map((p) => p.id)); - resetDelete(); }; - // Resets the state when the dialog is closed. const resetDelete = () => { setShowDeleteModal(false); setParticipantsToDelete(null); @@ -65,22 +82,50 @@ export const ParticipantsTableClient = ({ const handleDeleteItems = useCallback( (items: ParticipantWithInterviews[]) => { - // Set state to the items to be deleted setParticipantsToDelete(items); - - // Show the dialog setShowDeleteModal(true); }, [], ); - const handleDeleteAll = useCallback(() => { - // Set state to all items - setParticipantsToDelete(participants); + const handleEditParticipant = useCallback( + (participant: ParticipantWithInterviews) => { + setEditingParticipant(participant); + setShowEditModal(true); + }, + [], + ); + + const handleDeleteSingle = useCallback( + (participant: ParticipantWithInterviews) => { + handleDeleteItems([participant]); + }, + [handleDeleteItems], + ); + + const columns = useMemo[]>( + () => [ + ...getParticipantColumns(protocols), + { + id: 'actions', + cell: ({ row }: { row: Row }) => ( + + ), + }, + ], + [protocols, handleEditParticipant, 
handleDeleteSingle], + ); + + const exportParticipants = useExportParticipants(protocols); - // Show the dialog - setShowDeleteModal(true); - }, [participants]); + const { table } = useClientDataTable({ + data: participants, + columns, + }); return ( <> @@ -100,26 +145,54 @@ export const ParticipantsTableClient = ({ onConfirm={doDelete} onCancel={resetDelete} /> + -
    + table={table} + toolbar={ + +
    - +
    - + - + } /> diff --git a/app/dashboard/_components/ProtocolUploader.tsx b/app/dashboard/_components/ProtocolUploader.tsx index c7feb1ad8..8e5806656 100644 --- a/app/dashboard/_components/ProtocolUploader.tsx +++ b/app/dashboard/_components/ProtocolUploader.tsx @@ -1,111 +1,29 @@ 'use client'; -import { FileDown, Loader2 } from 'lucide-react'; -import { AnimatePresence, motion } from 'motion/react'; -import { useCallback } from 'react'; -import { useDropzone } from 'react-dropzone'; -import JobCard from '~/components/ProtocolImport/JobCard'; -import { Button, type ButtonProps } from '~/components/ui/Button'; -import { PROTOCOL_EXTENSION } from '~/fresco.config'; -import usePortal from '~/hooks/usePortal'; +import ProtocolImportPopover from '~/components/ProtocolImport/ProtocolImportPopover'; +import { type ButtonProps } from '~/components/ui/Button'; import { useProtocolImport } from '~/hooks/useProtocolImport'; -import { withNoSSRWrapper } from '~/utils/NoSSRWrapper'; -import { cn } from '~/utils/shadcn'; -function ProtocolUploader({ +export default function ProtocolUploader({ className, buttonVariant, buttonSize, - hideCancelButton, buttonDisabled, }: { className?: string; buttonVariant?: ButtonProps['variant']; buttonSize?: ButtonProps['size']; - hideCancelButton?: boolean; buttonDisabled?: boolean; }) { - const Portal = usePortal(); - - const { importProtocols, jobs, cancelJob, cancelAllJobs } = - useProtocolImport(); - - const { getInputProps, open } = useDropzone({ - // Disable automatic opening of file dialog - we do it manually to allow for - // job cards to be clicked - noClick: true, - onDropAccepted: importProtocols, - accept: { - 'application/octect-stream': [PROTOCOL_EXTENSION], - 'application/zip': [PROTOCOL_EXTENSION], - }, - }); - - const handleCancelJob = useCallback( - (jobId: string) => () => cancelJob(jobId), - [cancelJob], - ); - - const isActive = jobs && jobs.length > 0 && jobs.some((job) => !job.error); + const { importProtocols } = 
useProtocolImport(); return ( - <> - - {!hideCancelButton && jobs.length > 0 && ( - - )} - - - - - {jobs.map((job, index) => ( - - ))} - - - - - + ); } - -export default withNoSSRWrapper(ProtocolUploader); diff --git a/app/dashboard/_components/ProtocolsTable/ActionsDropdown.tsx b/app/dashboard/_components/ProtocolsTable/ActionsDropdown.tsx index c2dabc44d..8e6d32f88 100644 --- a/app/dashboard/_components/ProtocolsTable/ActionsDropdown.tsx +++ b/app/dashboard/_components/ProtocolsTable/ActionsDropdown.tsx @@ -1,18 +1,19 @@ 'use client'; -import { MoreHorizontal } from 'lucide-react'; -import { Button } from '~/components/ui/Button'; +import type { Row } from '@tanstack/react-table'; +import { DeleteIcon, MoreHorizontal } from 'lucide-react'; +import { useState } from 'react'; +import { DeleteProtocolsDialog } from '~/app/dashboard/protocols/_components/DeleteProtocolsDialog'; +import { IconButton } from '~/components/ui/Button'; import { DropdownMenu, DropdownMenuContent, + DropdownMenuGroup, DropdownMenuItem, DropdownMenuLabel, DropdownMenuTrigger, } from '~/components/ui/dropdown-menu'; -import type { Row } from '@tanstack/react-table'; -import { useState } from 'react'; -import type { ProtocolWithInterviews } from '~/types/types'; -import { DeleteProtocolsDialog } from '~/app/dashboard/protocols/_components/DeleteProtocolsDialog'; +import type { ProtocolWithInterviews } from './ProtocolsTableClient'; export const ActionsDropdown = ({ row, @@ -36,17 +37,27 @@ export const ActionsDropdown = ({ protocolsToDelete={protocolToDelete ?? 
[]} /> - - - + } + size="sm" + /> + } + nativeButton + /> - Actions - handleDelete(row.original)}> - Delete - + + Actions + handleDelete(row.original)} + icon={} + > + Delete + + diff --git a/app/dashboard/_components/ProtocolsTable/AnonymousRecruitmentURLButton.tsx b/app/dashboard/_components/ProtocolsTable/AnonymousRecruitmentURLButton.tsx index 0712f284a..b759492ba 100644 --- a/app/dashboard/_components/ProtocolsTable/AnonymousRecruitmentURLButton.tsx +++ b/app/dashboard/_components/ProtocolsTable/AnonymousRecruitmentURLButton.tsx @@ -1,16 +1,16 @@ 'use client'; -import { Check, Copy } from 'lucide-react'; +import { Copy } from 'lucide-react'; import { useEffect, useState } from 'react'; import { Button } from '~/components/ui/Button'; -import { useToast } from '~/components/ui/use-toast'; +import { useToast } from '~/components/ui/Toast'; export const AnonymousRecruitmentURLButton = ({ protocolId, }: { protocolId: string; }) => { - const { toast } = useToast(); + const { promise } = useToast(); const [url, setUrl] = useState(null); useEffect(() => { @@ -24,30 +24,16 @@ export const AnonymousRecruitmentURLButton = ({ return; } - navigator.clipboard - .writeText(url) - .then(() => { - toast({ - title: 'Success!', - description: 'URL copied to clipboard', - variant: 'success', - icon: , - }); - }) - .catch((error) => { - // eslint-disable-next-line no-console - console.error('Could not copy text: ', error); - toast({ - title: 'Error', - description: 'Could not copy text', - variant: 'destructive', - }); - }); + void promise(navigator.clipboard.writeText(url), { + loading: 'Copying URL to clipboard...', + success: 'URL copied to clipboard!', + error: 'Failed to copy URL to clipboard.', + }); }; return ( - ); diff --git a/app/dashboard/_components/ProtocolsTable/Columns.tsx b/app/dashboard/_components/ProtocolsTable/Columns.tsx index 2074deb1f..6a1d32843 100644 --- a/app/dashboard/_components/ProtocolsTable/Columns.tsx +++ 
b/app/dashboard/_components/ProtocolsTable/Columns.tsx @@ -1,23 +1,17 @@ 'use client'; -import { type ColumnDef } from '@tanstack/react-table'; -import { Checkbox } from '~/components/ui/checkbox'; +import { type StrictColumnDef } from '~/components/DataTable/types'; +import Image from 'next/image'; +import Checkbox from '~/lib/form/components/fields/Checkbox'; import { DataTableColumnHeader } from '~/components/DataTable/ColumnHeader'; -import type { ProtocolWithInterviews } from '~/types/types'; -import { AnonymousRecruitmentURLButton } from './AnonymousRecruitmentURLButton'; import TimeAgo from '~/components/ui/TimeAgo'; -import Image from 'next/image'; -import { buttonVariants } from '~/components/ui/Button'; -import InfoTooltip from '~/components/InfoTooltip'; -import Paragraph from '~/components/ui/typography/Paragraph'; -import Heading from '~/components/ui/typography/Heading'; -import Link from '~/components/Link'; -import { InfoIcon } from 'lucide-react'; +import { AnonymousRecruitmentURLButton } from './AnonymousRecruitmentURLButton'; +import type { ProtocolWithInterviews } from './ProtocolsTableClient'; export const getProtocolColumns = ( allowAnonRecruitment = false, -): ColumnDef[] => { - const columns: ColumnDef[] = [ +): StrictColumnDef[] => { + const columns: StrictColumnDef[] = [ { id: 'select', header: ({ table }) => ( @@ -39,25 +33,28 @@ export const getProtocolColumns = ( }, { accessorKey: 'name', + sortingFn: 'text', header: ({ column }) => { return ; }, cell: ({ row }) => { return ( -
    +
    Protocol icon - {row.original.name} + {row.original.name}
    ); }, }, { accessorKey: 'importedAt', + sortingFn: 'datetime', header: ({ column }) => { return ; }, @@ -65,6 +62,7 @@ export const getProtocolColumns = ( }, { accessorKey: 'lastModified', + sortingFn: 'datetime', header: ({ column }) => { return ; }, @@ -75,38 +73,12 @@ export const getProtocolColumns = ( if (allowAnonRecruitment) { columns.push({ id: 'participant-url', - header: () => { + enableSorting: false, + header: ({ column }) => { return ( - - Anonymous Participation URL - -
    - } - content={ - <> - - Anonymous Participation URLs - - - Anonymous recruitment is enabled, so you can generate - anonymous participation URLs for your protocols from the - "Anonymous Participation URL" column in the table - below.. These URLs can be shared with participants to allow - them to self-enroll in your study. - - - To disable anonymous recruitment, visit the{' '} - settings page. - - - } + ); }, diff --git a/app/dashboard/_components/ProtocolsTable/ProtocolsTable.tsx b/app/dashboard/_components/ProtocolsTable/ProtocolsTable.tsx index b49876875..7b870e2b9 100644 --- a/app/dashboard/_components/ProtocolsTable/ProtocolsTable.tsx +++ b/app/dashboard/_components/ProtocolsTable/ProtocolsTable.tsx @@ -1,18 +1,19 @@ -import { unstable_noStore } from 'next/cache'; import { Suspense } from 'react'; -import { DataTableSkeleton } from '~/components/data-table/data-table-skeleton'; +import { DataTableSkeleton } from '~/components/DataTable/DataTableSkeleton'; import { getAppSetting } from '~/queries/appSettings'; import { getProtocols } from '~/queries/protocols'; import ProtocolsTableClient from './ProtocolsTableClient'; async function getData() { - unstable_noStore(); - - return Promise.all([ - getProtocols(), - getAppSetting('allowAnonymousRecruitment'), - getAppSetting('uploadThingToken'), - ]); + const [protocols, allowAnonymousRecruitment, storageProvider, uploadThingToken] = + await Promise.all([ + getProtocols(), + getAppSetting('allowAnonymousRecruitment'), + getAppSetting('storageProvider'), + getAppSetting('uploadThingToken'), + ]); + const storageConfigured = storageProvider === 's3' || !!uploadThingToken; + return [protocols, allowAnonymousRecruitment, storageConfigured] as const; } export type GetData = ReturnType; @@ -20,7 +21,13 @@ export type GetData = ReturnType; export default function ProtocolsTable() { return ( } + fallback={ + + } > diff --git a/app/dashboard/_components/ProtocolsTable/ProtocolsTableClient.tsx 
b/app/dashboard/_components/ProtocolsTable/ProtocolsTableClient.tsx index 4159b33c3..9e8742088 100644 --- a/app/dashboard/_components/ProtocolsTable/ProtocolsTableClient.tsx +++ b/app/dashboard/_components/ProtocolsTable/ProtocolsTableClient.tsx @@ -1,17 +1,32 @@ 'use client'; -import { use, useState } from 'react'; -import { DeleteProtocolsDialog } from '~/app/dashboard/protocols/_components/DeleteProtocolsDialog'; +import { type ColumnDef, type Row } from '@tanstack/react-table'; +import { Trash } from 'lucide-react'; +import { use, useMemo, useState } from 'react'; +import { SuperJSON } from 'superjson'; import { DataTable } from '~/components/DataTable/DataTable'; -import type { ProtocolWithInterviews } from '~/types/types'; +import { DataTableFloatingBar } from '~/components/DataTable/DataTableFloatingBar'; +import { DataTableToolbar } from '~/components/DataTable/DataTableToolbar'; +import { Button } from '~/components/ui/Button'; +import { useClientDataTable } from '~/hooks/useClientDataTable'; +import type { GetProtocolsQuery } from '~/queries/protocols'; +import { DeleteProtocolsDialog } from '../../protocols/_components/DeleteProtocolsDialog'; import ProtocolUploader from '../ProtocolUploader'; import { ActionsDropdown } from './ActionsDropdown'; import { getProtocolColumns } from './Columns'; import { type GetData } from './ProtocolsTable'; +export type ProtocolWithInterviews = GetProtocolsQuery[number]; + const ProtocolsTableClient = ({ dataPromise }: { dataPromise: GetData }) => { - const [protocols, allowAnonymousRecruitment, hasUploadThingToken] = + // TanStack Table: consumers must also opt out so React Compiler doesn't memoize JSX that depends on the table ref. 
+ 'use no memo'; + const [rawProtocols, allowAnonymousRecruitment, storageConfigured] = use(dataPromise); + const protocols = useMemo( + () => SuperJSON.parse(rawProtocols), + [rawProtocols], + ); const [showAlertDialog, setShowAlertDialog] = useState(false); const [protocolsToDelete, setProtocolsToDelete] = @@ -22,15 +37,51 @@ const ProtocolsTableClient = ({ dataPromise }: { dataPromise: GetData }) => { setShowAlertDialog(true); }; + const actionsColumn: ColumnDef = { + id: 'actions', + cell: ({ row }: { row: Row }) => ( + + ), + }; + + const columns = useMemo[]>( + () => [...getProtocolColumns(allowAnonymousRecruitment), actionsColumn], + // eslint-disable-next-line react-hooks/exhaustive-deps + [allowAnonymousRecruitment], + ); + + const { table } = useClientDataTable({ + data: protocols, + columns, + }); + return ( <> } + table={table} + toolbar={ + + + + } + floatingBar={ + + + + } /> ; }) { - const protocols = use(protocolsPromise); - const participants = use(participantsPromise); + const rawProtocols = use(protocolsPromise); + const protocols = SuperJSON.parse(rawProtocols); + const rawParticipants = use(participantsPromise); + const participants = SuperJSON.parse(rawParticipants); const allowAnonymousRecruitment = use(allowAnonymousRecruitmentPromise); - const [selectedProtocol, setSelectedProtocol] = useState(); + const [selectedProtocol, setSelectedProtocol] = useState>(); const [selectedParticipant, setSelectedParticipant] = useState(); const router = useRouter(); @@ -51,30 +54,27 @@ export default function RecruitmentTestSection({ return ( <> -
    - - + placeholder="Select a Participant..." + />
    -
    +
    - setShowConfirmDialog(state)} - > - - - Are you sure? - - This action will delete ALL application data, including interviews - and protocols. This action cannot be undone. Do you want to - continue? - - - - + closeDialog={() => setShowConfirmDialog(false)} + title="Are you sure?" + description="This action will delete ALL application data, including interviews and protocols. This action cannot be undone. Do you want to continue?" + footer={ + <> + - - - + + } + > ); }; diff --git a/app/dashboard/_components/SummaryStatistics/Icons.tsx b/app/dashboard/_components/SummaryStatistics/Icons.tsx index 5a92bfb0b..5d67b1d7a 100644 --- a/app/dashboard/_components/SummaryStatistics/Icons.tsx +++ b/app/dashboard/_components/SummaryStatistics/Icons.tsx @@ -1,29 +1,29 @@ export const ProtocolIcon = () => ( -
    +
    -
    -
    +
    +
    -
    -
    +
    +
    ); export const InterviewIcon = () => ( -
    +
    -
    -
    +
    +
    -
    +
    -
    -
    +
    +
    -
    -
    +
    +
    diff --git a/app/dashboard/_components/SummaryStatistics/StatCard.tsx b/app/dashboard/_components/SummaryStatistics/StatCard.tsx index 064a4ba66..ccbac8069 100644 --- a/app/dashboard/_components/SummaryStatistics/StatCard.tsx +++ b/app/dashboard/_components/SummaryStatistics/StatCard.tsx @@ -1,12 +1,15 @@ import { use } from 'react'; +import Surface from '~/components/layout/Surface'; +import Heading from '~/components/typography/Heading'; import { Skeleton } from '~/components/ui/skeleton'; -import Heading from '~/components/ui/typography/Heading'; -import { cn } from '~/utils/shadcn'; +import { cx } from '~/utils/cva'; -const statCardClasses = cn( - 'flex flex-col gap-4 rounded-xl border border-[hsl(var(--platinum--dark))] bg-card p-4 text-card-foreground shadow-xl shadow-platinum-dark transition-all', - 'sm:flex-row sm:items-center md:p-6 lg:gap-6 lg:p-10', - ' hover:scale-[102%]', +const statCardClasses = cx( + 'flex flex-col gap-4 border transition-all', + '@3xs:flex-row @3xs:items-center @lg:gap-6', + 'hover:elevation-medium hover:scale-[102%]', + 'w-full rounded outline-none', + 'tablet-landscape:px-6 tablet-landscape:py-8 px-4 py-6', ); function StatCard({ title, @@ -22,13 +25,22 @@ function StatCard({ const data = use(dataPromise); return ( -
    -
    {icon}
    + +
    {icon}
    - {title} - {data[render]} + + {title} + + + {data[render]} +
    -
    + ); } @@ -40,13 +52,15 @@ export function StatCardSkeleton({ icon: React.ReactNode; }) { return ( -
    -
    {icon}
    + +
    {icon}
    - {title} + + {title} +
    -
    + ); } diff --git a/app/dashboard/_components/SummaryStatistics/SummaryStatistics.tsx b/app/dashboard/_components/SummaryStatistics/SummaryStatistics.tsx index e2cf2012a..36a50c4b0 100644 --- a/app/dashboard/_components/SummaryStatistics/SummaryStatistics.tsx +++ b/app/dashboard/_components/SummaryStatistics/SummaryStatistics.tsx @@ -1,20 +1,28 @@ import Image from 'next/image'; import Link from 'next/link'; import { Suspense } from 'react'; -import ResponsiveContainer from '~/components/ResponsiveContainer'; -import { getSummaryStatistics } from '~/queries/summaryStatistics'; +import ResponsiveContainer from '~/components/layout/ResponsiveContainer'; +import { type getSummaryStatistics } from '~/queries/summaryStatistics'; import { InterviewIcon, ProtocolIcon } from './Icons'; import StatCard, { StatCardSkeleton } from './StatCard'; -export default function SummaryStatistics() { - const data = getSummaryStatistics(); +type SummaryStatisticsProps = { + dataPromise: ReturnType; +}; +export default function SummaryStatistics({ + dataPromise, +}: SummaryStatisticsProps) { return ( - + } /> @@ -22,13 +30,17 @@ export default function SummaryStatistics() { > } /> - + - + } /> @@ -69,7 +85,7 @@ export default function SummaryStatistics() { > } /> @@ -78,25 +94,3 @@ export default function SummaryStatistics() { ); } - -export const SummaryStatisticsSkeleton = () => ( - - } /> - - } - /> - } /> - -); diff --git a/app/dashboard/_components/UpdateSettingsValue.tsx b/app/dashboard/_components/UpdateSettingsValue.tsx index df50e22bd..35f8b3083 100644 --- a/app/dashboard/_components/UpdateSettingsValue.tsx +++ b/app/dashboard/_components/UpdateSettingsValue.tsx @@ -1,34 +1,36 @@ import { Loader2 } from 'lucide-react'; -import { useState } from 'react'; -import { type z } from 'zod'; +import { type ReactNode, useState } from 'react'; +import type { z } from 'zod/mini'; +import { setAppSetting } from '~/actions/appSettings'; import { Button } from '~/components/ui/Button'; 
-import { Input } from '~/components/ui/Input'; +import InputField from '~/lib/form/components/fields/InputField'; +import { type AppSetting } from '~/schemas/appSettings'; import ReadOnlyEnvAlert from '../settings/ReadOnlyEnvAlert'; -export default function UpdateSettingsValue({ +export default function UpdateSettingsValue({ + settingsKey, initialValue, - updateValue, - schema, readOnly, + schema, + suffixComponent, }: { - initialValue?: T; - updateValue: (value: T) => Promise; - schema: z.ZodSchema; + settingsKey: AppSetting; + initialValue?: string; readOnly?: boolean; + schema: z.ZodMiniType; + suffixComponent?: ReactNode; }) { const [newValue, setNewValue] = useState(initialValue); const [error, setError] = useState(null); const [isSaving, setSaving] = useState(false); - // If key is empty or invalid, set the error state - const handleChange = (event: React.ChangeEvent) => { - const value = event.target.value; - - const result = schema.safeParse(value); + // If settingsKey is empty or invalid, set the error state + const handleChange = (value: string | undefined) => { + const result = schema.safeParse(value ?? initialValue ?? ''); if (!result.success) { setError( - `Invalid: ${result.error.errors.map((e) => e.message).join(', ')}`, + `Invalid: ${result.error.issues.map((e) => e.message).join(', ')}`, ); } else { setError(null); @@ -47,30 +49,27 @@ export default function UpdateSettingsValue({ if (!newValue) return; setSaving(true); - await updateValue(newValue); + await setAppSetting(settingsKey, newValue); setSaving(false); }; return ( <> {readOnly && } - event.target.select()} type="text" - error={error} className="w-full" disabled={readOnly ?? isSaving} + suffixComponent={suffixComponent} /> + {error &&

    {error}

    } {newValue !== initialValue && (
    - {!isSaving && ( - - )} - } + diff --git a/app/dashboard/_components/UpdateUploadThingTokenAlert.tsx b/app/dashboard/_components/UpdateUploadThingTokenAlert.tsx index de66d219a..cdbb3eb43 100644 --- a/app/dashboard/_components/UpdateUploadThingTokenAlert.tsx +++ b/app/dashboard/_components/UpdateUploadThingTokenAlert.tsx @@ -1,16 +1,16 @@ -import { AlertTriangleIcon } from 'lucide-react'; -import Link from '~/components/Link'; import { Alert, AlertDescription, AlertTitle } from '~/components/ui/Alert'; +import Link from '~/components/ui/Link'; import { getAppSetting } from '~/queries/appSettings'; export default async function UpdateUploadThingTokenAlert() { - const uploadThingToken = await getAppSetting('uploadThingToken'); + const storageProvider = await getAppSetting('storageProvider'); + if (storageProvider === 's3') return null; + const uploadThingToken = await getAppSetting('uploadThingToken'); if (uploadThingToken) return null; return ( - - + Configuration update required You need to add a new UploadThing API key before you can upload diff --git a/app/dashboard/_components/UploadThingModal.tsx b/app/dashboard/_components/UploadThingModal.tsx index 3c47e4ffa..a145f8781 100644 --- a/app/dashboard/_components/UploadThingModal.tsx +++ b/app/dashboard/_components/UploadThingModal.tsx @@ -2,75 +2,53 @@ import Image from 'next/image'; import { useState } from 'react'; -import { setAppSetting } from '~/actions/appSettings'; import { UploadThingTokenForm } from '~/app/(blobs)/(setup)/_components/UploadThingTokenForm'; -import Link from '~/components/Link'; -import { - Dialog, - DialogContent, - DialogDescription, - DialogHeader, - DialogTitle, -} from '~/components/ui/dialog'; -import { Divider } from '~/components/ui/Divider'; -import Paragraph from '~/components/ui/typography/Paragraph'; +import Paragraph from '~/components/typography/Paragraph'; +import Link from '~/components/ui/Link'; +import Dialog from '~/lib/dialogs/Dialog'; function UploadThingModal() { 
const [open, setOpen] = useState(true); return ( - - - - Required Environment Variable Update - - - The Fresco update you installed requires a new UploadThing API - key.{' '} - - Until you add it, you will not be able to upload new protocols - - . Existing protocols will continue to function. - - - - Updating the key should take a matter of minutes, and can be - completed using the following steps: - -
      -
    1. - Visit the{' '} - - UploadThing dashboard - -
    2. -
    3. Select your project.
    4. -
    5. Select the API Keys tab.
    6. -
    7. - Ensure you have the SDK v7+ tab selected. -
    8. -
    9. - Copy the token by clicking the Copy button (see screenshot - below).{' '} - UploadThing API key dashboard -
    10. -
    11. - Paste the token into the field below and click "save and - continue". -
    12. -
    -
    -
    - - setAppSetting('uploadThingToken', token)} - /> -
    + setOpen(false)} + title="Required Environment Variable Update" + description="The Fresco update you installed requires a new UploadThing API + key. Until you add it, you will not be able to upload new protocols. Existing protocols will continue to function." + > + + Updating the key should take a matter of minutes, and can be completed + using the following steps: + +
      +
    1. + Visit the{' '} + + UploadThing dashboard + +
    2. +
    3. Select your project.
    4. +
    5. Select the API Keys tab.
    6. +
    7. + Ensure you have the SDK v7+ tab selected. +
    8. +
    9. + Copy the token by clicking the Copy button (see screenshot below).{' '} + UploadThing API key dashboard +
    10. +
    11. + Paste the token into the field below and click "save and + continue". +
    12. +
    +
    ); } diff --git a/app/dashboard/_components/UserMenu.tsx b/app/dashboard/_components/UserMenu.tsx index 213f0694c..a7240741b 100644 --- a/app/dashboard/_components/UserMenu.tsx +++ b/app/dashboard/_components/UserMenu.tsx @@ -4,7 +4,7 @@ import SubmitButton from '~/components/ui/SubmitButton'; const UserMenu = () => { return (
    void logout()}> - + Sign out diff --git a/app/dashboard/interviews/_components/DeleteInterviewsDialog.tsx b/app/dashboard/interviews/_components/DeleteInterviewsDialog.tsx index 13b95a65a..d6b856422 100644 --- a/app/dashboard/interviews/_components/DeleteInterviewsDialog.tsx +++ b/app/dashboard/interviews/_components/DeleteInterviewsDialog.tsx @@ -1,23 +1,15 @@ -import type { Interview } from '~/lib/db/generated/client'; -import { AlertCircle, Loader2, Trash2 } from 'lucide-react'; +import { Loader2, Trash2 } from 'lucide-react'; import { type Dispatch, type SetStateAction, useEffect, useState } from 'react'; import { deleteInterviews } from '~/actions/interviews'; import { Alert, AlertDescription, AlertTitle } from '~/components/ui/Alert'; -import { - AlertDialog, - AlertDialogCancel, - AlertDialogContent, - AlertDialogDescription, - AlertDialogFooter, - AlertDialogHeader, - AlertDialogTitle, -} from '~/components/ui/AlertDialog'; import { Button } from '~/components/ui/Button'; +import type { GetInterviewsQuery } from '~/queries/interviews'; +import Dialog from '~/lib/dialogs/Dialog'; type DeleteInterviewsDialog = { open: boolean; setOpen: Dispatch>; - interviewsToDelete: Interview[]; + interviewsToDelete: GetInterviewsQuery; }; export const DeleteInterviewsDialog = ({ @@ -47,66 +39,60 @@ export const DeleteInterviewsDialog = ({ }; return ( - - - - Are you absolutely sure? - - This action cannot be undone. This will permanently delete{' '} - - {interviewsToDelete.length}{' '} - {interviewsToDelete.length > 1 ? ( - <>interviews. - ) : ( - <>interview. - )} - - - {hasUnexported && ( - - - Warning - - {interviewsToDelete.length > 1 ? ( - <> - One or more of the selected interviews - has not yet been exported. - - ) : ( - <> - The selected interview - has not yet been exported. - - )} - - - )} - - - + + This action cannot be undone. This will permanently delete{' '} + + {interviewsToDelete.length}{' '} + {interviewsToDelete.length > 1 ? <>interviews. 
: <>interview.} + + + } + footer={ + <> + + + } + > + {hasUnexported && ( + + Warning + + {interviewsToDelete.length > 1 ? ( <> - Deleting... + One or more of the selected interviews + has not yet been exported. ) : ( <> - Delete + The selected interview + has not yet been exported. )} - - - - +
    +
    + )} + ); }; diff --git a/app/dashboard/interviews/_components/ExportCSVInterviewURLs.tsx b/app/dashboard/interviews/_components/ExportCSVInterviewURLs.tsx index bd3631a60..1189bccb4 100644 --- a/app/dashboard/interviews/_components/ExportCSVInterviewURLs.tsx +++ b/app/dashboard/interviews/_components/ExportCSVInterviewURLs.tsx @@ -4,21 +4,21 @@ import { Download } from 'lucide-react'; import { unparse } from 'papaparse'; import { useState } from 'react'; import { Button } from '~/components/ui/Button'; -import { useToast } from '~/components/ui/use-toast'; +import { useToast } from '~/components/ui/Toast'; import { useDownload } from '~/hooks/useDownload'; -import type { GetInterviewsReturnType } from '~/queries/interviews'; -import type { GetProtocolsReturnType } from '~/queries/protocols'; +import type { GetInterviewsQuery } from '~/queries/interviews'; +import type { ProtocolWithInterviews } from '../../_components/ProtocolsTable/ProtocolsTableClient'; function ExportCSVInterviewURLs({ protocol, interviews, }: { - protocol?: Awaited[number]; - interviews: Awaited; + protocol?: ProtocolWithInterviews; + interviews: Awaited; }) { const download = useDownload(); const [isExporting, setIsExporting] = useState(false); - const { toast } = useToast(); + const { add } = useToast(); const handleExport = () => { try { @@ -26,7 +26,6 @@ function ExportCSVInterviewURLs({ if (!protocol?.id) return; const csvData = interviews.map((interview) => ({ - participant_id: interview.participantId, identifier: interview.participant.identifier, interview_url: `${window.location.origin}/interview/${interview.id}`, })); @@ -42,17 +41,17 @@ function ExportCSVInterviewURLs({ download(url, fileName); // Clean up the URL object URL.revokeObjectURL(url); - toast({ + add({ + title: 'Success', description: 'Incomplete interview URLs CSV exported successfully', - variant: 'success', - duration: 3000, + type: 'success', }); } catch (error) { - toast({ + add({ title: 'Error', description: 'An 
error occurred while exporting incomplete interview URLs', - variant: 'destructive', + type: 'destructive', }); throw new Error( 'An error occurred while exporting incomplete interview URLs', @@ -64,12 +63,13 @@ function ExportCSVInterviewURLs({ return ( ); } diff --git a/app/dashboard/interviews/_components/ExportInterviewsDialog.tsx b/app/dashboard/interviews/_components/ExportInterviewsDialog.tsx index df7229d89..218aa032f 100644 --- a/app/dashboard/interviews/_components/ExportInterviewsDialog.tsx +++ b/app/dashboard/interviews/_components/ExportInterviewsDialog.tsx @@ -1,46 +1,11 @@ -import type { Interview } from '~/lib/db/generated/client'; -import { DialogDescription } from '@radix-ui/react-dialog'; -import { FileWarning, Loader2, XCircle } from 'lucide-react'; -import { useState } from 'react'; -import { exportInterviews, updateExportTime } from '~/actions/interviews'; -import { deleteZipFromUploadThing } from '~/actions/uploadThing'; +import { useExportProgress } from '~/components/ExportProgressProvider'; import { Button } from '~/components/ui/Button'; -import { cardClasses } from '~/components/ui/card'; -import { - Dialog, - DialogContent, - DialogFooter, - DialogHeader, - DialogTitle, -} from '~/components/ui/dialog'; -import Heading from '~/components/ui/typography/Heading'; -import { useToast } from '~/components/ui/use-toast'; -import { useDownload } from '~/hooks/useDownload'; import useSafeLocalStorage from '~/hooks/useSafeLocalStorage'; -import trackEvent from '~/lib/analytics'; +import type { GetInterviewsQuery } from '~/queries/interviews'; +import Dialog from '~/lib/dialogs/Dialog'; import { ExportOptionsSchema } from '~/lib/network-exporters/utils/types'; -import { ensureError } from '~/utils/ensureError'; -import { cn } from '~/utils/shadcn'; import ExportOptionsView from './ExportOptionsView'; -const ExportingStateAnimation = () => { - return ( -
    -
    - - - Exporting and zipping files. Please wait... - -
    -
    - ); -}; - export const ExportInterviewsDialog = ({ open, handleCancel, @@ -48,11 +13,9 @@ export const ExportInterviewsDialog = ({ }: { open: boolean; handleCancel: () => void; - interviewsToExport: Interview[]; + interviewsToExport: GetInterviewsQuery; }) => { - const download = useDownload(); - const { toast } = useToast(); - const [isExporting, setIsExporting] = useState(false); + const { startExport } = useExportProgress(); const [exportOptions, setExportOptions] = useSafeLocalStorage( 'exportOptions', @@ -68,123 +31,31 @@ export const ExportInterviewsDialog = ({ }, ); - const handleConfirm = async () => { - let exportFilename = null; // Used to track the filename of the temp file uploaded to UploadThing - - // start export process - setIsExporting(true); - try { - const interviewIds = interviewsToExport.map((interview) => interview.id); - - const { zipUrl, zipKey, status, error } = await exportInterviews( - interviewIds, - exportOptions, - ); - - if (status === 'error' || !zipUrl || !zipKey) { - throw new Error(error ?? 'An error occured during export.'); - } - - exportFilename = zipKey; - - // update export time of interviews - await updateExportTime(interviewIds); - - const responseAsBlob = await fetch(zipUrl).then((res) => { - if (!res.ok) { - throw new Error('HTTP error ' + res.status); - } - return res.blob(); - }); - - // create a download link - const url = URL.createObjectURL(responseAsBlob); - - // Download the zip file - download(url, 'Network Canvas Export.zip'); - // clean up the URL object - URL.revokeObjectURL(url); - } catch (error) { - const e = ensureError(error); - - toast({ - icon: , - title: 'Error', - description: - 'Failed to export, please try again. 
The error was: ' + e.message, - variant: 'destructive', - }); - - void trackEvent({ - type: 'Error', - name: 'FailedToExportInterviews', - message: e.message, - stack: e.stack, - metadata: { - error: e.name, - string: e.toString(), - path: '/dashboard/interviews/_components/ExportInterviewsDialog.tsx', - }, - }); - } finally { - if (exportFilename) { - // Attempt to delete the zip file from UploadThing. - void deleteZipFromUploadThing(exportFilename).catch((error) => { - const e = ensureError(error); - void trackEvent({ - type: 'Error', - name: 'FailedToDeleteTempFile', - message: e.message, - stack: e.stack, - metadata: { - error: e.name, - string: e.toString(), - path: '/dashboard/interviews/_components/ExportInterviewsDialog.tsx', - }, - }); - - toast({ - icon: , - duration: Infinity, - variant: 'default', - title: 'Could not delete temporary file', - description: - 'We were unable to delete the temporary file containing your exported data, which is stored on your UploadThing account. Although extremely unlikely, it is possible that this file could be accessed by someone else. You can delete the file manually by visiting uploadthing.com and logging in with your GitHub account. Please use the feedback button to report this issue.', - }); - }); - } - - setIsExporting(false); - handleCancel(); // Close the dialog - } + const handleConfirm = () => { + const interviewIds = interviewsToExport.map((interview) => interview.id); + startExport(interviewIds, exportOptions); + handleCancel(); }; return ( - <> - {isExporting && } - - - - Confirm File Export Options - - Before exporting, please confirm the export options that you wish - to use. These options are identical to those found in Interviewer. 
- - - - - - - - - - + + + + + } + > + + ); }; diff --git a/app/dashboard/interviews/_components/ExportOptionsView.tsx b/app/dashboard/interviews/_components/ExportOptionsView.tsx index 23f5fb163..02d3f3ecd 100644 --- a/app/dashboard/interviews/_components/ExportOptionsView.tsx +++ b/app/dashboard/interviews/_components/ExportOptionsView.tsx @@ -1,15 +1,13 @@ import { type Dispatch, type SetStateAction } from 'react'; -import { cardClasses } from '~/components/ui/card'; -import { Switch } from '~/components/ui/switch'; -import Heading from '~/components/ui/typography/Heading'; -import Paragraph from '~/components/ui/typography/Paragraph'; +import Heading from '~/components/typography/Heading'; +import Paragraph from '~/components/typography/Paragraph'; +import Switch from '~/lib/form/components/fields/ToggleField'; import type { ExportOptions } from '~/lib/network-exporters/utils/types'; -import { cn } from '~/utils/shadcn'; +import { cx } from '~/utils/cva'; -const sectionClasses = cn( - cardClasses, - 'p-4 flex gap-4', - '[&_div]:[flex-basis:fit-content]', +const sectionClasses = cx( + 'flex gap-4 p-4', + '[&_div]:basis-[fit-content]', '[&_div:nth-child(2)]:flex [&_div:nth-child(2)]:items-center [&_div:nth-child(2)]:justify-center [&_div:nth-child(2)]:p-4', ); @@ -76,8 +74,10 @@ const ExportOptionsView = ({
    - Export GraphML Files - + + Export GraphML Files + + GraphML is the main file format used by the Network Canvas software. GraphML files can be used to manually import your data into Server, and can be opened by many other pieces of network analysis software. @@ -85,15 +85,17 @@ const ExportOptionsView = ({
    handleGraphMLSwitch(v ?? false)} />
    - Export CSV Files - + + Export CSV Files + + CSV is a widely used format for storing network data, but this wider compatibility comes at the expense of robustness. If you enable this format, your networks will be exported as an{' '} @@ -104,15 +106,17 @@ const ExportOptionsView = ({
    handleCSVSwitch(v ?? false)} />
    - Use Screen Layout Coordinates - + + Use Screen Layout Coordinates + + By default Interviewer exports sociogram node coordinates as normalized X/Y values (a number between 0 and 1 for each axis, with the origin in the top left). Enabling this option will store @@ -121,8 +125,8 @@ const ExportOptionsView = ({
    handleScreenLayoutCoordinatesSwitch(v ?? false)} />
    diff --git a/app/dashboard/interviews/_components/GenerateInterviewURLs.tsx b/app/dashboard/interviews/_components/GenerateInterviewURLs.tsx index 5b774954c..0e4436172 100644 --- a/app/dashboard/interviews/_components/GenerateInterviewURLs.tsx +++ b/app/dashboard/interviews/_components/GenerateInterviewURLs.tsx @@ -2,35 +2,34 @@ import { FileUp } from 'lucide-react'; import { use, useEffect, useState } from 'react'; +import superjson from 'superjson'; +import Paragraph from '~/components/typography/Paragraph'; import { Button } from '~/components/ui/Button'; import { - Dialog, - DialogContent, - DialogDescription, - DialogFooter, - DialogHeader, - DialogTitle, -} from '~/components/ui/dialog'; -import { - Select, - SelectContent, - SelectItem, - SelectTrigger, - SelectValue, -} from '~/components/ui/select'; + Popover, + PopoverContent, + PopoverTrigger, +} from '~/components/ui/popover'; import { Skeleton } from '~/components/ui/skeleton'; -import type { GetInterviewsReturnType } from '~/queries/interviews'; -import type { GetProtocolsReturnType } from '~/queries/protocols'; +import SelectField from '~/lib/form/components/fields/Select/Native'; +import type { GetInterviewsQuery } from '~/queries/interviews'; +import type { + GetProtocolsQuery, + GetProtocolsReturnType, +} from '~/queries/protocols'; import ExportCSVInterviewURLs from './ExportCSVInterviewURLs'; export const GenerateInterviewURLs = ({ interviews, protocolsPromise, + className, }: { - interviews: Awaited; + interviews: Awaited; protocolsPromise: GetProtocolsReturnType; + className?: string; }) => { - const protocols = use(protocolsPromise); + const rawProtocols = use(protocolsPromise); + const protocols = superjson.parse(rawProtocols); const [interviewsToExport, setInterviewsToExport] = useState< typeof interviews @@ -39,7 +38,6 @@ export const GenerateInterviewURLs = ({ const [selectedProtocol, setSelectedProtocol] = useState<(typeof protocols)[number]>(); - // Only export interviews that are 
1. incomplete and 2. belong to the selected protocol useEffect(() => { if (interviews) { setInterviewsToExport( @@ -52,71 +50,51 @@ export const GenerateInterviewURLs = ({ } }, [interviews, selectedProtocol]); - const [open, setOpen] = useState(false); - - const handleOpenChange = () => { - setOpen(!open); - }; - return ( - <> - - - - - Generate Incomplete Interview URLs - - Generate a CSV that contains unique interview URLs for all{' '} - incomplete interviews by protocol. These URLs - can be shared with participants to allow them to finish their - interviews. - - -
    - {!protocols ? ( - - ) : ( - - )} -
    - - - - -
    -
    - + setSelectedProtocol(protocol); + }} + value={selectedProtocol?.id} + placeholder="Select a Protocol..." + /> + )} +
    + +
    + + ); }; diff --git a/app/dashboard/interviews/loading.tsx b/app/dashboard/interviews/loading.tsx deleted file mode 100644 index 1f3fefd2b..000000000 --- a/app/dashboard/interviews/loading.tsx +++ /dev/null @@ -1,22 +0,0 @@ -import ResponsiveContainer from '~/components/ResponsiveContainer'; -import { DataTableSkeleton } from '~/components/data-table/data-table-skeleton'; -import Section from '~/components/layout/Section'; -import PageHeader from '~/components/ui/typography/PageHeader'; - -export default function Loading() { - return ( - <> - - - - -
    - -
    -
    - - ); -} diff --git a/app/dashboard/interviews/page.tsx b/app/dashboard/interviews/page.tsx index e5bac4325..b0afe2651 100644 --- a/app/dashboard/interviews/page.tsx +++ b/app/dashboard/interviews/page.tsx @@ -1,27 +1,38 @@ -import ResponsiveContainer from '~/components/ResponsiveContainer'; -import Section from '~/components/layout/Section'; -import PageHeader from '~/components/ui/typography/PageHeader'; +import { Suspense } from 'react'; +import { DataTableSkeleton } from '~/components/DataTable/DataTableSkeleton'; +import ResponsiveContainer from '~/components/layout/ResponsiveContainer'; +import PageHeader from '~/components/typography/PageHeader'; +import { requirePageAuth } from '~/lib/auth/guards'; import { requireAppNotExpired } from '~/queries/appSettings'; -import { requirePageAuth } from '~/utils/auth'; import InterviewsTableServer from '../_components/InterviewsTable/InterviewsTableServer'; -export default async function InterviewPage() { - await requireAppNotExpired(); - await requirePageAuth(); - +export default function InterviewPage() { return ( <> - - - - -
    - -
    + + + + } + > + + ); } + +async function AuthenticatedInterviews() { + await requireAppNotExpired(); + await requirePageAuth(); + return ; +} diff --git a/app/dashboard/layout.tsx b/app/dashboard/layout.tsx index 335a60321..57c56b813 100644 --- a/app/dashboard/layout.tsx +++ b/app/dashboard/layout.tsx @@ -1,31 +1,40 @@ +import { type Metadata } from 'next'; +import { connection } from 'next/server'; +import { Suspense } from 'react'; import NetlifyBadge from '~/components/NetlifyBadge'; -import { getAppSetting, requireAppNotExpired } from '~/queries/appSettings'; -import { requirePageAuth } from '~/utils/auth'; +import { ExportProgressProvider } from '~/components/ExportProgressProvider'; +import { getAppSetting } from '~/queries/appSettings'; import { NavigationBar } from './_components/NavigationBar'; import UploadThingModal from './_components/UploadThingModal'; -export const metadata = { +export const metadata: Metadata = { title: 'Network Canvas Fresco - Dashboard', description: 'Fresco.', }; -export const dynamic = 'force-dynamic'; - -const Layout = async ({ children }: { children: React.ReactNode }) => { - await requireAppNotExpired(); - await requirePageAuth(); - - const uploadThingToken = await getAppSetting('uploadThingToken'); - +const Layout = ({ children }: { children: React.ReactNode }) => { return ( - <> +
    - {!uploadThingToken && } - - {children} + + + + {children} - +
    ); }; +async function UploadThingTokenGate() { + await connection(); + const storageProvider = await getAppSetting('storageProvider'); + if (storageProvider === 's3') return null; + const uploadThingToken = await getAppSetting('uploadThingToken'); + if (!uploadThingToken) return ; + return null; +} + export default Layout; diff --git a/app/dashboard/loading.tsx b/app/dashboard/loading.tsx deleted file mode 100644 index 80702414c..000000000 --- a/app/dashboard/loading.tsx +++ /dev/null @@ -1,33 +0,0 @@ -import ResponsiveContainer from '~/components/ResponsiveContainer'; -import { DataTableSkeleton } from '~/components/data-table/data-table-skeleton'; -import Section from '~/components/layout/Section'; -import Heading from '~/components/ui/typography/Heading'; -import PageHeader from '~/components/ui/typography/PageHeader'; -import Paragraph from '~/components/ui/typography/Paragraph'; -import { SummaryStatisticsSkeleton } from './_components/SummaryStatistics/SummaryStatistics'; - -export default function Loading() { - return ( - <> - - - - - - Recent Activity - - This table summarizes the most recent activity within Fresco. Use it - to keep track of new protocols, interviews, and participants. - - - -
    - -
    -
    - - ); -} diff --git a/app/dashboard/page.tsx b/app/dashboard/page.tsx index f6a26bea8..12b07a103 100644 --- a/app/dashboard/page.tsx +++ b/app/dashboard/page.tsx @@ -1,54 +1,112 @@ +import Image from 'next/image'; +import { type SearchParams } from 'nuqs/server'; import { Suspense } from 'react'; -import ResponsiveContainer from '~/components/ResponsiveContainer'; -import Section from '~/components/layout/Section'; -import Heading from '~/components/ui/typography/Heading'; -import PageHeader from '~/components/ui/typography/PageHeader'; -import Paragraph from '~/components/ui/typography/Paragraph'; +import { DataTableSkeleton } from '~/components/DataTable/DataTableSkeleton'; +import ResponsiveContainer from '~/components/layout/ResponsiveContainer'; +import Heading from '~/components/typography/Heading'; +import PageHeader from '~/components/typography/PageHeader'; +import Paragraph from '~/components/typography/Paragraph'; +import { requirePageAuth } from '~/lib/auth/guards'; +import { fetchActivities } from '~/queries/activityFeed'; import { requireAppNotExpired } from '~/queries/appSettings'; -import { requirePageAuth } from '~/utils/auth'; +import { getSummaryStatistics } from '~/queries/summaryStatistics'; import ActivityFeed from './_components/ActivityFeed/ActivityFeed'; import { searchParamsCache } from './_components/ActivityFeed/SearchParams'; +import { + InterviewIcon, + ProtocolIcon, +} from './_components/SummaryStatistics/Icons'; +import { StatCardSkeleton } from './_components/SummaryStatistics/StatCard'; import SummaryStatistics from './_components/SummaryStatistics/SummaryStatistics'; import UpdateUploadThingTokenAlert from './_components/UpdateUploadThingTokenAlert'; import AnonymousRecruitmentWarning from './protocols/_components/AnonymousRecruitmentWarning'; -export default async function Home({ - searchParams, +export default function Home(props: { searchParams: Promise }) { + return ( + <> + + }> + + + + ); +} + +function 
DashboardContentSkeleton() { + return ( + <> + + } /> + + } + /> + } /> + + + + Recent Activity + + This table summarizes the most recent activity within Fresco. Use it + to keep track of new protocols, interviews, and participants. + + + + + + + ); +} + +async function DashboardContent({ + searchParams: searchParamsPromise, }: { - searchParams: Record; + searchParams: Promise; }) { await requireAppNotExpired(); await requirePageAuth(); - searchParamsCache.parse(searchParams); + const cache = await searchParamsCache.parse(searchParamsPromise); + + const summaryPromise = getSummaryStatistics(); + const activitiesPromise = fetchActivities(cache); return ( <> - - - - - - - - - - + + + + + + + + + - - Recent Activity + + Recent Activity This table summarizes the most recent activity within Fresco. Use it to keep track of new protocols, interviews, and participants. - -
    - -
    + + ); diff --git a/app/dashboard/participants/_components/AddParticipantButton.tsx b/app/dashboard/participants/_components/AddParticipantButton.tsx index 40f3b77fc..530cd91c6 100644 --- a/app/dashboard/participants/_components/AddParticipantButton.tsx +++ b/app/dashboard/participants/_components/AddParticipantButton.tsx @@ -1,9 +1,9 @@ import { Button } from '~/components/ui/Button'; -import { type Participant } from '~/lib/db/generated/client'; +import { Plus } from 'lucide-react'; import { useState } from 'react'; import ParticipantModal from '~/app/dashboard/participants/_components/ParticipantModal'; -import { Plus } from 'lucide-react'; +import { type Participant } from '~/lib/db/generated/client'; type AddParticipantButtonProps = { existingParticipants: Participant[]; @@ -15,17 +15,16 @@ function AddParticipantButton({ const [isOpen, setOpen] = useState(false); return ( -
    + <> - -
    + ); } diff --git a/app/dashboard/participants/_components/DeleteParticipantsDialog.tsx b/app/dashboard/participants/_components/DeleteParticipantsDialog.tsx index 292d88a41..25004ecaa 100644 --- a/app/dashboard/participants/_components/DeleteParticipantsDialog.tsx +++ b/app/dashboard/participants/_components/DeleteParticipantsDialog.tsx @@ -1,16 +1,8 @@ -import { AlertCircle, Loader2, Trash2 } from 'lucide-react'; +import { Trash2 } from 'lucide-react'; import { useMemo, useState } from 'react'; import { Alert, AlertDescription, AlertTitle } from '~/components/ui/Alert'; -import { - AlertDialog, - AlertDialogCancel, - AlertDialogContent, - AlertDialogDescription, - AlertDialogFooter, - AlertDialogHeader, - AlertDialogTitle, -} from '~/components/ui/AlertDialog'; import { Button } from '~/components/ui/Button'; +import Dialog from '~/lib/dialogs/Dialog'; type DeleteParticipantsDialog = { open: boolean; @@ -39,7 +31,6 @@ export const DeleteParticipantsDialog = ({ if (haveUnexportedInterviews) { return ( - Warning {participantCount > 1 ? ( @@ -61,19 +52,20 @@ export const DeleteParticipantsDialog = ({ } return ( - - + Warning {participantCount > 1 ? ( <> One or more of the selected participants have interview data that - will also be deleted. + will also be deleted. This data is marked as having been exported, + but you may wish to confirm this before proceeding. ) : ( <> The selected participant has interview data that will also be - deleted. + deleted. This data is marked as having been exported, but you may + wish to confirm this before proceeding. )} @@ -82,25 +74,17 @@ export const DeleteParticipantsDialog = ({ }, [haveInterviews, haveUnexportedInterviews, participantCount]); return ( - - - - Are you absolutely sure? - - This action cannot be undone. This will permanently delete{' '} - - {`${participantCount} participant${ - participantCount > 1 ? 's' : '' - }`} - - . - - {dialogContent} - - - + 1 ? 
's' : ''}.`} + footer={ + <> + - - - + + } + > + {dialogContent} + ); }; diff --git a/app/dashboard/participants/_components/DropzoneField.tsx b/app/dashboard/participants/_components/DropzoneField.tsx deleted file mode 100644 index d6e39d608..000000000 --- a/app/dashboard/participants/_components/DropzoneField.tsx +++ /dev/null @@ -1,118 +0,0 @@ -import { FileCheck, FileText } from 'lucide-react'; -import { useId } from 'react'; -import { useDropzone } from 'react-dropzone'; -import { useController, type Control } from 'react-hook-form'; -import { Label } from '~/components/ui/Label'; -import Paragraph from '~/components/ui/typography/Paragraph'; -import { type FormSchema } from '~/schemas/participant'; -import parseCSV from '~/utils/parseCSV'; -import { cn } from '~/utils/shadcn'; - -const accept = { - 'text/csv': [], - 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet': [], - 'application/vnd.ms-excel': [], -}; -const maxFiles = 1; -const maxSize = 1024 * 5000; // 5MB - -export default function DropzoneField({ - control, - name, - label, - hint, -}: { - control: Control; - name: 'csvFile'; - label?: string; - hint?: string; -}) { - const id = useId(); - - const controller = useController({ - name, - control, - rules: { - required: 'No CSV file selected. Please select a file.', - validate: { - hasCorrectFields: (value) => { - if (!value) { - return 'No CSV file selected. Please select a file.'; - } - - if (!Array.isArray(value)) { - return 'Invalid CSV. Please select a valid CSV file.'; - } - - // Check that every row has either a label or an identifier - const valid = value.every( - (row) => - (row.label !== undefined && row.label !== '') || row.identifier, - ); - - if (!valid) { - return 'Invalid CSV. 
Every row must have either a label or an identifier'; - } - }, - }, - }, - }); - - const { getRootProps, getInputProps } = useDropzone({ - accept, - multiple: false, - maxFiles, - maxSize, - onDrop: async (acceptedFiles, _fileRejections) => { - if (acceptedFiles?.length && acceptedFiles[0]) { - const csvData = await parseCSV(acceptedFiles[0]); - controller.field.onChange(csvData); - } - }, - }); - - return ( -
    - {label && ( - - )} - {hint && ( - {hint} - )} -
    - - {!controller.field.value && ( - <> - - - Drag & drop file here, or click to select. - - - )} - {controller.field.value && ( - <> - - - File selected. Click import to continue, or drop a new file here - to replace. - - - )} -
    - {controller.fieldState.error && ( - - {controller.fieldState.error.message} - - )} -
    - ); -} diff --git a/app/dashboard/participants/_components/ExportParticipants/ExportCSVParticipantURLs.tsx b/app/dashboard/participants/_components/ExportParticipants/ExportCSVParticipantURLs.tsx deleted file mode 100644 index 37d49c149..000000000 --- a/app/dashboard/participants/_components/ExportParticipants/ExportCSVParticipantURLs.tsx +++ /dev/null @@ -1,76 +0,0 @@ -'use client'; - -import { Download } from 'lucide-react'; -import { unparse } from 'papaparse'; -import { useState } from 'react'; -import { Button } from '~/components/ui/Button'; -import { useToast } from '~/components/ui/use-toast'; -import { useDownload } from '~/hooks/useDownload'; -import type { GetParticipantsReturnType } from '~/queries/participants'; -import type { GetProtocolsReturnType } from '~/queries/protocols'; - -function ExportCSVParticipantURLs({ - protocol, - participants, -}: { - protocol?: Awaited[0]; - participants: Awaited; -}) { - const download = useDownload(); - const [isExporting, setIsExporting] = useState(false); - const { toast } = useToast(); - - const handleExport = () => { - try { - setIsExporting(true); - if (!participants) return; - if (!protocol?.id) return; - - // CSV file format - const csvData = participants.map((participant) => ({ - id: participant.id, - identifier: participant.identifier, - interview_url: `${window.location.origin}/onboard/${protocol.id}/?participantId=${participant.id}`, - })); - - const csv = unparse(csvData, { header: true }); - - // Create a download link - const blob = new Blob([csv], { type: 'text/csv' }); - const url = URL.createObjectURL(blob); - // trigger the download - const protocolNameWithoutExtension = protocol.name.split('.')[0]; - const fileName = `participation_urls_${protocolNameWithoutExtension}.csv`; - download(url, fileName); - // Clean up the URL object - URL.revokeObjectURL(url); - toast({ - description: 'Participation URLs CSV exported successfully', - variant: 'success', - duration: 3000, - }); - } catch (error) 
{ - toast({ - title: 'Error', - description: 'An error occurred while exporting participation URLs', - variant: 'destructive', - }); - throw new Error('An error occurred while exporting participation URLs'); - } - - setIsExporting(false); - }; - - return ( - - ); -} - -export default ExportCSVParticipantURLs; diff --git a/app/dashboard/participants/_components/ExportParticipants/ExportParticipants.tsx b/app/dashboard/participants/_components/ExportParticipants/ExportParticipants.tsx index 87cd130a0..eba75a59a 100644 --- a/app/dashboard/participants/_components/ExportParticipants/ExportParticipants.tsx +++ b/app/dashboard/participants/_components/ExportParticipants/ExportParticipants.tsx @@ -1,70 +1,77 @@ 'use client'; -import { Check, FileUp } from 'lucide-react'; +import { FileUp } from 'lucide-react'; import { unparse } from 'papaparse'; -import { use, useState } from 'react'; +import { useCallback } from 'react'; +import type { ParticipantWithInterviews } from '~/app/dashboard/_components/ParticipantsTable/ParticipantsTableClient'; +import type { ProtocolWithInterviews } from '~/app/dashboard/_components/ProtocolsTable/ProtocolsTableClient'; import { Button } from '~/components/ui/Button'; -import { useToast } from '~/components/ui/use-toast'; +import { useToast } from '~/components/ui/Toast'; import { useDownload } from '~/hooks/useDownload'; -import type { GetParticipantsReturnType } from '~/queries/participants'; - -function ExportParticipants({ - participantsPromise, -}: { - participantsPromise: GetParticipantsReturnType; -}) { - const participants = use(participantsPromise); +export function useExportParticipants(protocols: ProtocolWithInterviews[]) { const download = useDownload(); - const [isExporting, setIsExporting] = useState(false); - const { toast } = useToast(); + const { add } = useToast(); + + return useCallback( + (participants: ParticipantWithInterviews[]) => { + try { + const csvData = participants.map((participant) => { + const row: Record = { 
+ id: participant.id, + identifier: participant.identifier, + label: participant.label ?? '', + }; - const handleExport = () => { - try { - setIsExporting(true); - if (!participants) return; + for (const protocol of protocols) { + const name = protocol.name.split('.')[0] ?? protocol.id; + row[`interview_url_${name}`] = + `${window.location.origin}/onboard/${protocol.id}/?participantId=${participant.id}`; + } - // CSV file format - const csvData = participants.map((participant) => ({ - identifier: participant.identifier, - label: participant.label, - })); + return row; + }); - const csv = unparse(csvData, { header: true }); + const csv = unparse(csvData, { header: true }); + const blob = new Blob([csv], { type: 'text/csv' }); + const url = URL.createObjectURL(blob); + download(url, 'participants.csv'); + URL.revokeObjectURL(url); - // Create a download link - const blob = new Blob([csv], { type: 'text/csv' }); - const url = URL.createObjectURL(blob); - // trigger the download - download(url, 'participants.csv'); - // Clean up the URL object - URL.revokeObjectURL(url); - toast({ - title: 'Success', - icon: , - description: 'Participant CSV exported successfully', - variant: 'success', - }); - } catch (error) { - toast({ - title: 'Error', - description: 'An error occurred while exporting participants', - variant: 'destructive', - }); - throw new Error('An error occurred while exporting participants'); - } + add({ + title: 'Success', + description: 'Participants exported successfully', + type: 'success', + }); + } catch (error) { + add({ + title: 'Error', + description: 'An error occurred while exporting participants', + type: 'destructive', + }); + } + }, + [protocols, download, add], + ); +} - setIsExporting(false); - }; +function ExportParticipants({ + participants, + protocols, +}: { + participants: ParticipantWithInterviews[]; + protocols: ProtocolWithInterviews[]; +}) { + const exportParticipants = useExportParticipants(protocols); return ( ); } diff --git 
a/app/dashboard/participants/_components/ExportParticipants/GenerateParticipantURLsButton.tsx b/app/dashboard/participants/_components/ExportParticipants/GenerateParticipantURLsButton.tsx deleted file mode 100644 index f08ccf574..000000000 --- a/app/dashboard/participants/_components/ExportParticipants/GenerateParticipantURLsButton.tsx +++ /dev/null @@ -1,125 +0,0 @@ -'use client'; -import { useState, useEffect } from 'react'; - -import { - Select, - SelectContent, - SelectItem, - SelectTrigger, - SelectValue, -} from '~/components/ui/select'; - -import ExportCSVParticipantURLs from './ExportCSVParticipantURLs'; -import FancyBox from '~/components/ui/FancyBox'; -import { Button } from '~/components/ui/Button'; -import { - Dialog, - DialogContent, - DialogDescription, - DialogFooter, - DialogHeader, - DialogTitle, -} from '~/components/ui/dialog'; -import { FileUp } from 'lucide-react'; -import type { GetProtocolsReturnType } from '~/queries/protocols'; -import type { GetParticipantsReturnType } from '~/queries/participants'; - -export const GenerateParticipantURLs = ({ - protocols, - participants, -}: { - protocols: Awaited; - participants: Awaited; -}) => { - const [selectedParticipants, setSelectedParticipants] = useState( - [], - ); - - const [selectedProtocol, setSelectedProtocol] = - useState[0]>(); - - // Default to all participants selected - useEffect(() => { - if (participants) { - setSelectedParticipants(participants.map((p) => p.id)); - } - }, [participants]); - - const [open, setOpen] = useState(false); - - const handleOpenChange = () => { - setOpen(!open); - }; - - return ( - <> - - - - - Generate Participation URLs - - Generate a CSV that contains{' '} - unique participation URLs for all participants by - protocol. These URLs can be shared with participants to allow them - to take your interview. - - -
    - - ({ - id: participant.id, - label: participant.identifier, - value: participant.id, - }))} - placeholder="Select Participants..." - singular="Participant" - plural="Participants" - value={selectedParticipants} - onValueChange={setSelectedParticipants} - /> -
    - - - participants.find((p) => p.id === id)!, - )} - /> - -
    -
    - - ); -}; diff --git a/app/dashboard/participants/_components/ExportParticipants/ImportExportSection.tsx b/app/dashboard/participants/_components/ExportParticipants/ImportExportSection.tsx deleted file mode 100644 index 1b323af4d..000000000 --- a/app/dashboard/participants/_components/ExportParticipants/ImportExportSection.tsx +++ /dev/null @@ -1,32 +0,0 @@ -import { Suspense } from 'react'; -import ResponsiveContainer from '~/components/ResponsiveContainer'; -import SettingsSection from '~/components/layout/SettingsSection'; -import { ButtonSkeleton } from '~/components/ui/Button'; -import Paragraph from '~/components/ui/typography/Paragraph'; -import { getParticipants } from '~/queries/participants'; -import ImportCSVModal from '../ImportCSVModal'; -import ExportParticipants from './ExportParticipants'; - -export default function ImportExportSection() { - const participantsPromise = getParticipants(); - return ( - - - - }> - - -
    - } - > - - Import or export participants in bulk using the options to the right. - Refer to our documentation for information about the formats used. - - - - ); -} diff --git a/app/dashboard/participants/_components/ImportCSVModal.tsx b/app/dashboard/participants/_components/ImportCSVModal.tsx deleted file mode 100644 index 3fd688d89..000000000 --- a/app/dashboard/participants/_components/ImportCSVModal.tsx +++ /dev/null @@ -1,178 +0,0 @@ -'use client'; - -import { AlertCircle, FileDown, Loader2 } from 'lucide-react'; -import { useState } from 'react'; -import { useForm } from 'react-hook-form'; -import { ZodError } from 'zod'; -import { importParticipants } from '~/actions/participants'; -import { Alert, AlertDescription, AlertTitle } from '~/components/ui/Alert'; -import { Button } from '~/components/ui/Button'; -import { - Dialog, - DialogContent, - DialogDescription, - DialogFooter, - DialogHeader, - DialogTitle, - DialogTrigger, -} from '~/components/ui/dialog'; -import Paragraph from '~/components/ui/typography/Paragraph'; -import UnorderedList from '~/components/ui/typography/UnorderedList'; -import { useToast } from '~/components/ui/use-toast'; -import { FormSchema } from '~/schemas/participant'; -import DropzoneField from './DropzoneField'; - -const ImportCSVModal = ({ - onImportComplete, -}: { - onImportComplete?: () => void; -}) => { - const { toast } = useToast(); - const { control, handleSubmit, reset, formState } = useForm({ - shouldUnregister: true, - mode: 'onChange', - }); - - const { isSubmitting, isValid } = formState; - - const [showImportDialog, setShowImportDialog] = useState(false); - - const onSubmit = async (data: unknown) => { - try { - const safeData = FormSchema.parse(data); - const result = await importParticipants(safeData.csvFile); - - if ( - result.existingParticipants && - result.existingParticipants.length > 0 - ) { - toast({ - title: 'Import completed with collisions', - description: ( - <> -

    - Your participants were imported successfully, but some - identifiers collided with existing participants and were not - imported. -

    - {result.existingParticipants.length < 5 && ( -
      - {result.existingParticipants.map((item) => ( -
    • {item.identifier}
    • - ))} -
    - )} - - ), - variant: 'destructive', - }); - } else { - toast({ - title: 'Participants imported', - description: 'Participants have been imported successfully', - variant: 'success', - }); - } - - onImportComplete?.(); - - reset(); - setShowImportDialog(false); - } catch (e) { - // if it's a validation error, show the error message - if (e instanceof ZodError) { - toast({ - title: 'Error', - description: e.errors[0] - ? `Invalid CSV File: ${e.errors[0].message}` - : 'Invalid CSV file. Please check the file requirements and try again.', - variant: 'destructive', - }); - return; - } - // eslint-disable-next-line no-console - console.log(e); - toast({ - title: 'Error', - description: 'An error occurred while importing participants', - variant: 'destructive', - }); - } - }; - - return ( - <> - - - - - - - Import participants - - - - CSV file requirements - - - Your CSV file can contain the following columns: - - -
  • - identifier - must be a unique string, and{' '} - should not be easy to guess. Used to - generate the onboarding URL to allow integration with - other survey tools. -
  • -
  • - label - can be any text or number. Used to provide a human - readable label for the participant. -
  • -
    - - Either an identifier column or a label column{' '} - must be provided for each participant. - - - Note: The identifier and label column headers must be - lowercase. - -
    -
    -
    -
    -
    await onSubmit(data))} - className="flex flex-col" - > - - - - - - -
    -
    - - ); -}; - -export default ImportCSVModal; diff --git a/app/dashboard/participants/_components/ImportParticipants.tsx b/app/dashboard/participants/_components/ImportParticipants.tsx new file mode 100644 index 000000000..0b53cf828 --- /dev/null +++ b/app/dashboard/participants/_components/ImportParticipants.tsx @@ -0,0 +1,161 @@ +'use client'; + +import { FileDown, Upload } from 'lucide-react'; +import { useCallback, useState } from 'react'; +import { useDropzone } from 'react-dropzone'; +import { importParticipants } from '~/actions/participants'; +import Heading from '~/components/typography/Heading'; +import Paragraph from '~/components/typography/Paragraph'; +import { Button } from '~/components/ui/Button'; +import { + Popover, + PopoverContent, + PopoverTrigger, +} from '~/components/ui/popover'; +import { useToast } from '~/components/ui/Toast'; +import { csvDataSchema } from '~/schemas/participant'; +import { cx } from '~/utils/cva'; +import parseCSV from '~/utils/parseCSV'; + +export default function ImportParticipants() { + const [open, setOpen] = useState(false); + const { add } = useToast(); + + const handleFilesAccepted = useCallback( + async (files: File[]) => { + const file = files[0]; + if (!file) return; + + try { + const csvData = await parseCSV(file); + const parsed = csvDataSchema.safeParse(csvData); + + if (!parsed.success) { + add({ + title: 'Error', + description: + 'File must be a valid CSV with label or identifier columns', + type: 'destructive', + }); + return; + } + + const result = await importParticipants(parsed.data); + + if (result.error) { + add({ + title: 'Error', + description: result.error, + type: 'destructive', + }); + return; + } + + if ( + result.existingParticipants && + result.existingParticipants.length > 0 + ) { + add({ + title: 'Import completed with collisions', + description: ( + <> +

    + Your participants were imported successfully, but some + identifiers collided with existing participants and were not + imported. +

    + {result.existingParticipants.length < 5 && ( +
      + {result.existingParticipants.map((item) => ( +
    • {item.identifier}
    • + ))} +
    + )} + + ), + type: 'destructive', + }); + } else { + add({ + title: 'Participants imported', + description: 'Participants have been imported successfully', + type: 'success', + }); + } + + setOpen(false); + } catch (e) { + // eslint-disable-next-line no-console + console.log(e); + add({ + title: 'Error', + description: 'An error occurred while importing participants', + type: 'destructive', + }); + } + }, + [add], + ); + + const { + getRootProps, + getInputProps, + isDragActive, + open: openFileDialog, + } = useDropzone({ + onDropAccepted: handleFilesAccepted, + accept: { + 'text/csv': [], + 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet': [], + 'application/vnd.ms-excel': [], + }, + noClick: true, + multiple: false, + maxFiles: 1, + maxSize: 1024 * 5000, + }); + + return ( + + } />}> + Import Participants + + +
    + +
    + +
    +
    + + {isDragActive ? 'Drop file here' : 'Import participants'} + + + Drag & drop a .csv file here + +
    + +
    +
    +
    + ); +} diff --git a/app/dashboard/participants/_components/ParticipantModal.tsx b/app/dashboard/participants/_components/ParticipantModal.tsx index 792ac3d12..d5af44a1c 100644 --- a/app/dashboard/participants/_components/ParticipantModal.tsx +++ b/app/dashboard/participants/_components/ParticipantModal.tsx @@ -1,26 +1,23 @@ 'use client'; import { createId } from '@paralleldrive/cuid2'; -import type { Participant } from '~/lib/db/generated/client'; -import { HelpCircle, Loader2 } from 'lucide-react'; +import { HelpCircle, WandSparkles } from 'lucide-react'; import { useRouter } from 'next/navigation'; -import { useEffect, useState, type Dispatch, type SetStateAction } from 'react'; -import { z } from 'zod'; +import { useState, type Dispatch, type SetStateAction } from 'react'; import { createParticipant, updateParticipant } from '~/actions/participants'; import ActionError from '~/components/ActionError'; import InfoTooltip from '~/components/InfoTooltip'; +import Paragraph from '~/components/typography/Paragraph'; import { Button } from '~/components/ui/Button'; -import { Input } from '~/components/ui/Input'; -import { - Dialog, - DialogContent, - DialogFooter, - DialogHeader, - DialogTitle, -} from '~/components/ui/dialog'; -import Heading from '~/components/ui/typography/Heading'; -import Paragraph from '~/components/ui/typography/Paragraph'; -import useZodForm from '~/hooks/useZodForm'; +import type { Participant } from '~/lib/db/generated/client'; +import Dialog from '~/lib/dialogs/Dialog'; +import Field from '~/lib/form/components/Field/Field'; +import { FormWithoutProvider } from '~/lib/form/components/Form'; +import SubmitButton from '~/lib/form/components/SubmitButton'; +import InputField from '~/lib/form/components/fields/InputField'; +import useFormStore from '~/lib/form/hooks/useFormStore'; +import FormStoreProvider from '~/lib/form/store/formStoreProvider'; +import { z } from 'zod/mini'; import { participantIdentifierSchema, participantLabelSchema, 
@@ -42,172 +39,189 @@ function ParticipantModal({ existingParticipants, }: ParticipantModalProps) { const [error, setError] = useState(null); - const [working, setWorking] = useState(false); - const router = useRouter(); - const formSchema = z - .object({ - identifier: participantIdentifierSchema, - label: participantLabelSchema, - }) - .refine( - (data) => { - const existingParticipant = existingParticipants.find( - (p) => p.identifier === data.identifier, - ); - // Allow the current identifier if editing - return ( - !existingParticipant || - (existingParticipant.id === editingParticipant?.id) - ); - }, - { - path: ['identifier'], - message: 'This identifier is already in use.', - }, - ); - - type ValidationSchema = z.infer; - - const { - register, - handleSubmit, - reset, - setValue, - formState: { errors }, - } = useZodForm({ - schema: formSchema, - shouldUnregister: true, - }); - - const onSubmit = async (data: ValidationSchema) => { + + const handleSubmit = async (data: unknown) => { setError(null); - setWorking(true); + + const typedData = data as { + identifier: string; + label?: string | null; + }; if (editingParticipant) { await updateParticipant({ existingIdentifier: editingParticipant.identifier, formData: data, }); + router.refresh(); + setOpen(false); + return { success: true }; } - if (!editingParticipant) { - const result = await createParticipant([data]); + const result = await createParticipant([typedData]); - if (result.error) { - setError(result.error); - } else { - router.refresh(); - setOpen(false); - } + if (result.error) { + setError(result.error); + return { + success: false, + errors: { form: [result.error] }, + }; } - setWorking(false); + router.refresh(); + setOpen(false); + return { success: true }; }; - useEffect(() => { - if (editingParticipant) { - setValue('identifier', editingParticipant.identifier); - setValue('label', editingParticipant.label ?? 
''); - } - }, [editingParticipant, setValue]); - const handleOpenChange = (isOpen: boolean) => { setOpen(isOpen); if (!isOpen) { setEditingParticipant?.(null); setError(null); - reset(); } }; + // Use initialValues to set values when editing + const initialValues = editingParticipant + ? { + identifier: editingParticipant.identifier, + label: editingParticipant.label ?? '', + } + : undefined; + return ( - - - - - {editingParticipant ? 'Edit Participant' : 'Add Participant'} - - + + handleOpenChange(false)} + title={editingParticipant ? 'Edit Participant' : 'Add Participant'} + footer={ + <> + + + {editingParticipant ? 'Update' : 'Submit'} + + + } + > {error && (
    )} -
    await onSubmit(data))} - className="flex flex-col gap-2" + - - This could be a study ID, a number, or any other unique - identifier. It should be unique for each participant, and should - not be easy to guess{' '} - } - content={ - <> - - Participant Identifiers - - - Participant identifiers are used by Fresco to onboard - participants. They might be exposed to the participant - during this process via the participation URL, and so - must not contain any sensitive - information, and must not be easy for other participants - to guess (e.g. sequential numbers, or easily guessable - strings). - - - } - /> - . - - } - placeholder="Enter an identifier..." - error={errors.identifier?.message} - // Add an adornment to the right to allow automatically generating an ID - inputClassName="pr-28" - rightAdornment={ - - } + - - - - - - -
    -
    + + + + ); +} + +// Separate component to handle the identifier field with generate button +function IdentifierField({ + existingParticipants, + editingParticipant, + initialValue, +}: { + existingParticipants: Participant[]; + editingParticipant?: Participant | null; + initialValue?: string; +}) { + const setFieldValue = useFormStore((state) => state.setFieldValue); + + // Create validation that includes the uniqueness check + const identifierValidation = participantIdentifierSchema.check( + z.refine( + (data) => { + const existingParticipant = existingParticipants.find( + (p) => p.identifier === data, + ); + // Allow the current identifier if editing + return ( + !existingParticipant || + existingParticipant.id === editingParticipant?.id + ); + }, + { + message: 'This identifier is already in use.', + }, + ), + ); + + const hint = ( + <> + This could be a study ID, a number, or any other unique identifier. It + should be unique for each participant, and should not be easy to guess{' '} + } + title="Participant Identifiers" + description={(props) => ( + + Participant identifiers are used by Fresco to onboard participants. + They might be exposed to the participant during this process via the + participation URL, and so must not contain any + sensitive information, and must not be easy for other participants + to guess (e.g. sequential numbers, or easily guessable strings). + + )} + /> + . 
+ + ); + + return ( + { + setFieldValue('identifier', `p-${createId()}`); + }} + icon={} + > + Generate + + } + initialValue={initialValue} + /> ); } diff --git a/app/dashboard/participants/loading.tsx b/app/dashboard/participants/loading.tsx deleted file mode 100644 index d1548c68d..000000000 --- a/app/dashboard/participants/loading.tsx +++ /dev/null @@ -1,35 +0,0 @@ -import ResponsiveContainer from '~/components/ResponsiveContainer'; -import { DataTableSkeleton } from '~/components/data-table/data-table-skeleton'; -import Section from '~/components/layout/Section'; -import { SettingsSectionSkeleton } from '~/components/layout/SettingsSection'; -import { ButtonSkeleton } from '~/components/ui/Button'; -import PageHeader from '~/components/ui/typography/PageHeader'; - -export default function Loading() { - return ( - <> - - - - - - - -
    - } - /> - - - -
    - -
    -
    - - ); -} diff --git a/app/dashboard/participants/page.tsx b/app/dashboard/participants/page.tsx index 8ee130ff7..211990578 100644 --- a/app/dashboard/participants/page.tsx +++ b/app/dashboard/participants/page.tsx @@ -1,29 +1,46 @@ +import { Suspense } from 'react'; import ParticipantsTable from '~/app/dashboard/_components/ParticipantsTable/ParticipantsTable'; -import ResponsiveContainer from '~/components/ResponsiveContainer'; -import Section from '~/components/layout/Section'; -import PageHeader from '~/components/ui/typography/PageHeader'; +import { DataTableSkeleton } from '~/components/DataTable/DataTableSkeleton'; +import ResponsiveContainer from '~/components/layout/ResponsiveContainer'; +import PageHeader from '~/components/typography/PageHeader'; +import { requirePageAuth } from '~/lib/auth/guards'; import { requireAppNotExpired } from '~/queries/appSettings'; -import { requirePageAuth } from '~/utils/auth'; -import ImportExportSection from './_components/ExportParticipants/ImportExportSection'; - -export default async function ParticipantPage() { - await requireAppNotExpired(); - await requirePageAuth(); +export default function ParticipantPage() { return ( <> - - - - - -
    - -
    -
    + + + + + } + > + + ); } + +async function AuthenticatedParticipants() { + await requireAppNotExpired(); + await requirePageAuth(); + return ( + + + + ); +} diff --git a/app/dashboard/protocols/_components/AnonymousRecruitmentWarning.tsx b/app/dashboard/protocols/_components/AnonymousRecruitmentWarning.tsx index 2ff664da7..07b26f266 100644 --- a/app/dashboard/protocols/_components/AnonymousRecruitmentWarning.tsx +++ b/app/dashboard/protocols/_components/AnonymousRecruitmentWarning.tsx @@ -1,6 +1,6 @@ -import { AlertCircle } from 'lucide-react'; -import Link from '~/components/Link'; +import ResponsiveContainer from '~/components/layout/ResponsiveContainer'; import { Alert, AlertDescription, AlertTitle } from '~/components/ui/Alert'; +import Link from '~/components/ui/Link'; import { getAppSetting } from '~/queries/appSettings'; export default async function AnonymousRecruitmentWarning() { @@ -11,15 +11,17 @@ export default async function AnonymousRecruitmentWarning() { if (!allowAnonymousRecruitment) return null; return ( - - - Please Note - - Anonymous recruitment is enabled. This means that participants can - self-enroll in your study without needing to be invited, by visiting the - protocol-specific onboarding link. To disable anonymous recruitment, - visit the settings page. - - + + + Please Note + + Anonymous recruitment is enabled. This means that participants can + self-enroll in your study without needing to be invited, by visiting + the protocol-specific onboarding link. To disable anonymous + recruitment, visit{' '} + the settings page. 
+ + + ); } diff --git a/app/dashboard/protocols/_components/DeleteProtocolsDialog.tsx b/app/dashboard/protocols/_components/DeleteProtocolsDialog.tsx index 5d959472f..7a2f58ffd 100644 --- a/app/dashboard/protocols/_components/DeleteProtocolsDialog.tsx +++ b/app/dashboard/protocols/_components/DeleteProtocolsDialog.tsx @@ -1,19 +1,11 @@ -import { AlertCircle, Loader2, Trash2 } from 'lucide-react'; +import { Trash2 } from 'lucide-react'; import type { Dispatch, SetStateAction } from 'react'; import { useEffect, useState } from 'react'; import { deleteProtocols } from '~/actions/protocols'; import { Alert, AlertDescription, AlertTitle } from '~/components/ui/Alert'; -import { - AlertDialog, - AlertDialogCancel, - AlertDialogContent, - AlertDialogDescription, - AlertDialogFooter, - AlertDialogHeader, - AlertDialogTitle, -} from '~/components/ui/AlertDialog'; import { Button } from '~/components/ui/Button'; -import type { ProtocolWithInterviews } from '~/types/types'; +import Dialog from '~/lib/dialogs/Dialog'; +import type { ProtocolWithInterviews } from '../../_components/ProtocolsTable/ProtocolsTableClient'; type DeleteProtocolsDialogProps = { open: boolean; @@ -62,80 +54,68 @@ export const DeleteProtocolsDialog = ({ }; return ( - - - - Are you absolutely sure? - - This action cannot be undone. This will permanently delete{' '} - - {protocolsToDelete.length}{' '} - {protocolsToDelete.length > 1 ? <>protocols. : <>protocol.} - - - {protocolsInfo.hasInterviews && - !protocolsInfo.hasUnexportedInterviews && ( - - - Warning - - {protocolsToDelete.length > 1 ? ( - <> - One or more of the selected protocols have interview data - that will also be deleted. - - ) : ( - <> - The selected protocol has interview data that will also be - deleted. - - )} - - - )} - {protocolsInfo.hasUnexportedInterviews && ( - - - Warning - - {protocolsToDelete.length > 1 ? ( - <> - One or more of the selected protocols have interview data - that has not yet been exported. 
Deleting - these protocols will also delete its interview data. - - ) : ( - <> - The selected protocol has interview data that - has not yet been exported. Deleting this - protocol will also delete its interview data. - - )} - - - )} - - - + handleCancelDialog()} + title="Are you absolutely sure?" + description="This action cannot be undone. This will permanently delete the selected protocols." + footer={ + <> + + + } + > + {protocolsInfo.hasInterviews && + !protocolsInfo.hasUnexportedInterviews && ( + + Warning + + {protocolsToDelete.length > 1 ? ( + <> + One or more of the selected protocols have interview data that + will also be deleted. This data is marked as having been + exported, but you may wish to confirm this before proceeding. + + ) : ( + <> + The selected protocol has interview data that will also be + deleted. This data is marked as having been exported, but you + may wish to confirm this before proceeding. + + )} + + + )} + {protocolsInfo.hasUnexportedInterviews && ( + + Warning + + {protocolsToDelete.length > 1 ? ( <> - Deleting... + One or more of the selected protocols have interview data that{' '} + has not yet been exported. Deleting these + protocols will also delete its interview data. ) : ( <> - Permanently Delete + The selected protocol has interview data that + has not yet been exported. Deleting this + protocol will also delete its interview data. )} - - - - + + + )} + ); }; diff --git a/app/dashboard/protocols/loading.tsx b/app/dashboard/protocols/loading.tsx deleted file mode 100644 index c5b8fbd95..000000000 --- a/app/dashboard/protocols/loading.tsx +++ /dev/null @@ -1,22 +0,0 @@ -import ResponsiveContainer from '~/components/ResponsiveContainer'; -import { DataTableSkeleton } from '~/components/data-table/data-table-skeleton'; -import Section from '~/components/layout/Section'; -import PageHeader from '~/components/ui/typography/PageHeader'; - -export default function Loading() { - return ( - <> - - - - -
    - -
    -
    - - ); -} diff --git a/app/dashboard/protocols/page.tsx b/app/dashboard/protocols/page.tsx index 75ed794d5..0858fd6e9 100644 --- a/app/dashboard/protocols/page.tsx +++ b/app/dashboard/protocols/page.tsx @@ -1,31 +1,51 @@ import { Suspense } from 'react'; -import ResponsiveContainer from '~/components/ResponsiveContainer'; -import Section from '~/components/layout/Section'; -import PageHeader from '~/components/ui/typography/PageHeader'; +import { DataTableSkeleton } from '~/components/DataTable/DataTableSkeleton'; +import ResponsiveContainer from '~/components/layout/ResponsiveContainer'; +import PageHeader from '~/components/typography/PageHeader'; +import { requirePageAuth } from '~/lib/auth/guards'; import { requireAppNotExpired } from '~/queries/appSettings'; -import { requirePageAuth } from '~/utils/auth'; import ProtocolsTable from '../_components/ProtocolsTable/ProtocolsTable'; import UpdateUploadThingTokenAlert from '../_components/UpdateUploadThingTokenAlert'; -export default async function ProtocolsPage() { +export default function ProtocolsPage() { + return ( + <> + + + + + } + > + + + + ); +} + +async function AuthenticatedProtocols() { await requireAppNotExpired(); await requirePageAuth(); - return ( <> - - - - - - - -
    - -
    + + + + + ); diff --git a/app/dashboard/settings/ReadOnlyEnvAlert.tsx b/app/dashboard/settings/ReadOnlyEnvAlert.tsx index 04ffda13e..eb0c5e720 100644 --- a/app/dashboard/settings/ReadOnlyEnvAlert.tsx +++ b/app/dashboard/settings/ReadOnlyEnvAlert.tsx @@ -1,9 +1,8 @@ -import { Alert, AlertDescription, AlertTitle } from '~/components/ui/Alert'; +import { Alert, AlertDescription } from '~/components/ui/Alert'; export default function ReadOnlyEnvAlert() { return ( - - Note: + This setting is controlled by your .env file, and so can only be changed by modifying that file. diff --git a/app/dashboard/settings/_components/ApiTokensSection.tsx b/app/dashboard/settings/_components/ApiTokensSection.tsx new file mode 100644 index 000000000..3313010d3 --- /dev/null +++ b/app/dashboard/settings/_components/ApiTokensSection.tsx @@ -0,0 +1,33 @@ +import { Suspense } from 'react'; +import ApiTokenManagement from '~/components/ApiTokenManagement'; +import InterviewDataApiSwitch from '~/components/InterviewDataApiSwitch'; +import SettingsCard from '~/components/settings/SettingsCard'; +import SettingsField from '~/components/settings/SettingsField'; +import { ToggleFieldSkeleton } from '~/lib/form/components/fields/ToggleFieldSkeleton'; +import { getApiTokens } from '~/queries/apiTokens'; + +export default function ApiTokensSection() { + const apiTokensPromise = getApiTokens(); + + return ( + + }> + + + } + /> + + + + + ); +} diff --git a/app/dashboard/settings/_components/ConfigurationSection.tsx b/app/dashboard/settings/_components/ConfigurationSection.tsx new file mode 100644 index 000000000..f72770b95 --- /dev/null +++ b/app/dashboard/settings/_components/ConfigurationSection.tsx @@ -0,0 +1,30 @@ +import { Suspense } from 'react'; +import SettingsCard from '~/components/settings/SettingsCard'; +import SettingsField from '~/components/settings/SettingsField'; +import VersionSection, { + VersionSectionSkeleton, +} from '~/components/VersionSection'; +import { env } from '~/env'; 
+import { getInstallationId } from '~/queries/appSettings'; +import UpdateInstallationId from './UpdateInstallationId'; + +export default async function ConfigurationSection() { + const installationId = await getInstallationId(); + + return ( + + }> + + + + + + + ); +} diff --git a/app/dashboard/settings/_components/DeveloperToolsSection.tsx b/app/dashboard/settings/_components/DeveloperToolsSection.tsx new file mode 100644 index 000000000..73d3ada4c --- /dev/null +++ b/app/dashboard/settings/_components/DeveloperToolsSection.tsx @@ -0,0 +1,22 @@ +import SettingsCard from '~/components/settings/SettingsCard'; +import SettingsField from '~/components/settings/SettingsField'; +import RecruitmentTestSectionServer from '../../_components/RecruitmentTestSectionServer'; +import ResetButton from '../../_components/ResetButton'; + +export default function DeveloperToolsSection() { + return ( + + } + /> + + + ); +} diff --git a/app/dashboard/settings/_components/InterviewSettingsSection.tsx b/app/dashboard/settings/_components/InterviewSettingsSection.tsx new file mode 100644 index 000000000..aaca703b4 --- /dev/null +++ b/app/dashboard/settings/_components/InterviewSettingsSection.tsx @@ -0,0 +1,84 @@ +import { Suspense } from 'react'; +import AnonymousRecruitmentSwitch from '~/components/AnonymousRecruitmentSwitch'; +import FreezeInterviewsSwitch from '~/components/FreezeInterviewsSwitch'; +import LimitInterviewsSwitch from '~/components/LimitInterviewsSwitch'; +import SettingsCard from '~/components/settings/SettingsCard'; +import SettingsField from '~/components/settings/SettingsField'; +import ToggleSmallScreenWarning from '~/components/ToggleSmallScreenWarning'; +import { Alert, AlertDescription } from '~/components/ui/Alert'; +import { ToggleFieldSkeleton } from '~/lib/form/components/fields/ToggleFieldSkeleton'; +import { getAppSetting } from '~/queries/appSettings'; + +export default async function InterviewSettingsSection() { + const disableSmallScreenOverlay = 
await getAppSetting( + 'disableSmallScreenOverlay', + ); + + return ( + + }> + + + } + /> + + If this option is enabled, each participant will only be able to + submit a single completed interview for each + protocol (although they may have multiple incomplete interviews). + Once an interview has been completed, attempting to start a new + interview or to resume any other in-progress interview, will be + prevented. + + } + control={ + }> + + + } + /> + }> + + + } + /> + }> + + + } + > + {disableSmallScreenOverlay && ( + + + Ensure that you test your interview thoroughly on a small screen + when disabling this warning. Fresco is designed to work best on + larger screens, and using it on a small screen may lead to a poor + user experience for participants. + + + )} + + + ); +} diff --git a/app/dashboard/settings/_components/PasskeySettings.tsx b/app/dashboard/settings/_components/PasskeySettings.tsx new file mode 100644 index 000000000..13e7a7bd9 --- /dev/null +++ b/app/dashboard/settings/_components/PasskeySettings.tsx @@ -0,0 +1,191 @@ +'use client'; + +import { startRegistration } from '@simplewebauthn/browser'; +import { KeyRound, Plus, Trash } from 'lucide-react'; +import { useState } from 'react'; +import { + generateRegistrationOptions, + removePasskey, + verifyRegistration, +} from '~/actions/webauthn'; +import Surface from '~/components/layout/Surface'; +import SettingsField from '~/components/settings/SettingsField'; +import { Badge } from '~/components/ui/badge'; +import { Button } from '~/components/ui/Button'; +import useDialog from '~/lib/dialogs/useDialog'; + +type Passkey = { + id: string; + friendlyName: string | null; + deviceType: string; + createdAt: Date; + lastUsedAt: Date | null; + backedUp: boolean; +}; + +type PasskeySettingsProps = { + initialPasskeys: Passkey[]; + sandboxMode: boolean; + hasPassword: boolean; +}; + +function formatDate(date: Date | null) { + if (!date) return 'Never'; + return new Date(date).toLocaleDateString('en-US', { + 
year: 'numeric', + month: 'short', + day: 'numeric', + }); +} + +export default function PasskeySettings({ + initialPasskeys, + sandboxMode, + hasPassword, +}: PasskeySettingsProps) { + const [passkeys, setPasskeys] = useState(initialPasskeys); + const [loading, setLoading] = useState(false); + const [error, setError] = useState(null); + const { confirm } = useDialog(); + + const handleAddPasskey = async () => { + setError(null); + setLoading(true); + + try { + const { error: genError, data } = await generateRegistrationOptions(); + if (genError || !data) { + setError(genError ?? 'Failed to start registration'); + return; + } + + // IMMEDIATELY call startRegistration — preserves Safari user gesture + const credential = await startRegistration({ + optionsJSON: data.options, + }); + + const result = await verifyRegistration({ credential }); + if (result.error) { + setError(result.error); + return; + } + + if (result.data) { + setPasskeys((prev) => [ + { + id: result.data.id, + friendlyName: result.data.friendlyName, + deviceType: result.data.deviceType, + createdAt: result.data.createdAt, + lastUsedAt: null, + backedUp: false, + }, + ...prev, + ]); + } + } catch (e) { + if (e instanceof Error && e.name === 'NotAllowedError') { + return; + } + setError('Passkey registration failed'); + } finally { + setLoading(false); + } + }; + + const handleRemovePasskey = (passkey: Passkey) => { + void confirm({ + title: 'Remove Passkey', + description: `Remove "${passkey.friendlyName ?? 'Unnamed passkey'}"? You won't be able to sign in with it anymore.`, + confirmLabel: 'Remove', + onConfirm: async () => { + const result = await removePasskey(passkey.id); + if (result.error) { + setError(result.error); + } else { + setPasskeys((prev) => prev.filter((p) => p.id !== passkey.id)); + } + }, + }); + }; + + return ( + void handleAddPasskey()} + disabled={sandboxMode || loading} + color="primary" + icon={} + > + {loading ? 'Registering...' : 'Add passkey'} + + } + > + {error &&

    {error}

    } + + {passkeys.length > 0 && ( +
    + {passkeys.map((passkey) => ( + +
    + +
    +
    + + {passkey.friendlyName ?? 'Unnamed passkey'} + + + {passkey.deviceType === 'multiDevice' + ? 'Synced' + : 'Device-bound'} + +
    +
    + + Added {formatDate(passkey.createdAt)} + + + Last used {formatDate(passkey.lastUsedAt)} + +
    +
    +
    + +
    + ))} +
    + )} +
    + ); +} diff --git a/app/dashboard/settings/_components/PreviewModeSection.tsx b/app/dashboard/settings/_components/PreviewModeSection.tsx new file mode 100644 index 000000000..1b1c5d66a --- /dev/null +++ b/app/dashboard/settings/_components/PreviewModeSection.tsx @@ -0,0 +1,53 @@ +import { Suspense } from 'react'; +import PreviewModeAuthSwitch from '~/components/PreviewModeAuthSwitch'; +import PreviewModeSwitch from '~/components/PreviewModeSwitch'; +import SettingsCard from '~/components/settings/SettingsCard'; +import SettingsField from '~/components/settings/SettingsField'; +import { Alert, AlertDescription, AlertTitle } from '~/components/ui/Alert'; +import { env } from '~/env'; +import { ToggleFieldSkeleton } from '~/lib/form/components/fields/ToggleFieldSkeleton'; +import { getAppSetting, getPreviewMode } from '~/queries/appSettings'; +import ReadOnlyEnvAlert from '../ReadOnlyEnvAlert'; + +export default async function PreviewModeSection() { + const previewMode = await getPreviewMode(); + const previewModeIsReadOnly = env.PREVIEW_MODE !== undefined; + const authenticationEnabled = await getAppSetting('previewModeRequireAuth'); + + return ( + + }> + + + } + > + {previewModeIsReadOnly && } + + }> + + + } + > + {!authenticationEnabled && ( + + Security Warning + + Disabling authentication allows anyone with the URL of your study + to upload protocols. Only disable this in trusted environments. 
+ + + )} + + + ); +} diff --git a/app/dashboard/settings/_components/PrivacySection.tsx b/app/dashboard/settings/_components/PrivacySection.tsx new file mode 100644 index 000000000..dca1661c6 --- /dev/null +++ b/app/dashboard/settings/_components/PrivacySection.tsx @@ -0,0 +1,26 @@ +import { Suspense } from 'react'; +import DisableAnalyticsSwitch from '~/components/DisableAnalyticsSwitch'; +import SettingsCard from '~/components/settings/SettingsCard'; +import SettingsField from '~/components/settings/SettingsField'; +import { ToggleFieldSkeleton } from '~/lib/form/components/fields/ToggleFieldSkeleton'; +import { env } from '~/env'; +import ReadOnlyEnvAlert from '../ReadOnlyEnvAlert'; + +export default function PrivacySection() { + return ( + + }> + + + } + > + {!!env.DISABLE_ANALYTICS && } + + + ); +} diff --git a/app/dashboard/settings/_components/StorageProviderSection.tsx b/app/dashboard/settings/_components/StorageProviderSection.tsx new file mode 100644 index 000000000..f9a52ef67 --- /dev/null +++ b/app/dashboard/settings/_components/StorageProviderSection.tsx @@ -0,0 +1,79 @@ +import { Alert, AlertDescription } from '~/components/ui/Alert'; +import SettingsCard from '~/components/settings/SettingsCard'; +import SettingsField from '~/components/settings/SettingsField'; +import Link from '~/components/ui/Link'; +import { getAppSetting } from '~/queries/appSettings'; +import { hasProtocols } from '~/queries/storageProvider'; +import UpdateUploadThingToken from './UpdateUploadThingToken'; +import UpdateS3Settings from './UpdateS3Settings'; + +export default async function StorageProviderSection() { + const [ + storageProvider, + uploadThingKey, + , + s3Endpoint, + s3Bucket, + s3Region, + s3AccessKeyId, + s3SecretAccessKey, + ] = await Promise.all([ + getAppSetting('storageProvider'), + getAppSetting('uploadThingToken'), + hasProtocols(), + getAppSetting('s3Endpoint'), + getAppSetting('s3Bucket'), + getAppSetting('s3Region'), + getAppSetting('s3AccessKeyId'), + 
getAppSetting('s3SecretAccessKey'), + ]); + + const provider = storageProvider ?? 'uploadthing'; + const providerLabel = + provider === 's3' ? 'S3 / S3-Compatible' : 'UploadThing'; + + return ( + + + + + The storage provider type cannot be changed once the application has + been deployed. You can update the credentials below. + + + + + {provider === 'uploadthing' && ( + + The API key used to communicate with UploadThing. See the{' '} + + deployment documentation + {' '} + for details. + + } + > + + + )} + + {provider === 's3' && ( + + )} + + ); +} diff --git a/app/dashboard/settings/_components/SyntheticInterviewDataSection.tsx b/app/dashboard/settings/_components/SyntheticInterviewDataSection.tsx new file mode 100644 index 000000000..e3ef7582b --- /dev/null +++ b/app/dashboard/settings/_components/SyntheticInterviewDataSection.tsx @@ -0,0 +1,246 @@ +'use client'; + +import { useRouter } from 'next/navigation'; +import { use, useState } from 'react'; +import { SuperJSON } from 'superjson'; +import { + deleteSyntheticData, + revalidateSyntheticData, +} from '~/actions/synthetic-interviews'; +import SettingsCard from '~/components/settings/SettingsCard'; +import SettingsField from '~/components/settings/SettingsField'; +import { Button } from '~/components/ui/Button'; +import ProgressBar from '~/components/ui/ProgressBar'; +import { useToast } from '~/components/ui/Toast'; +import InputField from '~/lib/form/components/fields/InputField'; +import SelectField from '~/lib/form/components/fields/Select/Native'; +import ToggleField from '~/lib/form/components/fields/ToggleField'; +import { + type GetProtocolsQuery, + type GetProtocolsReturnType, +} from '~/queries/protocols'; + +type SyntheticInterviewDataSectionProps = { + protocolsPromise: GetProtocolsReturnType; + initialCounts: { interviewCount: number; participantCount: number }; +}; + +export default function SyntheticInterviewDataSection({ + protocolsPromise, + initialCounts, +}: 
SyntheticInterviewDataSectionProps) { + const rawProtocols = use(protocolsPromise); + const protocols = SuperJSON.parse(rawProtocols); + + const [selectedProtocolId, setSelectedProtocolId] = useState(); + const [count, setCount] = useState(10); + const [simulateDropOut, setSimulateDropOut] = useState(true); + const [respectSkipLogicAndFiltering, setRespectSkipLogicAndFiltering] = + useState(false); + const [isGenerating, setIsGenerating] = useState(false); + const [isDeleting, setIsDeleting] = useState(false); + const [progress, setProgress] = useState({ current: 0, total: 0 }); + const [syntheticCounts, setSyntheticCounts] = useState(initialCounts); + const { toast } = useToast(); + const router = useRouter(); + + const handleGenerate = async () => { + if (!selectedProtocolId) return; + + setIsGenerating(true); + setProgress({ current: 0, total: count }); + + try { + const response = await fetch('/api/generate-test-interviews', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ + protocolId: selectedProtocolId, + count, + simulateDropOut, + respectSkipLogicAndFiltering, + }), + }); + + if (!response.ok || !response.body) { + setIsGenerating(false); + return; + } + + const reader = response.body.getReader(); + const decoder = new TextDecoder(); + let buffer = ''; + + for (;;) { + const { done, value } = await reader.read(); + if (done) break; + + buffer += decoder.decode(value, { stream: true }); + const events = buffer.split('\n\n'); + buffer = events.pop() ?? ''; + + for (const event of events) { + const dataLine = event + .split('\n') + .find((line) => line.startsWith('data: ')); + if (!dataLine) continue; + + const data = JSON.parse(dataLine.slice(6)) as { + type: string; + current?: number; + total?: number; + created?: number; + message?: string; + }; + + if (data.type === 'progress' && data.current !== undefined) { + setProgress({ + current: data.current, + total: data.total ?? 
count, + }); + } else if (data.type === 'error' && data.message) { + toast({ + title: 'Generation failed', + description: data.message, + type: 'destructive', + }); + } else if (data.type === 'complete' && data.created !== undefined) { + const created = data.created; + setSyntheticCounts((prev) => ({ + interviewCount: prev.interviewCount + created, + participantCount: prev.participantCount + created, + })); + toast({ + title: 'Generation complete', + description: `Successfully generated ${String(created)} synthetic interviews.`, + type: 'success', + }); + } + } + } + } finally { + setIsGenerating(false); + await revalidateSyntheticData(); + router.refresh(); + } + }; + + const handleDelete = async () => { + setIsDeleting(true); + try { + const result = await deleteSyntheticData(); + if (!result.error) { + setSyntheticCounts({ interviewCount: 0, participantCount: 0 }); + } + } finally { + setIsDeleting(false); + } + }; + + const progressPercent = + progress.total > 0 + ? Math.round((progress.current / progress.total) * 100) + : 0; + + return ( + + +
    + ({ + value: p.id, + label: p.name, + }))} + onChange={(value) => { + if (typeof value === 'string') { + setSelectedProtocolId(value); + } + }} + value={selectedProtocolId} + placeholder="Select a Protocol..." + className="min-w-auto" + /> + setCount(Number(value))} + disabled={isGenerating} + className="shrink-0" + /> + +
    + {isGenerating && ( +
    + +

    + {progress.current} / {progress.total} interviews generated +

    +
    + )} +
    + setSimulateDropOut(value ?? true)} + disabled={isGenerating} + /> + } + /> + + setRespectSkipLogicAndFiltering(value ?? false) + } + disabled={isGenerating} + /> + } + /> + + {isDeleting ? 'Deleting...' : 'Delete All'} + + } + /> +
    + ); +} diff --git a/app/dashboard/settings/_components/SyntheticInterviewDataServer.tsx b/app/dashboard/settings/_components/SyntheticInterviewDataServer.tsx new file mode 100644 index 000000000..b64d50cce --- /dev/null +++ b/app/dashboard/settings/_components/SyntheticInterviewDataServer.tsx @@ -0,0 +1,18 @@ +import { Suspense } from 'react'; +import { getProtocols } from '~/queries/protocols'; +import { getSyntheticInterviewCount } from '~/queries/synthetic-interviews'; +import SyntheticInterviewDataSection from '~/app/dashboard/settings/_components/SyntheticInterviewDataSection'; + +export default async function SyntheticInterviewDataServer() { + const protocolsPromise = getProtocols(); + const initialCounts = await getSyntheticInterviewCount(); + + return ( + + + + ); +} diff --git a/app/dashboard/settings/_components/TwoFactorSettings.tsx b/app/dashboard/settings/_components/TwoFactorSettings.tsx new file mode 100644 index 000000000..c99456c86 --- /dev/null +++ b/app/dashboard/settings/_components/TwoFactorSettings.tsx @@ -0,0 +1,181 @@ +'use client'; + +import { RefreshCw } from 'lucide-react'; +import { useState } from 'react'; +import { disableTotp, regenerateRecoveryCodes } from '~/actions/totp'; +import RecoveryCodes from '~/components/RecoveryCodes'; +import SettingsField from '~/components/settings/SettingsField'; +import { useTwoFactorSetup } from '~/components/TwoFactorSetup'; +import TwoFactorVerify from '~/components/TwoFactorVerify'; +import { Alert, AlertDescription } from '~/components/ui/Alert'; +import { Button } from '~/components/ui/Button'; +import ToggleField from '~/lib/form/components/fields/ToggleField'; +import Dialog from '~/lib/dialogs/Dialog'; +import SubmitButton from '~/lib/form/components/SubmitButton'; +import FormStoreProvider from '~/lib/form/store/formStoreProvider'; + +type TwoFactorSettingsProps = { + hasTwoFactor: boolean; + userCount: number; + sandboxMode?: boolean; +}; + +export default function TwoFactorSettings({ 
+ hasTwoFactor: initialHasTwoFactor, + userCount, + sandboxMode = false, +}: TwoFactorSettingsProps) { + const [hasTwoFactor, setHasTwoFactor] = useState(initialHasTwoFactor); + const [showDisable, setShowDisable] = useState(false); + const [showRegenerateVerify, setShowRegenerateVerify] = useState(false); + const [showRecoveryCodes, setShowRecoveryCodes] = useState(false); + const [recoveryCodes, setRecoveryCodes] = useState([]); + + const startTwoFactorSetup = useTwoFactorSetup(userCount); + + const handleToggle = async (checked: boolean) => { + if (checked) { + const completed = await startTwoFactorSetup(); + if (completed) { + setHasTwoFactor(true); + } + } else { + setShowDisable(true); + } + }; + + return ( + <> + void handleToggle(checked ?? false)} + disabled={sandboxMode} + aria-label="Toggle two-factor authentication" + /> + } + > + {hasTwoFactor && ( + + )} + + + + setShowDisable(false)} + title="Disable Two-Factor Authentication" + description="Enter your current authenticator code or a recovery code to disable two-factor authentication." + footer={ + <> + + + Disable + + + } + > + + + If you can't access your authenticator app, you need to use a + recovery code to disable two-factor authentication. If you + don't have any valid recovery codes, you will need another + user to disable two-factor authentication for you. + + + { + const result = await disableTotp({ code }); + if (result.error) throw new Error(result.error); + setHasTwoFactor(false); + setShowDisable(false); + }} + allowRecoveryCodes + /> + + + + + setShowRegenerateVerify(false)} + title="Regenerate Recovery Codes" + description="Enter your current authenticator code to generate new recovery codes. Your existing codes will be invalidated." + footer={ + <> + + + Regenerate + + + } + > + + + If you can't access your authenticator app, you need to + disable two-factor authentication using an existing recovery code + before you generate new codes. 
If you don't have any valid + recovery codes, you will need another user to disable two-factor + authentication for you. + + + { + const result = await regenerateRecoveryCodes({ code }); + if (result.error) throw new Error(result.error); + if (result.data) { + setShowRegenerateVerify(false); + setRecoveryCodes(result.data.recoveryCodes); + setShowRecoveryCodes(true); + } + }} + /> + + + + { + setShowRecoveryCodes(false); + setRecoveryCodes([]); + }} + title="New Recovery Codes" + description="Your previous recovery codes have been invalidated. Save these new codes." + footer={ + + } + > + + + + ); +} diff --git a/app/dashboard/settings/_components/UpdateInstallationId.tsx b/app/dashboard/settings/_components/UpdateInstallationId.tsx index 93d6a500c..f4dc5ca1b 100644 --- a/app/dashboard/settings/_components/UpdateInstallationId.tsx +++ b/app/dashboard/settings/_components/UpdateInstallationId.tsx @@ -1,7 +1,10 @@ 'use client'; -import { setAppSetting } from '~/actions/appSettings'; -import { appSettingsSchema } from '~/schemas/appSettings'; +import { Loader2, RefreshCw } from 'lucide-react'; +import { useState } from 'react'; +import { z } from 'zod/mini'; +import { regenerateInstallationId } from '~/actions/appSettings'; +import { Button } from '~/components/ui/Button'; import UpdateSettingsValue from '../../_components/UpdateSettingsValue'; export default function UpdateInstallationId({ @@ -11,14 +14,41 @@ export default function UpdateInstallationId({ installationId?: string; readOnly?: boolean; }) { + const [currentId, setCurrentId] = useState(installationId); + const [isRegenerating, setIsRegenerating] = useState(false); + + const handleRegenerate = async () => { + setIsRegenerating(true); + try { + const newId = await regenerateInstallationId(); + setCurrentId(newId); + } finally { + setIsRegenerating(false); + } + }; + return ( { - await setAppSetting('installationId', value); - }} - schema={appSettingsSchema.shape.installationId} + 
settingsKey="installationId" + initialValue={currentId} readOnly={readOnly} + schema={z + .string() + .check(z.minLength(1, 'Installation ID cannot be empty'))} + suffixComponent={ + + } /> ); } diff --git a/app/dashboard/settings/_components/UpdateS3Settings.tsx b/app/dashboard/settings/_components/UpdateS3Settings.tsx new file mode 100644 index 000000000..5a760ed36 --- /dev/null +++ b/app/dashboard/settings/_components/UpdateS3Settings.tsx @@ -0,0 +1,89 @@ +'use client'; + +import { useState } from 'react'; +import SettingsField from '~/components/settings/SettingsField'; +import { setAppSetting } from '~/actions/appSettings'; +import { Button } from '~/components/ui/Button'; +import InputField from '~/lib/form/components/fields/InputField'; +import { type AppSetting } from '~/schemas/appSettings'; + +type S3Field = { + key: Extract; + label: string; + type: 'text' | 'password'; +}; + +const s3Fields: S3Field[] = [ + { key: 's3Endpoint', label: 'Endpoint URL', type: 'text' }, + { key: 's3Bucket', label: 'Bucket Name', type: 'text' }, + { key: 's3Region', label: 'Region', type: 'text' }, + { key: 's3AccessKeyId', label: 'Access Key ID', type: 'password' }, + { key: 's3SecretAccessKey', label: 'Secret Access Key', type: 'password' }, +]; + +export default function UpdateS3Settings({ + initialValues, +}: { + initialValues: Partial>; +}) { + return ( + <> + {s3Fields.map((field) => ( + + + + ))} + + ); +} + +function S3FieldEditor({ + settingsKey, + inputType, + initialValue, +}: { + settingsKey: S3Field['key']; + inputType: 'text' | 'password'; + initialValue: string; +}) { + const [value, setValue] = useState(initialValue); + const [isSaving, setSaving] = useState(false); + const [savedValue, setSavedValue] = useState(initialValue); + + const handleSave = async () => { + setSaving(true); + await setAppSetting(settingsKey, value); + setSavedValue(value); + setSaving(false); + }; + + return ( +
    + setValue(v ?? '')} + type={inputType} + className="w-full" + disabled={isSaving} + /> + {value !== savedValue && ( +
    + + +
    + )} +
    + ); +} diff --git a/app/dashboard/settings/_components/UpdateUploadThingToken.tsx b/app/dashboard/settings/_components/UpdateUploadThingToken.tsx index 57ee5d34c..8a9686785 100644 --- a/app/dashboard/settings/_components/UpdateUploadThingToken.tsx +++ b/app/dashboard/settings/_components/UpdateUploadThingToken.tsx @@ -1,20 +1,17 @@ 'use client'; -import { setAppSetting } from '~/actions/appSettings'; import { createUploadThingTokenSchema } from '~/schemas/appSettings'; import UpdateSettingsValue from '../../_components/UpdateSettingsValue'; export default function UpdateUploadThingToken({ uploadThingKey, }: { - uploadThingKey?: string; + uploadThingKey: string | null | undefined; }) { return ( { - await setAppSetting('uploadThingToken', value); - }} + settingsKey="uploadThingToken" + initialValue={uploadThingKey ?? undefined} schema={createUploadThingTokenSchema} /> ); diff --git a/app/dashboard/settings/_components/UserManagement.tsx b/app/dashboard/settings/_components/UserManagement.tsx new file mode 100644 index 000000000..323c95c0a --- /dev/null +++ b/app/dashboard/settings/_components/UserManagement.tsx @@ -0,0 +1,937 @@ +'use client'; + +import { + startAuthentication, + startRegistration, +} from '@simplewebauthn/browser'; +import { type StrictColumnDef } from '~/components/DataTable/types'; +import { Plus, Trash, User } from 'lucide-react'; +import { useRouter } from 'next/navigation'; +import { use, useCallback, useState } from 'react'; +import { z } from 'zod/mini'; +import { + changePassword, + checkUsernameAvailable, + createUser, + deleteUsers, +} from '~/actions/users'; +import { + generateAuthenticationOptions, + generateRegistrationOptions, + resetAuthForUser, + switchToPasskeyMode, + switchToPasswordMode, + verifyPasskeyReauth, +} from '~/actions/webauthn'; +import PasskeySettings from '~/app/dashboard/settings/_components/PasskeySettings'; +import TwoFactorSettings from '~/app/dashboard/settings/_components/TwoFactorSettings'; +import { 
DataTableColumnHeader } from '~/components/DataTable/ColumnHeader'; +import { DataTable } from '~/components/DataTable/DataTable'; +import { DataTableFloatingBar } from '~/components/DataTable/DataTableFloatingBar'; +import Surface from '~/components/layout/Surface'; +import SettingsField from '~/components/settings/SettingsField'; +import Heading from '~/components/typography/Heading'; +import Paragraph from '~/components/typography/Paragraph'; +import { Alert, AlertDescription, AlertTitle } from '~/components/ui/Alert'; +import { Button } from '~/components/ui/Button'; +import { useClientDataTable } from '~/hooks/useClientDataTable'; +import Dialog from '~/lib/dialogs/Dialog'; +import useDialog from '~/lib/dialogs/useDialog'; +import Field from '~/lib/form/components/Field/Field'; +import { FormWithoutProvider } from '~/lib/form/components/Form'; +import SubmitButton from '~/lib/form/components/SubmitButton'; +import Checkbox from '~/lib/form/components/fields/Checkbox'; +import InputField from '~/lib/form/components/fields/InputField'; +import PasswordField from '~/lib/form/components/fields/PasswordField'; +import FormStoreProvider from '~/lib/form/store/formStoreProvider'; +import { type FormSubmissionResult } from '~/lib/form/store/types'; +import { type GetUsersReturnType } from '~/queries/users'; + +type UserRow = GetUsersReturnType[number]; + +type Passkey = { + id: string; + friendlyName: string | null; + deviceType: string; + createdAt: Date; + lastUsedAt: Date | null; + backedUp: boolean; +}; + +type UserManagementProps = { + usersPromise: Promise; + currentUserId: string; + currentUsername: string; + hasTwoFactorPromise: Promise; + passkeysPromise: Promise; + hasPasswordPromise: Promise; + sandboxMode: boolean; +}; + +const usernameSchema = z + .string() + .check(z.minLength(4, 'Username must be at least 4 characters')) + .check(z.refine((s) => !s.includes(' '), 'Username cannot contain spaces')); + +const usernameUniqueSchema = z.string().check( + 
z.refine(async (username) => { + if (!username || username.length < 4 || username.includes(' ')) { + return true; // Let the basic validation handle these cases + } + const result = await checkUsernameAvailable(username); + return result.available; + }, 'Username is already taken'), +); + +const passwordSchema = z + .string() + .check(z.minLength(8, 'Password must be at least 8 characters')) + .check(z.regex(/[a-z]/, 'Password must contain at least 1 lowercase letter')) + .check(z.regex(/[A-Z]/, 'Password must contain at least 1 uppercase letter')) + .check(z.regex(/[0-9]/, 'Password must contain at least 1 number')) + .check(z.regex(/[^a-zA-Z0-9]/, 'Password must contain at least 1 symbol')); + +function makeUserColumns( + currentUserId: string, + userCount: number, + onDeleteUser: (user: UserRow) => void, + onResetAuth: (user: UserRow) => void, +): StrictColumnDef[] { + return [ + { + id: 'select', + header: ({ table }) => ( + + table.toggleAllPageRowsSelected(!!value) + } + aria-label="Select all" + /> + ), + cell: ({ row }) => { + const isCurrentUser = row.original.id === currentUserId; + return ( + row.toggleSelected(!!value)} + aria-label="Select row" + disabled={isCurrentUser} + /> + ); + }, + enableSorting: false, + enableHiding: false, + }, + { + id: 'username', + accessorKey: 'username', + sortingFn: 'text', + header: ({ column }) => ( + + ), + cell: ({ row }) => { + const isCurrentUser = row.original.id === currentUserId; + return ( +
    + {row.original.username} + {isCurrentUser && ( + (you) + )} +
    + ); + }, + }, + { + id: 'authMethod', + enableSorting: false, + header: ({ column }) => ( + + ), + cell: ({ row }) => { + const hasPasskeys = row.original.webAuthnCredentials.length > 0; + const has2FA = row.original.totpCredential?.verified === true; + + if (hasPasskeys) return 'Passkey'; + if (has2FA) return 'Password + 2FA'; + return 'Password'; + }, + }, + { + id: 'actions', + enableSorting: false, + header: ({ column }) => ( + + ), + cell: ({ row }) => { + const isCurrentUser = row.original.id === currentUserId; + const isLastUser = userCount <= 1; + const hasAuth = + row.original.totpCredential?.verified === true || + row.original.webAuthnCredentials.length > 0; + return ( +
    + {hasAuth && !isCurrentUser && ( + + )} + +
    + ); + }, + }, + ]; +} + +export default function UserManagement({ + usersPromise, + currentUserId, + currentUsername, + hasTwoFactorPromise, + passkeysPromise, + hasPasswordPromise, + sandboxMode, +}: UserManagementProps) { + // TanStack Table: consumers must also opt out so React Compiler doesn't memoize JSX that depends on the table ref. + 'use no memo'; + const router = useRouter(); + const users = use(usersPromise); + const hasTwoFactor = use(hasTwoFactorPromise); + const initialPasskeys = use(passkeysPromise); + const hasPassword = use(hasPasswordPromise); + const [isCreating, setIsCreating] = useState(false); + const [isChangingPassword, setIsChangingPassword] = useState(false); + const [passwordChangeSuccess, setPasswordChangeSuccess] = useState(false); + const [error, setError] = useState(null); + const [showSwitchToPasskey, setShowSwitchToPasskey] = useState(false); + const [showSwitchToPassword, setShowSwitchToPassword] = useState(false); + const [switchToPasswordReauthed, setSwitchToPasswordReauthed] = + useState(false); + const [switchToPasswordReauthError, setSwitchToPasswordReauthError] = + useState(null); + const [switchToPasswordReauthLoading, setSwitchToPasswordReauthLoading] = + useState(false); + + const { confirm } = useDialog(); + + const doDeleteUsers = useCallback( + async (usersToDelete: UserRow[]) => { + const ids = usersToDelete.map((u) => u.id); + const result = await deleteUsers({ ids }); + + if (result.error) { + setError(result.error); + return; + } + + router.refresh(); + }, + [router], + ); + + const handleDeleteUser = useCallback( + (user: UserRow) => { + void confirm({ + title: 'Delete User', + description: `Are you sure you want to delete the user "${user.username}"? 
This action cannot be undone.`, + confirmLabel: 'Delete User', + intent: 'destructive', + onConfirm: () => doDeleteUsers([user]), + }); + }, + [confirm, doDeleteUsers], + ); + + const [tempPassword, setTempPassword] = useState(null); + + const handleResetAuth = useCallback( + (user: UserRow) => { + void confirm({ + title: 'Reset Authentication', + description: `This will remove all passkeys, 2FA, and recovery codes for ${user.username}, and set a temporary password. They will need to set up their authentication again.`, + confirmLabel: 'Reset Auth', + intent: 'destructive', + onConfirm: async () => { + const result = await resetAuthForUser(user.id); + if (result.error) { + setError(result.error); + } else if (result.data?.temporaryPassword) { + setTempPassword(result.data.temporaryPassword); + } + }, + }); + }, + [confirm], + ); + + const columns = makeUserColumns( + currentUserId, + users.length, + handleDeleteUser, + handleResetAuth, + ); + + const handleDeleteSelected = useCallback( + (selectedUsers: UserRow[]) => { + const deletableUsers = selectedUsers.filter( + (user) => user.id !== currentUserId, + ); + + if (deletableUsers.length === 0) { + setError('You cannot delete your own account'); + return; + } + + const isSingle = deletableUsers.length === 1; + void confirm({ + title: isSingle ? 'Delete User' : 'Delete Multiple Users', + description: isSingle + ? `Are you sure you want to delete the user "${deletableUsers[0]?.username}"? This action cannot be undone.` + : `Are you sure you want to delete ${deletableUsers.length} users? This action cannot be undone.`, + confirmLabel: isSingle + ? 
'Delete User' + : `Delete ${deletableUsers.length} Users`, + intent: 'destructive', + onConfirm: () => doDeleteUsers(deletableUsers), + }); + }, + [currentUserId, confirm, doDeleteUsers], + ); + + const { table } = useClientDataTable({ + data: users, + columns, + enablePagination: false, + enableRowSelection: (row) => row.original.id !== currentUserId, + }); + + const handleCreateUser = async ( + values: unknown, + ): Promise => { + setError(null); + + const { username, password, confirmPassword } = values as { + username: string; + password: string; + confirmPassword: string; + }; + + if (password !== confirmPassword) { + return { + success: false, + formErrors: ['Passwords do not match'], + }; + } + + const result = await createUser({ username, password, confirmPassword }); + + if (result.error) { + return { + success: false, + formErrors: [result.error], + }; + } + + setIsCreating(false); + router.refresh(); + return { success: true }; + }; + + const handleChangePassword = async ( + values: unknown, + ): Promise => { + const { currentPassword, newPassword, confirmNewPassword } = values as { + currentPassword: string; + newPassword: string; + confirmNewPassword: string; + }; + + if (newPassword !== confirmNewPassword) { + return { + success: false, + formErrors: ['New passwords do not match'], + }; + } + + const result = await changePassword({ + currentPassword, + newPassword, + confirmNewPassword, + }); + + if (result.error) { + return { + success: false, + formErrors: [result.error], + }; + } + + setPasswordChangeSuccess(true); + setTimeout(() => { + setIsChangingPassword(false); + setPasswordChangeSuccess(false); + }, 1500); + + return { success: true }; + }; + + const handleSwitchToPasskey = async ( + values: unknown, + ): Promise => { + const { currentPassword } = values as { currentPassword: string }; + + const { error: genError, data } = await generateRegistrationOptions(); + if (genError || !data) { + return { + success: false, + formErrors: [genError ?? 
'Failed to start registration'], + }; + } + + let credential; + try { + credential = await startRegistration({ optionsJSON: data.options }); + } catch (e) { + if (e instanceof Error && e.name === 'NotAllowedError') { + return { success: false, formErrors: ['Passkey creation cancelled.'] }; + } + return { success: false, formErrors: ['Passkey creation failed.'] }; + } + + const result = await switchToPasskeyMode({ currentPassword, credential }); + + if (result.error) { + return { success: false, formErrors: [result.error] }; + } + + setShowSwitchToPasskey(false); + router.refresh(); + return { success: true }; + }; + + const handleSwitchToPasswordReauth = async () => { + setSwitchToPasswordReauthError(null); + setSwitchToPasswordReauthLoading(true); + + try { + const { error: genError, data: regData } = + await generateAuthenticationOptions(); + if (genError || !regData) { + setSwitchToPasswordReauthError( + genError ?? 'Failed to start verification', + ); + setSwitchToPasswordReauthLoading(false); + return; + } + + const credential = await startAuthentication({ + optionsJSON: regData.options, + }); + + const result = await verifyPasskeyReauth({ credential }); + + if (result.error) { + setSwitchToPasswordReauthError(result.error); + setSwitchToPasswordReauthLoading(false); + return; + } + + setSwitchToPasswordReauthed(true); + setSwitchToPasswordReauthLoading(false); + } catch (e) { + if (e instanceof Error && e.name === 'NotAllowedError') { + setSwitchToPasswordReauthLoading(false); + return; + } + setSwitchToPasswordReauthError('Verification failed'); + setSwitchToPasswordReauthLoading(false); + } + }; + + const handleSwitchToPassword = async ( + values: unknown, + ): Promise => { + const { newPassword, confirmNewPassword } = values as { + newPassword: string; + confirmNewPassword: string; + }; + + if (newPassword !== confirmNewPassword) { + return { + success: false, + formErrors: ['Passwords do not match'], + }; + } + + const result = await 
switchToPasswordMode(newPassword); + + if (result.error) { + return { success: false, formErrors: [result.error] }; + } + + setShowSwitchToPassword(false); + setSwitchToPasswordReauthed(false); + router.refresh(); + return { success: true }; + }; + + return ( +
    + +
    +
    +
    +
    + +
    +
    + + Logged in as: + + + {currentUsername} + +
    +
    + {hasPassword && !sandboxMode && ( + + )} +
    + {hasPassword && !hasTwoFactor && !sandboxMode && ( + + Security Warning + + Your account is only protected by a password. Enable two-factor + authentication for stronger security. + + + )} +
    + + {hasPassword ? ( + <> + + {!sandboxMode && ( + setShowSwitchToPasskey(true)} + size="sm" + color="destructive" + > + Switch to Passkey + + } + /> + )} + + ) : ( + <> + + {users.length === 1 && ( +
    + + + You are the only user. If you lose access to your passkey, + you will be locked out. Consider adding another user or + backing up your passkey. + + +
    + )} + {!sandboxMode && ( + setShowSwitchToPassword(true)} + size="sm" + color="destructive" + > + Switch to Password + + } + /> + )} + + )} +
    +
    +
    + All Users + +
    + + + + + } + /> +
    + + { + setIsChangingPassword(false); + setPasswordChangeSuccess(false); + }} + title="Change Password" + description="Update your account password." + footer={ + passwordChangeSuccess ? null : ( + <> + + + Update Password + + + ) + } + > + {passwordChangeSuccess ? ( +
    + Password updated successfully! +
    + ) : ( + + + + + + + )} +
    +
    + {/* Create User Dialog */} + + { + setIsCreating(false); + setError(null); + }} + title="Add User" + footer={ + <> + + Create User + + } + > + + {error && ( +
    {error}
    + )} + + + +
    +
    +
    + setTempPassword(null)} + title="Temporary Password" + description="The user's authentication has been reset. Share this temporary password with them so they can sign in and set up their account again." + footer={ + + } + > +
    + + {tempPassword} + +
    +
    + {/* Switch to Passkey Dialog */} + + setShowSwitchToPasskey(false)} + title="Switch to Passkey Authentication" + description="Enter your current password, then register a passkey. Your password and two-factor authentication will be removed." + footer={ + <> + + + Switch to Passkey + + + } + > + + + + + + + {/* Switch to Password Dialog */} + + { + setShowSwitchToPassword(false); + setSwitchToPasswordReauthed(false); + setSwitchToPasswordReauthError(null); + setSwitchToPasswordReauthLoading(false); + }} + title="Switch to Password Authentication" + description="All your passkeys will be removed and replaced with a password." + footer={ + switchToPasswordReauthed ? ( + <> + + + Switch to Password + + + ) : null + } + > + {switchToPasswordReauthed ? ( + + + + + + ) : ( +
    + + Verify your identity with a passkey to continue. + + {switchToPasswordReauthError && ( +

    + {switchToPasswordReauthError} +

    + )} + +
    + )} +
    +
    +
    + ); +} diff --git a/app/dashboard/settings/_components/UserManagementSection.tsx b/app/dashboard/settings/_components/UserManagementSection.tsx new file mode 100644 index 000000000..ac6745c54 --- /dev/null +++ b/app/dashboard/settings/_components/UserManagementSection.tsx @@ -0,0 +1,65 @@ +import SettingsCard from '~/components/settings/SettingsCard'; +import { env } from '~/env'; +import { prisma } from '~/lib/db'; +import { getUsers } from '~/queries/users'; +import UserManagement from './UserManagement'; + +async function getHasTwoFactor(userId: string) { + const result = await prisma.totpCredential.findFirst({ + where: { user_id: userId, verified: true }, + select: { id: true }, + }); + + return !!result; +} + +async function getPasskeys(userId: string) { + return prisma.webAuthnCredential.findMany({ + where: { user_id: userId }, + select: { + id: true, + friendlyName: true, + deviceType: true, + createdAt: true, + lastUsedAt: true, + backedUp: true, + }, + orderBy: { createdAt: 'desc' }, + }); +} + +async function getHasPassword(userId: string) { + const key = await prisma.key.findFirst({ + where: { user_id: userId }, + select: { hashed_password: true }, + }); + + return !!key?.hashed_password; +} + +export default function UserManagementSection({ + userId, + username, +}: { + userId: string; + username: string; +}) { + const usersPromise = getUsers(); + const hasTwoFactorPromise = getHasTwoFactor(userId); + const passkeysPromise = getPasskeys(userId); + const hasPasswordPromise = getHasPassword(userId); + + return ( + + + + ); +} diff --git a/app/dashboard/settings/loading.tsx b/app/dashboard/settings/loading.tsx deleted file mode 100644 index b8a488ac0..000000000 --- a/app/dashboard/settings/loading.tsx +++ /dev/null @@ -1,46 +0,0 @@ -import ResponsiveContainer from '~/components/ResponsiveContainer'; -import { SettingsSectionSkeleton } from '~/components/layout/SettingsSection'; -import { ButtonSkeleton } from '~/components/ui/Button'; -import { 
Skeleton } from '~/components/ui/skeleton'; -import { SwitchSkeleton } from '~/components/ui/switch'; -import PageHeader from '~/components/ui/typography/PageHeader'; -import { env } from '~/env'; - -export default function Loading() { - return ( - <> - - - - - - } - /> - } /> - } /> - {!env.SANDBOX_MODE && ( - - } - /> - )} - - {env.NODE_ENV === 'development' && ( - <> - } - /> - } - /> - - )} - - - ); -} diff --git a/app/dashboard/settings/page.tsx b/app/dashboard/settings/page.tsx index 16827e8a6..2841796f1 100644 --- a/app/dashboard/settings/page.tsx +++ b/app/dashboard/settings/page.tsx @@ -1,168 +1,125 @@ import { Suspense } from 'react'; -import AnonymousRecruitmentSwitch from '~/components/AnonymousRecruitmentSwitch'; -import DisableAnalyticsSwitch from '~/components/DisableAnalyticsSwitch'; -import SettingsSection from '~/components/layout/SettingsSection'; -import LimitInterviewsSwitch from '~/components/LimitInterviewsSwitch'; -import Link from '~/components/Link'; -import ResponsiveContainer from '~/components/ResponsiveContainer'; -import ToggleSmallScreenWarning from '~/components/ToggleSmallScreenWarning'; -import { Alert, AlertDescription, AlertTitle } from '~/components/ui/Alert'; -import PageHeader from '~/components/ui/typography/PageHeader'; -import Paragraph from '~/components/ui/typography/Paragraph'; -import VersionSection, { - VersionSectionSkeleton, -} from '~/components/VersionSection'; +import { SettingsCardSkeleton } from '~/components/settings/SettingsCard'; +import SettingsNavigation, { + type SettingsSection, +} from '~/components/settings/SettingsNavigation'; +import PageHeader from '~/components/typography/PageHeader'; import { env } from '~/env'; -import { - getAppSetting, - getInstallationId, - requireAppNotExpired, -} from '~/queries/appSettings'; -import { requirePageAuth } from '~/utils/auth'; -import AnalyticsButton from '../_components/AnalyticsButton'; -import RecruitmentTestSectionServer from 
'../_components/RecruitmentTestSectionServer'; -import ResetButton from '../_components/ResetButton'; -import UpdateUploadThingTokenAlert from '../_components/UpdateUploadThingTokenAlert'; -import UpdateInstallationId from './_components/UpdateInstallationId'; -import UpdateUploadThingToken from './_components/UpdateUploadThingToken'; -import ReadOnlyEnvAlert from './ReadOnlyEnvAlert'; +import { requirePageAuth } from '~/lib/auth/guards'; +import { requireAppNotExpired } from '~/queries/appSettings'; +import ApiTokensSection from './_components/ApiTokensSection'; +import ConfigurationSection from './_components/ConfigurationSection'; +import DeveloperToolsSection from './_components/DeveloperToolsSection'; +import InterviewSettingsSection from './_components/InterviewSettingsSection'; +import PreviewModeSection from './_components/PreviewModeSection'; +import PrivacySection from './_components/PrivacySection'; +import StorageProviderSection from './_components/StorageProviderSection'; +import SyntheticInterviewDataServer from './_components/SyntheticInterviewDataServer'; +import UserManagementSection from './_components/UserManagementSection'; -export default async function Settings() { - await requireAppNotExpired(); - await requirePageAuth(); +function getSettingsSections(): SettingsSection[] { + const sections: SettingsSection[] = [ + { id: 'app-details', title: 'App Details' }, + { id: 'user-management', title: 'User Management' }, + { id: 'storage', title: 'Storage' }, + { id: 'interview-settings', title: 'Interview Settings' }, + { id: 'privacy', title: 'Privacy' }, + { id: 'api-tokens', title: 'API Tokens' }, + { id: 'preview-mode', title: 'Preview Mode' }, + { id: 'synthetic-interview-data', title: 'Synthetic Interview Data' }, + ]; + + if (env.NODE_ENV === 'development' || !env.SANDBOX_MODE) { + sections.push({ + id: 'developer-tools', + title: 'Developer Tools', + variant: 'destructive', + }); + } + + return sections; +} - const installationId = await 
getInstallationId(); - const uploadThingKey = await getAppSetting('uploadThingToken'); +function SettingsContentSkeleton() { + const sections = getSettingsSections(); + + return ( +
    +
    + +
    + + + + + + + + + {(env.NODE_ENV === 'development' || !env.SANDBOX_MODE) && ( + + )} +
    +
    +
    + ); +} +export default function Settings() { return ( <> - - - - - }> - - - - - This is the unique identifier for your installation of Fresco. This - ID is used to track analytics data and for other internal purposes. - - - - - - This is the API key used to communicate with the UploadThing - service. See our{' '} - - deployment documentation - {' '} - for information about how to obtain this key. - - - - - - - - } - > - - If anonymous recruitment is enabled, you may generate an anonymous - participation URL. This URL can be shared with participants to allow - them to self-enroll in your study. - - - - - - } - > - - If this option is enabled, the warning about using Fresco on a small - screen will be disabled. - - - Important - - Ensure that you test your interview thoroughly on a small screen - before disabling this warning. Fresco is designed to work best on - larger screens, and using it on a small screen may lead to a poor - user experience for participants. - - - - - - - } - > - - If this option is enabled, each participant will only be able to - submit a single completed interview for each - protocol (although they may have multiple incomplete interviews). - Once an interview has been completed, attempting to start a new - interview or to resume any other in-progress interview, will be - prevented. - - - - - - } - > - - If this option is enabled, no anonymous analytics data will be sent - to the Network Canvas team. - - {!!env.DISABLE_ANALYTICS && } - - {(env.NODE_ENV === 'development' || !env.SANDBOX_MODE) && ( - } - > - - Delete all data and reset Fresco to its default state. - - - )} - {env.NODE_ENV === 'development' && ( - // Only show the Analytics and Recruitment test sections in development - <> - } - > - - This will send a test analytics event to the Fresco analytics - server. 
- - - - - )} - + + }> + + ); } + +async function SettingsContent() { + await requireAppNotExpired(); + const session = await requirePageAuth(); + const sections = getSettingsSections(); + + return ( +
    +
    + +
    + }> + + + }> + + + }> + + + }> + + + + }> + + + }> + + + }> + + + {(env.NODE_ENV === 'development' || !env.SANDBOX_MODE) && ( + + )} +
    +
    +
    + ); +} diff --git a/app/error.tsx b/app/error.tsx index 3c970c033..4ce12a23e 100644 --- a/app/error.tsx +++ b/app/error.tsx @@ -1,15 +1,13 @@ 'use client'; import { ClipboardCopy } from 'lucide-react'; -import Image from 'next/image'; -import ErrorReportNotifier from '~/components/ErrorReportNotifier'; -import ResponsiveContainer from '~/components/ResponsiveContainer'; +import posthog from 'posthog-js'; +import { useEffect } from 'react'; +import Surface from '~/components/layout/Surface'; +import Heading from '~/components/typography/Heading'; +import Paragraph from '~/components/typography/Paragraph'; import { Button } from '~/components/ui/Button'; -import { cardClasses } from '~/components/ui/card'; -import Heading from '~/components/ui/typography/Heading'; -import Paragraph from '~/components/ui/typography/Paragraph'; -import { useToast } from '~/components/ui/use-toast'; -import { cn } from '~/utils/shadcn'; +import { useToast } from '~/components/ui/Toast'; export default function Error({ error, @@ -19,7 +17,7 @@ export default function Error({ reset: () => void; heading?: string; }) { - const { toast } = useToast(); + const { add } = useToast(); const handleReset = () => { reset(); @@ -34,55 +32,43 @@ Stack Trace: ${error.stack}`; await navigator.clipboard.writeText(debugInfo); - toast({ + add({ title: 'Success', description: 'Debug information copied to clipboard', - variant: 'success', - duration: 3000, + type: 'success', }); }; + useEffect(() => { + posthog.captureException(error); + }, [error]); + return ( -
    - - -
    - Error robot - - Something went wrong. - -
    - +
    + + + Something went wrong. + + Fresco encountered an error while trying to load the page, and could not continue. This error has been automatically reported to us, but if you would like to provide further information that you think might be useful - please use the feedback button. You can also use the rety button to - attempt to load the page again. + please contact us. You can also use the retry button to attempt to + load the page again. -
    - -
    - +
    ); } diff --git a/app/global-error.tsx b/app/global-error.tsx index f69f4e3bf..8dfe8d104 100644 --- a/app/global-error.tsx +++ b/app/global-error.tsx @@ -2,15 +2,13 @@ import { ClipboardCopy } from 'lucide-react'; import Image from 'next/image'; -import ErrorReportNotifier from '~/components/ErrorReportNotifier'; -import Link from '~/components/Link'; -import ResponsiveContainer from '~/components/ResponsiveContainer'; +import posthog from 'posthog-js'; +import { useEffect, useState } from 'react'; +import Surface from '~/components/layout/Surface'; +import Heading from '~/components/typography/Heading'; +import Paragraph from '~/components/typography/Paragraph'; import { Button } from '~/components/ui/Button'; -import { cardClasses } from '~/components/ui/card'; -import Heading from '~/components/ui/typography/Heading'; -import Paragraph from '~/components/ui/typography/Paragraph'; -import { useToast } from '~/components/ui/use-toast'; -import { cn } from '~/utils/shadcn'; +import Link from '~/components/ui/Link'; export default function Error({ error, @@ -20,7 +18,7 @@ export default function Error({ reset: () => void; heading?: string; }) { - const { toast } = useToast(); + const [copied, setCopied] = useState(false); const handleReset = () => { reset(); @@ -35,24 +33,17 @@ Stack Trace: ${error.stack}`; await navigator.clipboard.writeText(debugInfo); - toast({ - title: 'Success', - description: 'Debug information copied to clipboard', - variant: 'success', - duration: 3000, - }); + setCopied(true); + setTimeout(() => setCopied(false), 2000); }; + useEffect(() => { + posthog.captureException(error); + }, [error]); + return ( -
    - - +
    +
    Error robot - + There's a problem with Fresco.
    - + Fresco encountered a serious error and is unable to continue. @@ -77,15 +68,15 @@ ${error.stack}`; .
    - -
    - +
    ); } diff --git a/app/layout.tsx b/app/layout.tsx index 075cb6a57..b0c2fee8f 100644 --- a/app/layout.tsx +++ b/app/layout.tsx @@ -1,24 +1,59 @@ -import { Quicksand } from 'next/font/google'; -import { Toaster } from '~/components/ui/toaster'; +import { type Metadata, type Viewport } from 'next'; +import { connection } from 'next/server'; +import { Suspense } from 'react'; +import Providers from '~/components/Providers'; +import { PostHogIdentify } from '~/components/Providers/PosthogIdentify'; +import { env } from '~/env'; +import { getDisableAnalytics, getInstallationId } from '~/queries/appSettings'; import '~/styles/globals.css'; +import '~/styles/themes/default.css'; -export const metadata = { +export const metadata: Metadata = { title: 'Network Canvas Fresco', description: 'Fresco.', }; -const quicksand = Quicksand({ - weight: ['300', '400', '500', '600', '700'], - subsets: ['latin', 'latin-ext'], - display: 'swap', -}); +export const viewport: Viewport = { + viewportFit: 'cover', +}; + +async function AnalyticsLoader() { + // Opt this subtree out of prerendering — getInstallationId and + // getDisableAnalytics can fall back to the database, which isn't + // available at build time (e.g. when building the distributable + // Docker image). The boundary in RootLayout lets Next + // stream this in at request time instead. + await connection(); + + try { + const [installationId, disableAnalytics] = await Promise.all([ + getInstallationId(), + getDisableAnalytics(), + ]); + + return ( + + ); + } catch { + return null; + } +} function RootLayout({ children }: { children: React.ReactNode }) { return ( - - {children} - + +
    + + + + + {children} + +
    ); diff --git a/app/not-found.tsx b/app/not-found.tsx index cffcac8d5..10c5dc1d4 100644 --- a/app/not-found.tsx +++ b/app/not-found.tsx @@ -1,13 +1,13 @@ import { FileWarning } from 'lucide-react'; -import Heading from '~/components/ui/typography/Heading'; -import Paragraph from '~/components/ui/typography/Paragraph'; +import Heading from '~/components/typography/Heading'; +import Paragraph from '~/components/typography/Paragraph'; export default function NotFound() { return ( -
    - - 404 - Page not found. +
    + + 404 + Page not found.
    ); } diff --git a/auth.d.ts b/auth.d.ts deleted file mode 100644 index 45e9b1630..000000000 --- a/auth.d.ts +++ /dev/null @@ -1,9 +0,0 @@ -/* eslint-disable @typescript-eslint/consistent-type-imports */ -/// -declare namespace Lucia { - type Auth = import('./utils/auth').Auth; - type DatabaseUserAttributes = { - username: string; - }; - // type DatabaseSessionAttributes = {}; -} diff --git a/chromatic.config.json b/chromatic.config.json new file mode 100644 index 000000000..74ff7ea6f --- /dev/null +++ b/chromatic.config.json @@ -0,0 +1,5 @@ +{ + "onlyChanged": true, + "projectId": "Project:68b1958ee9350657446b5406", + "zip": true +} diff --git a/components.json b/components.json deleted file mode 100644 index cebfc325f..000000000 --- a/components.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "$schema": "https://ui.shadcn.com/schema.json", - "style": "default", - "rsc": true, - "tsx": true, - "tailwind": { - "config": "tailwind.config.ts", - "css": "styles/globals.css", - "baseColor": "slate", - "cssVariables": true - }, - "aliases": { - "components": "~/components", - "utils": "~/utils/shadcn" - } -} diff --git a/components/ActionError.tsx b/components/ActionError.tsx index ca41f994c..6442dc1a0 100644 --- a/components/ActionError.tsx +++ b/components/ActionError.tsx @@ -1,5 +1,4 @@ -import { AlertCircle } from 'lucide-react'; -import { Alert, AlertTitle, AlertDescription } from '~/components/ui/Alert'; +import { Alert, AlertDescription, AlertTitle } from '~/components/ui/Alert'; const ActionError = ({ errorTitle, @@ -9,8 +8,7 @@ const ActionError = ({ errorDescription: string; }) => { return ( - - + {errorTitle} {errorDescription} diff --git a/components/AnonymousRecruitmentSwitchClient.tsx b/components/AnonymousRecruitmentSwitchClient.tsx deleted file mode 100644 index ec6372240..000000000 --- a/components/AnonymousRecruitmentSwitchClient.tsx +++ /dev/null @@ -1,21 +0,0 @@ -'use client'; -import { setAppSetting } from '~/actions/appSettings'; -import 
SwitchWithOptimisticUpdate from './SwitchWithOptimisticUpdate'; - -const AnonymousRecruitmentSwitchClient = ({ - allowAnonymousRecruitment, -}: { - allowAnonymousRecruitment: boolean; -}) => { - return ( - { - await setAppSetting('allowAnonymousRecruitment', value); - return value; - }} - /> - ); -}; - -export default AnonymousRecruitmentSwitchClient; diff --git a/components/ApiTokenManagement.tsx b/components/ApiTokenManagement.tsx new file mode 100644 index 000000000..5a7ff5461 --- /dev/null +++ b/components/ApiTokenManagement.tsx @@ -0,0 +1,311 @@ +'use client'; + +import { type Row } from '@tanstack/react-table'; +import { type StrictColumnDef } from '~/components/DataTable/types'; +import { Clipboard } from 'lucide-react'; +import { use, useState } from 'react'; +import { + createApiToken, + deleteApiToken, + updateApiToken, +} from '~/actions/apiTokens'; +import { DataTable } from '~/components/DataTable/DataTable'; +import { useClientDataTable } from '~/hooks/useClientDataTable'; +import Dialog from '~/lib/dialogs/Dialog'; +import InputField from '~/lib/form/components/fields/InputField'; +import { type GetApiTokensReturnType } from '~/queries/apiTokens'; +import { DataTableColumnHeader } from './DataTable/ColumnHeader'; +import { Alert, AlertDescription, AlertTitle } from './ui/Alert'; +import { Button } from './ui/Button'; +import { Label } from './ui/Label'; +import ToggleField from '~/lib/form/components/fields/ToggleField'; +import TimeAgo from './ui/TimeAgo'; +import { useToast } from './ui/Toast'; + +type ApiToken = GetApiTokensReturnType[number]; + +type ApiTokenManagementProps = { + tokensPromise: Promise; + disabled?: boolean; +}; + +export default function ApiTokenManagement({ + tokensPromise, + disabled, +}: ApiTokenManagementProps) { + // TanStack Table: consumers must also opt out so React Compiler doesn't memoize JSX that depends on the table ref. 
+ 'use no memo'; + const initialTokens = use(tokensPromise); + const [tokens, setTokens] = useState(initialTokens); + const [isCreating, setIsCreating] = useState(false); + const [newTokenDescription, setNewTokenDescription] = useState(''); + const [createdToken, setCreatedToken] = useState(null); + const [isLoading, setIsLoading] = useState(false); + const [tokenToDelete, setTokenToDelete] = useState(null); + const [isDeleting, setIsDeleting] = useState(false); + + const { add } = useToast(); + + const handleCreateToken = async () => { + setIsLoading(true); + const result = await createApiToken({ + description: newTokenDescription || undefined, + }); + + if (result.error) { + alert(result.error); + } else if (result.data) { + setTokens([ + { + id: result.data.id, + description: result.data.description, + createdAt: result.data.createdAt, + lastUsedAt: result.data.lastUsedAt, + isActive: result.data.isActive, + }, + ...tokens, + ]); + setCreatedToken(result.data.token); + setNewTokenDescription(''); + setIsCreating(false); + } + + setIsLoading(false); + }; + + const handleToggleActive = async (id: string, isActive: boolean) => { + const result = await updateApiToken({ id, isActive: !isActive }); + + if (result.error) { + alert(result.error); + } else if (result.data) { + setTokens( + tokens.map((token) => + token.id === id ? { ...token, isActive: !isActive } : token, + ), + ); + } + }; + + const handleDeleteToken = async (token: ApiToken) => { + setIsDeleting(true); + const result = await deleteApiToken({ id: token.id }); + + if (result.error) { + add({ title: result.error, type: 'destructive' }); + } else { + setTokens(tokens.filter((t) => t.id !== token.id)); + setTokenToDelete(null); + } + setIsDeleting(false); + }; + + const columns: StrictColumnDef[] = [ + { + accessorKey: 'description', + header: ({ column }) => ( + + ), + cell: ({ row }) => ( + + {row.original.description ?? 
Untitled} + + ), + enableSorting: false, + enableHiding: false, + }, + { + accessorKey: 'createdAt', + sortingFn: 'datetime', + header: ({ column }) => ( + + ), + cell: ({ row }) => ( + + ), + }, + { + accessorKey: 'lastUsedAt', + sortingFn: 'datetime', + header: ({ column }) => ( + + ), + cell: ({ row }) => { + if (!row.original.lastUsedAt) { + return 'Never'; + } + + return ( + + ); + }, + }, + { + accessorKey: 'isActive', + sortingFn: 'basic', + header: ({ column }) => ( + + ), + cell: ({ row }) => ( + + handleToggleActive(row.original.id, row.original.isActive) + } + /> + ), + }, + { + id: 'actions', + enableSorting: false, + cell: ({ row }: { row: Row }) => ( + + ), + }, + ]; + + const { table } = useClientDataTable({ + data: tokens, + columns, + enablePagination: false, + }); + + return ( +
    + + + + {/* Create Token Dialog */} + setIsCreating(false)} + title="Create API Token" + description="Create a new API token for authenticating preview protocol uploads." + footer={ + <> + + + + } + > +
    + + setNewTokenDescription(value ?? '')} + /> +
    +
    + + {/* Show Created Token Dialog */} + setCreatedToken(null)} + title="API Token Created" + description="Your token has been created and is displayed below. Save this token somewhere safe now - you won't be able to see it again after you close this dialog." + footer={ + <> + + + + } + > + + Your API Token + + + {createdToken} + + + + + {/* Delete Token Confirmation Dialog */} + setTokenToDelete(null)} + title="Delete API Token" + description="Are you sure you want to delete this API token? Any applications using this token will no longer be able to authenticate." + footer={ + <> + + + + } + /> +
    + ); +} diff --git a/components/BackgroundBlobs/BackgroundBlobs.tsx b/components/BackgroundBlobs/BackgroundBlobs.tsx index 8bb243772..c6e347422 100644 --- a/components/BackgroundBlobs/BackgroundBlobs.tsx +++ b/components/BackgroundBlobs/BackgroundBlobs.tsx @@ -3,9 +3,20 @@ import * as blobs2 from 'blobs/v2'; import { interpolatePath as interpolate } from 'd3-interpolate-path'; import { memo, useMemo } from 'react'; -import { random, randomInt } from '~/utils/general'; import Canvas from './Canvas'; +const random = (a = 1, b = 0) => { + const lower = Math.min(a, b); + const upper = Math.max(a, b); + return lower + Math.random() * (upper - lower); +}; + +const randomInt = (a = 1, b = 0) => { + const lower = Math.ceil(Math.min(a, b)); + const upper = Math.floor(Math.max(a, b)); + return Math.floor(lower + Math.random() * (upper - lower + 1)); +}; + const gradients = [ ['rgb(237,0,140)', 'rgb(226,33,91)'], ['#00c9ff', '#92fe9d'], diff --git a/components/BackgroundBlobs/Canvas.tsx b/components/BackgroundBlobs/Canvas.tsx index 4a4618b4e..35b6c7c04 100644 --- a/components/BackgroundBlobs/Canvas.tsx +++ b/components/BackgroundBlobs/Canvas.tsx @@ -1,7 +1,6 @@ -"use client"; +'use client'; -import React from "react"; -import useCanvas from "~/hooks/useCanvas"; +import useCanvas from '~/hooks/useCanvas'; type CanvasProps = { draw: (ctx: CanvasRenderingContext2D, time: number) => void; @@ -13,7 +12,7 @@ const Canvas = (props: CanvasProps) => { const { draw, predraw, postdraw } = props; const canvasRef = useCanvas(draw, predraw, postdraw); - return ; + return ; }; export default Canvas; diff --git a/components/Colors.stories.tsx b/components/Colors.stories.tsx new file mode 100644 index 000000000..0539a426a --- /dev/null +++ b/components/Colors.stories.tsx @@ -0,0 +1,388 @@ +import type { Meta, StoryObj } from '@storybook/nextjs-vite'; +import Heading from '~/components/typography/Heading'; +import Paragraph from '~/components/typography/Paragraph'; + +const ColorSwatch = 
({ + name, + cssVar, + contrastVar, +}: { + name: string; + cssVar: string; + contrastVar?: string; +}) => ( +
    +
    + {name} +
    +
    +
    {cssVar}
    + {contrastVar && ( +
    {contrastVar}
    + )} +
    +
    +); + +const meta = { + title: 'Design System/Colors', + parameters: { + layout: 'padded', + a11y: { disable: true }, + }, + tags: ['autodocs'], +} satisfies Meta; + +export default meta; +type Story = StoryObj; + +export const SemanticColors: Story = { + render: () => ( +
    +
    + + Semantic Colors + + + Core brand and semantic colors used throughout the application + +
    + + + + +
    +
    + +
    + + Status Colors + + + Colors for indicating status, alerts, and user feedback + +
    + + + + +
    +
    + +
    + + Base Colors + + + Foundation colors for backgrounds, surfaces, and text + +
    + + + + +
    +
    + +
    + + Surface Levels + + + Layered surfaces for depth and hierarchy + +
    + + + + + +
    +
    + +
    + + Interactive Colors + + + Colors for interactive elements and inputs + +
    + + + +
    +
    +
    + ), +}; + +export const NodeColors: Story = { + render: () => ( +
    +
    + + Node Colors + + + Sequential colors for network nodes (1-8) + +
    + {[1, 2, 3, 4, 5, 6, 7, 8].map((num) => ( + + ))} +
    +
    +
    + ), +}; + +export const EdgeColors: Story = { + render: () => ( +
    +
    + + Edge Colors + + + Sequential colors for network edges (1-10) + +
    + {[1, 2, 3, 4, 5, 6, 7, 8, 9, 10].map((num) => ( + + ))} +
    +
    +
    + ), +}; + +export const OrdinalColors: Story = { + render: () => ( +
    +
    + + Ordinal Colors + + + Ordinal scale colors (1-8) + +
    + {[1, 2, 3, 4, 5, 6, 7, 8].map((num) => ( + + ))} +
    +
    +
    + ), +}; + +export const CategoricalColors: Story = { + render: () => ( +
    +
    + + Categorical Colors + + + Categorical colors for groups and convex hulls (1-10) + +
    + {[1, 2, 3, 4, 5, 6, 7, 8, 9, 10].map((num) => ( + + ))} +
    +
    +
    + ), +}; + +export const AllColors: Story = { + render: () => ( +
    +
    + + Complete Color System + + + All colors available in the Fresco design system + +
    + +
    + + Semantic Colors + +
    + + + + + + + + +
    +
    + +
    + + Node Sequence + +
    + {[1, 2, 3, 4, 5, 6, 7, 8].map((num) => ( + + ))} +
    +
    + +
    + + Edge Sequence + +
    + {[1, 2, 3, 4, 5, 6, 7, 8, 9, 10].map((num) => ( + + ))} +
    +
    + +
    + + Ordinal & Categorical + +
    + {[1, 2, 3, 4, 5, 6, 7, 8].map((num) => ( + + ))} +
    +
    + {[1, 2, 3, 4, 5, 6, 7, 8, 9, 10].map((num) => ( + + ))} +
    +
    +
    + ), +}; diff --git a/components/ContainerClasses.ts b/components/ContainerClasses.ts index 057b65b76..50cb44401 100644 --- a/components/ContainerClasses.ts +++ b/components/ContainerClasses.ts @@ -1,6 +1,6 @@ -import { cn } from '~/utils/shadcn'; +import { cx } from '~/utils/cva'; -export const containerClasses = cn( - 'relative mt-[-60px] flex flex-col rounded-xl min-w-full-[30rem] bg-card p-8', - 'after:absolute after:inset-[-20px] after:z-[-1] after:rounded-3xl after:bg-panel/30 after:shadow-2xl after:backdrop-blur-sm', +export const containerClasses = cx( + 'relative m-6! overflow-visible', + 'before:bg-surface-1/30 mx-0 before:absolute before:inset-[-20px] before:z-[-1] before:rounded before:shadow-2xl before:backdrop-blur-sm', ); diff --git a/components/CopyDebugInfoButton.tsx b/components/CopyDebugInfoButton.tsx index 1d62d3a2f..0c2772397 100644 --- a/components/CopyDebugInfoButton.tsx +++ b/components/CopyDebugInfoButton.tsx @@ -1,9 +1,9 @@ 'use client'; -import { cn } from '~/utils/shadcn'; -import { useToast } from './ui/use-toast'; -import { Check, ClipboardCopy } from 'lucide-react'; +import { ClipboardCopy } from 'lucide-react'; +import { cx } from '~/utils/cva'; import { Button } from './ui/Button'; +import { useToast } from './ui/Toast'; export default function CopyDebugInfoButton({ debugInfo, @@ -14,17 +14,15 @@ export default function CopyDebugInfoButton({ showToast?: boolean; className?: string; }) { - const { toast } = useToast(); + const { add } = useToast(); const copyDebugInfoToClipboard = async () => { await navigator.clipboard.writeText(debugInfo); if (showToast) { - toast({ - icon: , - title: 'Success', - description: 'Debug information copied to clipboard', - variant: 'success', + add({ + title: 'Debug information copied to clipboard', + type: 'success', }); } }; @@ -32,15 +30,15 @@ export default function CopyDebugInfoButton({ return ( ); diff --git a/components/DataTable/ColumnHeader.tsx b/components/DataTable/ColumnHeader.tsx index 
b81749d0f..041a7e6a7 100644 --- a/components/DataTable/ColumnHeader.tsx +++ b/components/DataTable/ColumnHeader.tsx @@ -1,31 +1,81 @@ -import { ArrowDown, ArrowUp, ArrowUpDown } from 'lucide-react'; -import { type Column } from '@tanstack/react-table'; +'use client'; -import { Button, buttonVariants } from '~/components/ui/Button'; +import { type Column, type Table } from '@tanstack/react-table'; +import { + ArrowDown, + ArrowDown01, + ArrowDownAZ, + ArrowUp, + ArrowUp01, + ArrowUpAZ, + Filter, +} from 'lucide-react'; +import React, { type ReactNode, useRef, useState } from 'react'; +import BooleanFilter from '~/components/DataTable/filters/BooleanFilter'; +import DateFilter from '~/components/DataTable/filters/DateFilter'; +import FacetedFilter from '~/components/DataTable/filters/FacetedFilter'; +import OperatorFilter from '~/components/DataTable/filters/OperatorFilter'; +import RangeFilter from '~/components/DataTable/filters/RangeFilter'; +import { + type FilterConfig, + type FilterValue, +} from '~/components/DataTable/filters/types'; +import Button, { buttonVariants } from '~/components/ui/Button'; import { DropdownMenu, DropdownMenuContent, DropdownMenuItem, + DropdownMenuRadioGroup, + DropdownMenuRadioItem, + DropdownMenuSeparator, DropdownMenuTrigger, } from '~/components/ui/dropdown-menu'; -import { cn } from '~/utils/shadcn'; +import { Popover, PopoverContent } from '~/components/ui/popover'; +import { cx } from '~/utils/cva'; + +const stringSortFns = new Set(['text', 'textCaseSensitive']); type DataTableColumnHeaderProps = { column: Column; - title: string; -} & React.HTMLAttributes; + title: ReactNode; + table?: Table; +} & Omit, 'title'>; export function DataTableColumnHeader({ column, title, + table, className, }: DataTableColumnHeaderProps) { - if (!column.getCanSort()) { + 'use no memo'; + + const meta = column.columnDef.meta; + const filterConfig = meta?.filterConfig; + const hasFilter = !!meta?.filterType && !!filterConfig; + + const sortingFn = 
column.columnDef.sortingFn; + const isStringSortFn = + typeof sortingFn === 'string' && stringSortFns.has(sortingFn); + + const isFiltered = column.getIsFiltered(); + const canSort = column.getCanSort(); + const isSorted = column.getIsSorted(); + + const buttonRef = useRef(null); + const [menuOpen, setMenuOpen] = useState(false); + const [filterOpen, setFilterOpen] = useState(false); + const [stagedValue, setStagedValue] = useState( + undefined, + ); + + const isActive = isSorted !== false || isFiltered || menuOpen || filterOpen; + + if (!canSort && !hasFilter) { return (
    @@ -34,48 +84,198 @@ export function DataTableColumnHeader({ ); } + const handleOpenFilter = () => { + setStagedValue(column.getFilterValue() as FilterValue | undefined); + // Defer opening so the dropdown menu has time to fully close first. + // Without this, the popover opens and immediately closes because the + // dropdown's close handler fires after our open. + requestAnimationFrame(() => { + setFilterOpen(true); + }); + }; + + const handleApplyFilter = () => { + column.setFilterValue(stagedValue); + setFilterOpen(false); + }; + + const handleClearFilter = () => { + column.setFilterValue(undefined); + setStagedValue(undefined); + setFilterOpen(false); + }; + + const icons: ReactNode[] = []; + if (isSorted === 'asc') + icons.push(); + if (isSorted === 'desc') + icons.push(); + if (isFiltered) + icons.push(); + + const data = + hasFilter && table + ? table.getCoreRowModel().rows.map((r) => r.original) + : []; + + const canFilter = + hasFilter && + !( + filterConfig?.type === 'operator' && + filterConfig.entitySelector?.getOptions(data).length === 0 + ); + return ( -
    - - - + <> + setMenuOpen(open)}> + 0 ? ( + {icons} + ) : undefined + } + /> + } + nativeButton + > + {title} - column.toggleSorting(false)} - > - - Asc - - column.toggleSorting(true)} - > - - Desc - - {/* column.toggleVisibility(false)} - > - */} + {canSort && ( + column.toggleSorting(value !== 'asc')} + className="flex flex-col gap-1" + > + : } + > + Sort ascending + + : } + > + Sort descending + + + {isSorted !== false && ( + column.clearSorting()}> + Clear sort + + )} + + )} + {canSort && canFilter && } + {canFilter && ( + } + closeOnClick + > + {isFiltered ? 'Edit filter' : 'Filter'} + + )} -
    + + {canFilter && filterConfig && ( + + +
    + +
    + + +
    +
    +
    +
    + )} + ); } + +function FilterRenderer({ + filterConfig, + value, + onChange, + data, +}: { + filterConfig: FilterConfig; + value: FilterValue | undefined; + onChange: (value: FilterValue | undefined) => void; + data: unknown[]; +}) { + switch (filterConfig.type) { + case 'range': + return ( + [0]['value']} + onChange={onChange} + config={filterConfig} + /> + ); + case 'date': + return ( + [0]['value']} + onChange={onChange} + config={filterConfig} + /> + ); + case 'boolean': + return ( + [0]['value']} + onChange={onChange} + config={filterConfig} + /> + ); + case 'faceted': + return ( + [0]['value']} + onChange={onChange} + config={filterConfig} + data={data} + /> + ); + case 'operator': + return ( + [0]['value']} + onChange={onChange} + config={filterConfig} + data={data} + /> + ); + default: + return null; + } +} diff --git a/components/DataTable/DataTable.stories.tsx b/components/DataTable/DataTable.stories.tsx new file mode 100644 index 000000000..5f5006ce4 --- /dev/null +++ b/components/DataTable/DataTable.stories.tsx @@ -0,0 +1,463 @@ +import { faker } from '@faker-js/faker'; +import type { Meta, StoryObj } from '@storybook/nextjs-vite'; +import { type ColumnDef } from '@tanstack/react-table'; +import { Trash } from 'lucide-react'; +import { DataTableColumnHeader } from '~/components/DataTable/ColumnHeader'; +import { + booleanFilterFn, + dateFilterFn, + facetedFilterFn, + rangeFilterFn, +} from '~/components/DataTable/filters/filterFns'; +import { Badge } from '~/components/ui/badge'; +import { Button } from '~/components/ui/Button'; +import { useClientDataTable } from '~/hooks/useClientDataTable'; +import { DataTable } from './DataTable'; +import { DataTableFloatingBar } from './DataTableFloatingBar'; +import { DataTableSkeleton } from './DataTableSkeleton'; +import { DataTableToolbar } from './DataTableToolbar'; + +type Person = { + id: string; + name: string; + email: string; + role: string; + status: 'active' | 'inactive' | 'pending'; +}; + +const 
ROLES = ['Admin', 'User', 'Editor', 'Viewer']; +const STATUSES: Person['status'][] = ['active', 'inactive', 'pending']; + +faker.seed(42); + +const sampleData: Person[] = Array.from({ length: 12 }, (_, i) => ({ + id: String(i + 1), + name: faker.person.fullName(), + email: faker.internet.email().toLowerCase(), + role: faker.helpers.arrayElement(ROLES), + status: faker.helpers.arrayElement(STATUSES), +})); + +const columns: ColumnDef[] = [ + { + accessorKey: 'name', + header: ({ column }) => ( + + ), + cell: ({ row }) => ( +
    {row.getValue('name')}
    + ), + }, + { + accessorKey: 'email', + header: ({ column }) => ( + + ), + cell: ({ row }) =>
    {row.getValue('email')}
    , + }, + { + accessorKey: 'role', + header: ({ column }) => ( + + ), + }, + { + accessorKey: 'status', + header: ({ column }) => ( + + ), + cell: ({ row }) => { + const status = String(row.getValue('status')); + return ( + + {status} + + ); + }, + }, +]; + +const meta = { + title: 'Components/DataTable', + component: DataTable, + parameters: { + layout: 'padded', + }, + tags: ['autodocs'], +} satisfies Meta; + +export default meta; +type Story = StoryObj; + +function DefaultTable({ data }: { data: Person[] }) { + const { table } = useClientDataTable({ data, columns }); + return ; +} + +export const Default: Story = { + render: () => , + args: {} as never, +}; + +function PaginatedTable() { + const { table } = useClientDataTable({ + data: sampleData, + columns, + }); + return ; +} + +export const WithPagination: Story = { + render: () => , + args: {} as never, +}; + +function NoPaginationTable() { + const { table } = useClientDataTable({ + data: sampleData, + columns, + enablePagination: false, + }); + return ; +} + +export const WithoutPagination: Story = { + render: () => , + args: {} as never, +}; + +function SearchToolbarTable() { + const { table } = useClientDataTable({ data: sampleData, columns }); + return ( + + } + /> + ); +} + +export const WithSearchToolbar: Story = { + render: () => , + args: {} as never, +}; + +function FacetedFilterTable() { + const { table } = useClientDataTable({ data: sampleData, columns }); + return ( + + } + /> + ); +} + +export const WithFacetedFilters: Story = { + render: () => , + args: {} as never, +}; + +function FloatingBarTable() { + const { table } = useClientDataTable({ data: sampleData, columns }); + return ( + + + + } + /> + ); +} + +export const WithFloatingBar: Story = { + render: () => , + args: {} as never, +}; + +function HeaderItemsTable() { + const { table } = useClientDataTable({ data: sampleData, columns }); + return ( + + + + + } + /> + ); +} + +export const WithHeaderItems: Story = { + render: () => , + 
args: {} as never, +}; + +function FullyFeaturedTable() { + const { table } = useClientDataTable({ data: sampleData, columns }); + return ( + + + + } + floatingBar={ + + + + } + /> + ); +} + +export const FullyFeatured: Story = { + render: () => , + args: {} as never, +}; + +export const Empty: Story = { + render: () => , + args: {} as never, +}; + +export const Skeleton: Story = { + render: () => ( + + ), + args: {} as never, +}; + +// --- Column Filters Story --- + +type FilterablePerson = { + id: string; + name: string; + email: string; + role: string; + score: number; + active: boolean; + joinDate: Date; +}; + +const FILTERABLE_ROLES = ['Admin', 'User', 'Editor', 'Viewer']; + +faker.seed(99); + +const filterSampleData: FilterablePerson[] = Array.from( + { length: 30 }, + (_, i) => ({ + id: String(i + 1), + name: faker.person.fullName(), + email: faker.internet.email().toLowerCase(), + role: faker.helpers.arrayElement(FILTERABLE_ROLES), + score: faker.number.int({ min: 0, max: 100 }), + active: faker.datatype.boolean(), + joinDate: faker.date.between({ + from: new Date('2024-01-01'), + to: new Date('2025-12-31'), + }), + }), +); + +const filterColumns: ColumnDef[] = [ + { + accessorKey: 'name', + header: ({ column, table }) => ( + + ), + cell: ({ row }) => ( +
    {row.getValue('name')}
    + ), + }, + { + accessorKey: 'role', + meta: { + filterType: 'faceted' as const, + filterConfig: { + type: 'faceted' as const, + options: FILTERABLE_ROLES.map((r) => ({ label: r, value: r })), + }, + }, + filterFn: facetedFilterFn, + header: ({ column, table }) => ( + + ), + }, + { + accessorKey: 'score', + meta: { + filterType: 'range' as const, + filterConfig: { + type: 'range' as const, + min: 0, + max: 100, + presets: [ + { label: 'Low', min: 0, max: 33 }, + { label: 'Medium', min: 34, max: 66 }, + { label: 'High', min: 67, max: 100 }, + ], + }, + }, + filterFn: rangeFilterFn, + header: ({ column, table }) => ( + + ), + }, + { + accessorKey: 'active', + meta: { + filterType: 'boolean' as const, + filterConfig: { + type: 'boolean' as const, + trueLabel: 'Active', + falseLabel: 'Inactive', + }, + }, + filterFn: booleanFilterFn, + header: ({ column, table }) => ( + + ), + cell: ({ row }) => ( + + {row.getValue('active') ? 'Active' : 'Inactive'} + + ), + }, + { + accessorKey: 'joinDate', + meta: { + filterType: 'date' as const, + filterConfig: { type: 'date' as const }, + }, + filterFn: dateFilterFn, + header: ({ column, table }) => ( + + ), + cell: ({ row }) => { + const date = row.getValue('joinDate'); + return {date.toLocaleDateString()}; + }, + }, +]; + +function ColumnFilterTable() { + const { table } = useClientDataTable({ + data: filterSampleData, + columns: filterColumns, + }); + + return ( + + +
    + } + /> + ); +} + +export const WithColumnFilters: Story = { + render: () => , + args: {} as never, +}; diff --git a/components/DataTable/DataTable.tsx b/components/DataTable/DataTable.tsx index 41b0367c6..77b3e5d94 100644 --- a/components/DataTable/DataTable.tsx +++ b/components/DataTable/DataTable.tsx @@ -1,21 +1,12 @@ +'use client'; + import { + type Column, flexRender, - getCoreRowModel, - getFilteredRowModel, - getPaginationRowModel, - getSortedRowModel, - useReactTable, - type ColumnDef, - type ColumnFiltersState, type Row, - type SortingState, type Table as TTable, } from '@tanstack/react-table'; -import { FileUp, Loader } from 'lucide-react'; -import { useCallback, useState } from 'react'; -import { makeDefaultColumns } from '~/components/DataTable/DefaultColumns'; -import { Button } from '~/components/ui/Button'; -import { Input } from '~/components/ui/Input'; +import { type ReactNode } from 'react'; import { Table, TableBody, @@ -24,264 +15,96 @@ import { TableHeader, TableRow, } from '~/components/ui/table'; +import { cx } from '~/utils/cva'; +import { DataTablePagination } from './DataTablePagination'; + +function getColumnHighlight(column: Column) { + const isSorted = column.getIsSorted(); + const isFiltered = column.getIsFiltered(); + if (isSorted && isFiltered) + return 'bg-[color-mix(in_oklab,var(--color-sea-green)_5%,var(--selected)_5%)]'; + if (isSorted) + return 'bg-[color-mix(in_oklab,var(--color-sea-green)_5%,transparent)]'; + if (isFiltered) + return 'bg-[color-mix(in_oklab,var(--selected)_5%,transparent)]'; + return undefined; +} -type CustomTable = TTable & { - options?: { - meta?: { - getRowClasses?: (row: Row) => string | undefined; - navigatorLanguages?: string[]; - }; - }; -}; - -type DataTableProps = { - columns?: ColumnDef[]; - data: TData[]; - filterColumnAccessorKey?: string; - handleDeleteSelected?: (data: TData[]) => Promise | void; - handleExportSelected?: (data: TData[]) => void; - actions?: React.ComponentType<{ - row: Row; 
- data: TData[]; - deleteHandler: (item: TData) => void; - }>; - actionsHeader?: React.ReactNode; - calculateRowClasses?: (row: Row) => string | undefined; - headerItems?: React.ReactNode; - defaultSortBy?: SortingState[0]; +type DataTableProps = { + table: TTable; + toolbar?: ReactNode; + floatingBar?: ReactNode; + showPagination?: boolean; + surfaceLevel?: 0 | 1 | 2 | 3; + emptyText?: string; + getRowClasses?: (row: Row) => string | undefined; }; -export function DataTable({ - columns = [], - data, - handleDeleteSelected, - handleExportSelected, - filterColumnAccessorKey = '', - actions, - actionsHeader, - calculateRowClasses, - headerItems, - defaultSortBy, -}: DataTableProps) { - const [sorting, setSorting] = useState( - defaultSortBy ? [{ ...defaultSortBy }] : [], - ); - const [isDeleting, setIsDeleting] = useState(false); - const [rowSelection, setRowSelection] = useState({}); - const [columnFilters, setColumnFilters] = useState([]); - - if (columns.length === 0) { - columns = makeDefaultColumns(data); - } - - const deleteHandler = async () => { - setIsDeleting(true); - const selectedData = table - .getSelectedRowModel() - .rows.map((r) => r.original); - - try { - await handleDeleteSelected?.(selectedData); - } catch (error) { - if (error instanceof Error) { - throw new Error(error.message); - } - throw new Error('An unknown error occurred.'); - } - - setIsDeleting(false); - setRowSelection({}); - }; - - if (actions) { - const actionsColumn = { - id: 'actions', - header: () => actionsHeader ?? 
null, - cell: ({ row }: { row: Row }) => { - const cellDeleteHandler = async (item: TData) => { - await handleDeleteSelected?.([item]); - }; - - return flexRender(actions, { - row, - data, - deleteHandler: cellDeleteHandler, - }); - }, - }; - - columns = [...columns, actionsColumn]; - } - - const table = useReactTable({ - data, - columns, - getCoreRowModel: getCoreRowModel(), - getPaginationRowModel: getPaginationRowModel(), - onSortingChange: setSorting, - getSortedRowModel: getSortedRowModel(), - onRowSelectionChange: setRowSelection, - onColumnFiltersChange: setColumnFilters, - getFilteredRowModel: getFilteredRowModel(), - meta: { - getRowClasses: (row: Row) => calculateRowClasses?.(row), - }, - state: { - sorting, - rowSelection, - columnFilters, - }, - }) as CustomTable; - - const hasSelectedRows = table.getSelectedRowModel().rows.length > 0; - - const exportHandler = useCallback(() => { - const selectedData = table - .getSelectedRowModel() - .rows.map((r) => r.original); - - handleExportSelected?.(selectedData); - - setRowSelection({}); - }, [handleExportSelected, table, setRowSelection]); +export function DataTable({ + table, + toolbar, + floatingBar, + showPagination = true, + surfaceLevel = 0, + emptyText = 'No results.', + getRowClasses, +}: DataTableProps) { + // TanStack Table returns a mutable ref with stable identity, defeating React Compiler memoization. + 'use no memo'; + const columnCount = table.getAllColumns().length; return ( - <> - {(filterColumnAccessorKey || headerItems) && ( -
    - {filterColumnAccessorKey && ( - - table - .getColumn(filterColumnAccessorKey) - ?.setFilterValue(event.target.value) - } - className="mt-0" - /> - )} - {headerItems} -
    - )} -
    - - - {table.getHeaderGroups().map((headerGroup) => ( - - {headerGroup.headers.map((header) => { - return ( - - {header.isPlaceholder - ? null - : flexRender( - header.column.columnDef.header, - header.getContext(), - )} - - ); - })} - - ))} - - - {table.getRowModel().rows?.length ? ( - table.getRowModel().rows.map((row) => ( - + {toolbar} +
    + + {table.getHeaderGroups().map((headerGroup) => ( + + {headerGroup.headers.map((header) => ( + - {row.getVisibleCells().map((cell) => ( - - {flexRender( - cell.column.columnDef.cell, - cell.getContext(), + {header.isPlaceholder + ? null + : flexRender( + header.column.columnDef.header, + header.getContext(), )} - - ))} - - )) - ) : ( - - - No results. - + + ))} + + ))} + + + {table.getRowModel().rows?.length ? ( + table.getRowModel().rows.map((row) => ( + + {row.getVisibleCells().map((cell) => ( + + {flexRender(cell.column.columnDef.cell, cell.getContext())} + + ))} - )} - -
    -
    -
    -
    -
    - {table.getFilteredSelectedRowModel().rows.length} of{' '} - {table.getFilteredRowModel().rows.length} row(s) selected. -
    -
    - - -
    -
    - {/** - * TODO: This is garbage. - * - * This shouldn't be part of the data table - it should be a component - * that is passed in to the table that gets given access to the table - * state. See the other data-table for an example. - */} - {hasSelectedRows && ( - - )} - - {hasSelectedRows && handleExportSelected && ( - - )} -
    - + )) + ) : ( + + + {emptyText} + + + )} + + + {showPagination && } + {floatingBar} +
    ); } diff --git a/components/DataTable/DataTableFacetedFilter.tsx b/components/DataTable/DataTableFacetedFilter.tsx new file mode 100644 index 000000000..5ad421f1d --- /dev/null +++ b/components/DataTable/DataTableFacetedFilter.tsx @@ -0,0 +1,57 @@ +'use client'; + +import { type Column } from '@tanstack/react-table'; +import { useMemo } from 'react'; +import { type Option } from '~/components/DataTable/types'; +import ComboboxField from '~/lib/form/components/fields/Combobox/Combobox'; +import { type ComboboxOption } from '~/lib/form/components/fields/Combobox/shared'; + +type DataTableFacetedFilterProps = { + column?: Column; + title?: string; + options: Option[]; + className?: string; +}; + +export function DataTableFacetedFilter({ + column, + title, + options, + className, +}: DataTableFacetedFilterProps) { + // TanStack Table returns a mutable ref with stable identity, defeating React Compiler memoization. + 'use no memo'; + const selectedValues = (column?.getFilterValue() as string[]) ?? 
[]; + + const comboboxOptions: ComboboxOption[] = useMemo( + () => + options.map((option) => ({ + value: option.value, + label: option.label, + })), + [options], + ); + + const handleChange = (newValues: (string | number)[] | undefined) => { + if (!newValues || newValues.length === 0) { + column?.setFilterValue(undefined); + } else { + column?.setFilterValue(newValues); + } + }; + + return ( + + ); +} diff --git a/components/DataTable/DataTableFloatingBar.tsx b/components/DataTable/DataTableFloatingBar.tsx new file mode 100644 index 000000000..2f021cfc7 --- /dev/null +++ b/components/DataTable/DataTableFloatingBar.tsx @@ -0,0 +1,58 @@ +'use client'; + +import { type Table } from '@tanstack/react-table'; +import { AnimatePresence } from 'motion/react'; +import { type ComponentProps } from 'react'; +import { cx } from '~/utils/cva'; +import { MotionSurface } from '../layout/Surface'; +import Paragraph from '../typography/Paragraph'; +import CloseButton from '../ui/CloseButton'; + +type DataTableFloatingBarProps = { + table: Table; + className?: string; +} & Omit, 'table' | 'className'>; + +export function DataTableFloatingBar({ + table, + children, + className, + ...props +}: DataTableFloatingBarProps) { + // TanStack Table returns a mutable ref with stable identity, defeating React Compiler memoization. + 'use no memo'; + const selectedCount = table.getFilteredSelectedRowModel().rows.length; + + return ( + + {selectedCount > 0 && ( + + + {selectedCount} row + {selectedCount === 1 ? '' : 's'} selected + +
    {children}
    + table.toggleAllRowsSelected(false)} + aria-label="Close selection bar" + /> +
    + )} +
    + ); +} diff --git a/components/DataTable/DataTablePagination.tsx b/components/DataTable/DataTablePagination.tsx new file mode 100644 index 000000000..2392d5b5b --- /dev/null +++ b/components/DataTable/DataTablePagination.tsx @@ -0,0 +1,90 @@ +import { type Table } from '@tanstack/react-table'; +import { + ChevronLeft, + ChevronRight, + ChevronsLeft, + ChevronsRight, +} from 'lucide-react'; +import { IconButton } from '~/components/ui/Button'; +import SelectField from '~/lib/form/components/fields/Select/Native'; +import Paragraph from '../typography/Paragraph'; +import { pageSizes } from './types'; + +type DataTablePaginationProps = { + table: Table; +}; + +export function DataTablePagination({ + table, +}: DataTablePaginationProps) { + // TanStack Table returns a mutable ref with stable identity, defeating React Compiler memoization. + 'use no memo'; + const pageCount = table.getPageCount(); + const showPageCount = pageCount > 0; + + return ( +
    +
    + + Rows per page + + { + table.setPageSize(Number(value)); + }} + options={pageSizes.map((size) => ({ + label: size.toLocaleString(), + value: size, + }))} + placeholder={table.getState().pagination.pageSize.toLocaleString()} + /> +
    + {showPageCount && ( +
    + Page {table.getState().pagination.pageIndex + 1} of {pageCount} +
    + )} +
    + table.setPageIndex(0)} + disabled={!table.getCanPreviousPage()} + icon={} + /> + table.previousPage()} + disabled={!table.getCanPreviousPage()} + icon={} + /> + table.nextPage()} + disabled={!table.getCanNextPage()} + icon={} + /> + table.setPageIndex(table.getPageCount() - 1)} + disabled={!table.getCanNextPage()} + icon={} + /> +
    +
    + ); +} diff --git a/components/DataTable/DataTableSkeleton.tsx b/components/DataTable/DataTableSkeleton.tsx new file mode 100644 index 000000000..5a048a613 --- /dev/null +++ b/components/DataTable/DataTableSkeleton.tsx @@ -0,0 +1,98 @@ +import { Skeleton } from '~/components/ui/skeleton'; +import { + Table, + TableBody, + TableCell, + TableHead, + TableHeader, + TableRow, +} from '~/components/ui/table'; + +type DataTableSkeletonProps = { + columnCount: number; + rowCount?: number; + searchableColumnCount?: number; + filterableColumnCount?: number; + headerItemsCount?: number; +}; + +export function DataTableSkeleton({ + columnCount, + rowCount = 10, + searchableColumnCount = 0, + filterableColumnCount = 0, + headerItemsCount = 0, +}: DataTableSkeletonProps) { + const hasToolbar = + searchableColumnCount > 0 || + filterableColumnCount > 0 || + headerItemsCount > 0; + + return ( +
    + {hasToolbar && ( +
    + {searchableColumnCount > 0 + ? Array.from({ length: searchableColumnCount }).map((_, i) => ( + + )) + : null} + {filterableColumnCount > 0 + ? Array.from({ length: filterableColumnCount }).map((_, i) => ( + + )) + : null} + {headerItemsCount > 0 + ? Array.from({ length: headerItemsCount }).map((_, i) => ( + + )) + : null} +
    + )} + + + + + {Array.from({ length: columnCount }).map((_, i) => ( + + + + ))} + + + + {Array.from({ length: rowCount }).map((_, i) => ( + + {Array.from({ length: columnCount }).map((_, j) => ( + + + + ))} + + ))} + +
    + +
    +
    +
    + + +
    +
    + +
    +
    + + + + +
    +
    +
    +
    + ); +} diff --git a/components/DataTable/DataTableToolbar.tsx b/components/DataTable/DataTableToolbar.tsx new file mode 100644 index 000000000..b008f7ad0 --- /dev/null +++ b/components/DataTable/DataTableToolbar.tsx @@ -0,0 +1,88 @@ +'use client'; + +import type { Table } from '@tanstack/react-table'; +import { Search, X } from 'lucide-react'; +import { type ReactNode } from 'react'; +import { + type DataTableFilterableColumn, + type DataTableSearchableColumn, +} from '~/components/DataTable/types'; +import { Button } from '~/components/ui/Button'; +import InputField from '~/lib/form/components/fields/InputField'; +import { DataTableFacetedFilter } from './DataTableFacetedFilter'; + +type DataTableToolbarProps = { + table: Table; + filterableColumns?: DataTableFilterableColumn[]; + searchableColumns?: DataTableSearchableColumn[]; + children?: ReactNode; +}; + +export function DataTableToolbar({ + table, + filterableColumns = [], + searchableColumns = [], + children, +}: DataTableToolbarProps) { + // TanStack Table returns a mutable ref with stable identity, defeating React Compiler memoization. + 'use no memo'; + const isFiltered = table.getState().columnFilters?.length > 0; + + if ( + searchableColumns.length === 0 && + filterableColumns.length === 0 && + !children + ) { + return null; + } + + return ( +
    + {searchableColumns.length > 0 && + searchableColumns.map( + (searchCol) => + table.getColumn(searchCol.id ? String(searchCol.id) : '') && ( + } + name="Filter" + className="tablet-landscape:min-w-0 tablet-landscape:flex-1 tablet-landscape:max-w-xl w-full min-w-fit" + placeholder={`Filter ${searchCol.title}...`} + value={ + (table + .getColumn(String(searchCol.id)) + ?.getFilterValue() as string) ?? '' + } + onChange={(value) => + table.getColumn(String(searchCol.id))?.setFilterValue(value) + } + /> + ), + )} + {filterableColumns.length > 0 && + filterableColumns.map( + (column) => + table.getColumn(column.id ? String(column.id) : '') && ( + + ), + )} + {isFiltered && ( + + )} + {children} +
    + ); +} diff --git a/components/DataTable/DefaultColumns.tsx b/components/DataTable/DefaultColumns.tsx deleted file mode 100644 index 2d8072768..000000000 --- a/components/DataTable/DefaultColumns.tsx +++ /dev/null @@ -1,18 +0,0 @@ -export const makeDefaultColumns = (data: TData[]) => { - const firstRow = data[0]; - - if (!firstRow || typeof firstRow !== 'object') { - throw new Error('Data must be an array of objects.'); - } - - const columnKeys = Object.keys(firstRow); - - const columns = columnKeys.map((key) => { - return { - accessorKey: key, - header: key, - }; - }); - - return columns; -}; diff --git a/components/DataTable/SelectAllHeader.tsx b/components/DataTable/SelectAllHeader.tsx new file mode 100644 index 000000000..c93976d27 --- /dev/null +++ b/components/DataTable/SelectAllHeader.tsx @@ -0,0 +1,76 @@ +'use client'; + +import { type Table } from '@tanstack/react-table'; +import { ChevronDown } from 'lucide-react'; +import { + DropdownMenu, + DropdownMenuContent, + DropdownMenuItem, + DropdownMenuTrigger, +} from '~/components/ui/dropdown-menu'; +import Checkbox from '~/lib/form/components/fields/Checkbox'; +import { IconButton } from '../ui/Button'; + +type SelectAllHeaderProps = { + table: Table; +}; + +export function SelectAllHeader({ table }: SelectAllHeaderProps) { + 'use no memo'; + + const isAllPageSelected = table.getIsAllPageRowsSelected(); + const isSomePage = table.getIsSomePageRowsSelected(); + const isAllSelected = table.getIsAllRowsSelected(); + const totalRows = table.getFilteredRowModel().rows.length; + const pageRows = table.getRowModel().rows.length; + const hasMultiplePages = totalRows > pageRows; + + return ( +
    + { + if (isAllSelected) { + table.toggleAllRowsSelected(false); + } else { + table.toggleAllPageRowsSelected(!!value); + } + }} + aria-label="Select all on page" + /> + {hasMultiplePages && ( + + + )} +
    + ); +} diff --git a/components/DataTable/filters/BooleanFilter.tsx b/components/DataTable/filters/BooleanFilter.tsx new file mode 100644 index 000000000..c85d9d5f4 --- /dev/null +++ b/components/DataTable/filters/BooleanFilter.tsx @@ -0,0 +1,53 @@ +'use client'; + +import { type BooleanFilterConfig } from '~/components/DataTable/filters/types'; +import CheckboxGroupField from '~/lib/form/components/fields/CheckboxGroup'; + +type BooleanFilterProps = { + value: boolean | undefined; + onChange: (value: boolean | undefined) => void; + config: BooleanFilterConfig; +}; + +export default function BooleanFilter({ + value, + onChange, + config, +}: BooleanFilterProps) { + const options = [ + { value: 'true', label: config.trueLabel }, + { value: 'false', label: config.falseLabel }, + ]; + + const selected: string[] = []; + if (value === true) selected.push('true'); + if (value === false) selected.push('false'); + + const handleChange = (values: (string | number)[] | undefined) => { + const stringValues = (values ?? 
[]).map(String); + const hasTrue = stringValues.includes('true'); + const hasFalse = stringValues.includes('false'); + + if (hasTrue && hasFalse) { + // Both selected = no filter + onChange(undefined); + } else if (hasTrue) { + onChange(true); + } else if (hasFalse) { + onChange(false); + } else { + onChange(undefined); + } + }; + + return ( + + ); +} diff --git a/components/DataTable/filters/DateFilter.tsx b/components/DataTable/filters/DateFilter.tsx new file mode 100644 index 000000000..c1ae28204 --- /dev/null +++ b/components/DataTable/filters/DateFilter.tsx @@ -0,0 +1,111 @@ +'use client'; + +import { DateTime } from 'luxon'; +import { + type DateFilterConfig, + type DateFilterValue, +} from '~/components/DataTable/filters/types'; +import Button from '~/components/ui/Button'; +import InputField from '~/lib/form/components/fields/InputField'; + +type DateFilterProps = { + value: DateFilterValue | undefined; + onChange: (value: DateFilterValue | undefined) => void; + config: DateFilterConfig; +}; + +type RelativePreset = { + label: string; + days: number; +}; + +const relativePresets: RelativePreset[] = [ + { label: 'Today', days: 0 }, + { label: 'Last 7 days', days: 7 }, + { label: 'Last 30 days', days: 30 }, + { label: 'Last 90 days', days: 90 }, +]; + +function getPresetRange(days: number): DateFilterValue { + const now = DateTime.now(); + const to = now.toISODate(); + const from = days === 0 ? 
to : now.minus({ days }).toISODate(); + return { from, to }; +} + +function isPresetActive( + value: DateFilterValue | undefined, + days: number, +): boolean { + if (!value) return false; + const preset = getPresetRange(days); + return value.from === preset.from && value.to === preset.to; +} + +export default function DateFilter({ + value, + onChange, + config: _config, +}: DateFilterProps) { + const handlePresetClick = (days: number) => { + if (isPresetActive(value, days)) { + onChange(undefined); + } else { + onChange(getPresetRange(days)); + } + }; + + const handleFromChange = (from: string) => { + if (!from) { + onChange(undefined); + return; + } + const to = value?.to ?? DateTime.now().toISODate(); + onChange({ from, to }); + }; + + const handleToChange = (to: string) => { + if (!to) { + onChange(undefined); + return; + } + const from = value?.from ?? to; + onChange({ from, to }); + }; + + return ( +
    +
    + {relativePresets.map((preset) => ( + + ))} +
    + +
    + handleFromChange(val ?? '')} + /> + to + handleToChange(val ?? '')} + /> +
    +
    + ); +} diff --git a/components/DataTable/filters/FacetedFilter.tsx b/components/DataTable/filters/FacetedFilter.tsx new file mode 100644 index 000000000..6593251c4 --- /dev/null +++ b/components/DataTable/filters/FacetedFilter.tsx @@ -0,0 +1,135 @@ +'use client'; + +import { Combobox } from '@base-ui/react/combobox'; +import { Check, SearchIcon } from 'lucide-react'; +import { useMemo } from 'react'; +import { type FacetedFilterConfig } from '~/components/DataTable/filters/types'; +import Button from '~/components/ui/Button'; +import { ScrollArea } from '~/components/ui/ScrollArea'; +import { type ComboboxOption } from '~/lib/form/components/fields/Combobox/shared'; +import InputField from '~/lib/form/components/fields/InputField'; +import { dropdownItemVariants } from '~/styles/shared/controlVariants'; +import { cx } from '~/utils/cva'; + +type FacetedFilterProps = { + value: string[] | undefined; + onChange: (value: string[] | undefined) => void; + config: FacetedFilterConfig; + data: unknown[]; +}; + +export default function FacetedFilter({ + value, + onChange, + config, + data, +}: FacetedFilterProps) { + const resolvedOptions = + typeof config.options === 'function' + ? config.options(data) + : config.options; + + const comboboxOptions: ComboboxOption[] = useMemo( + () => + resolvedOptions.map((option) => ({ + value: option.value, + label: option.label, + })), + [resolvedOptions], + ); + + const selectedValues = useMemo(() => value ?? 
[], [value]); + + const selectedOptions = useMemo( + () => + comboboxOptions.filter((opt) => + selectedValues.includes(String(opt.value)), + ), + [comboboxOptions, selectedValues], + ); + + const handleValueChange = ( + newValue: unknown[] | null, + _event: Combobox.Root.ChangeEventDetails, + ) => { + if (newValue === null || newValue.length === 0) { + onChange(undefined); + } else { + const typedValue = newValue as ComboboxOption[]; + onChange(typedValue.map((opt) => String(opt.value))); + } + }; + + const handleSelectAll = () => { + onChange(comboboxOptions.map((opt) => String(opt.value))); + }; + + const handleDeselectAll = () => { + onChange(undefined); + }; + + return ( + +
    + { + const inputFieldProps = + rest as unknown as React.ComponentPropsWithRef; + return ( + } + className="w-full" + nativeOnChange={onChange} + /> + ); + }} + /> + + No options found. + + } + > + {(option: ComboboxOption) => ( + + + + + + {option.label} + + + )} + +
    + + +
    +
    +
    + ); +} diff --git a/components/DataTable/filters/OperatorFilter.tsx b/components/DataTable/filters/OperatorFilter.tsx new file mode 100644 index 000000000..c91b9ac18 --- /dev/null +++ b/components/DataTable/filters/OperatorFilter.tsx @@ -0,0 +1,196 @@ +'use client'; + +import { X } from 'lucide-react'; +import { useState } from 'react'; +import { + type OperatorCondition, + type OperatorFilterConfig, + type OperatorFilterValue, +} from '~/components/DataTable/filters/types'; +import Paragraph from '~/components/typography/Paragraph'; +import Button, { IconButton } from '~/components/ui/Button'; +import InputField from '~/lib/form/components/fields/InputField'; +import SelectField from '~/lib/form/components/fields/Select/Native'; + +type OperatorFilterProps = { + value: OperatorFilterValue | undefined; + onChange: (value: OperatorFilterValue | undefined) => void; + config: OperatorFilterConfig; + data: unknown[]; +}; + +const operatorLabels: Record = { + eq: 'is equal to (=)', + gt: 'is greater than (>)', + lt: 'is less than (<)', + gte: 'is at least (≥)', + lte: 'is at most (≤)', +}; + +const operatorSymbols: Record = { + eq: '=', + gt: '>', + lt: '<', + gte: '≥', + lte: '≤', +}; + +export default function OperatorFilter({ + value, + onChange, + config, + data, +}: OperatorFilterProps) { + const [selectedEntity, setSelectedEntity] = useState(''); + const [selectedOperator, setSelectedOperator] = useState< + OperatorCondition['operator'] + >(config.operators[0]!); + const [inputValue, setInputValue] = useState(''); + + const entityOptions = config.entitySelector?.getOptions(data) ?? []; + const conditions = value?.conditions ?? 
[]; + + const handleAddCondition = () => { + if (!selectedEntity || inputValue === '') return; + + const numericValue = Number(inputValue); + if (Number.isNaN(numericValue)) return; + + const [entityKind, entityType] = selectedEntity.split('.') as [ + string, + string | undefined, + ]; + if (!entityType || (entityKind !== 'nodes' && entityKind !== 'edges')) + return; + + const entityLabel = + entityOptions.find((o) => o.value === selectedEntity)?.label ?? + entityType; + + const newCondition: OperatorCondition = { + entityType, + entityLabel, + entityKind, + operator: selectedOperator, + value: numericValue, + }; + + const newConditions = [...conditions, newCondition]; + onChange({ conditions: newConditions }); + + setInputValue(''); + }; + + const handleRemoveCondition = (index: number) => { + const newConditions = conditions.filter((_, i) => i !== index); + if (newConditions.length === 0) { + onChange(undefined); + } else { + onChange({ conditions: newConditions }); + } + }; + + const operatorOptions = config.operators.map((op) => ({ + value: op, + label: operatorLabels[op], + })); + + return ( +
    + + Show interviews where: + + + {conditions.length > 0 && ( +
    + {conditions.map((condition, index) => ( +
    + {index > 0 && ( + + and + + )} +
    + + {condition.entityLabel} {operatorSymbols[condition.operator]}{' '} + {condition.value} + + handleRemoveCondition(index)} + icon={} + className="size-5! shrink-0" + /> +
    +
    + ))} +
    + )} + +
    + { + if (typeof val === 'string' || typeof val === 'number') { + setSelectedEntity(String(val)); + } else { + setSelectedEntity(''); + } + }} + /> + + { + const op = String(val); + if (op in operatorLabels) { + setSelectedOperator(op as OperatorCondition['operator']); + } + }} + /> + +
    + setInputValue(val ?? '')} + placeholder="0" + /> + + +
    +
    + + {conditions.length === 0 && ( + + Add a condition to filter by network data + + )} +
    + ); +} diff --git a/components/DataTable/filters/RangeFilter.tsx b/components/DataTable/filters/RangeFilter.tsx new file mode 100644 index 000000000..64ebe120e --- /dev/null +++ b/components/DataTable/filters/RangeFilter.tsx @@ -0,0 +1,116 @@ +'use client'; + +import { useCallback } from 'react'; +import { + type RangeFilterConfig, + type RangeFilterValue, +} from '~/components/DataTable/filters/types'; +import Button from '~/components/ui/Button'; + +type RangeFilterProps = { + value: RangeFilterValue | undefined; + onChange: (value: RangeFilterValue | undefined) => void; + config: RangeFilterConfig; +}; + +export default function RangeFilter({ + value, + onChange, + config, +}: RangeFilterProps) { + const currentMin = value?.min ?? config.min; + const currentMax = value?.max ?? config.max; + const step = config.step ?? 1; + const formatLabel = config.formatLabel ?? String; + + const isPresetActive = useCallback( + (presetMin: number, presetMax: number) => + currentMin === presetMin && currentMax === presetMax, + [currentMin, currentMax], + ); + + const handlePresetClick = (presetMin: number, presetMax: number) => { + if (isPresetActive(presetMin, presetMax)) { + onChange(undefined); + } else { + onChange({ min: presetMin, max: presetMax }); + } + }; + + const handleMinChange = (newMin: number) => { + const clampedMin = Math.min(newMin, currentMax); + if (clampedMin === config.min && currentMax === config.max) { + onChange(undefined); + } else { + onChange({ min: clampedMin, max: currentMax }); + } + }; + + const handleMaxChange = (newMax: number) => { + const clampedMax = Math.max(newMax, currentMin); + if (currentMin === config.min && clampedMax === config.max) { + onChange(undefined); + } else { + onChange({ min: currentMin, max: clampedMax }); + } + }; + + const rangePercent = (val: number) => + ((val - config.min) / (config.max - config.min)) * 100; + + return ( +
    + {config.presets && config.presets.length > 0 && ( +
    + {config.presets.map((preset) => ( + + ))} +
    + )} + +
    +
    +
    + handleMinChange(Number(e.target.value))} + className="text-accent pointer-events-none absolute w-full appearance-none bg-transparent [&::-moz-range-thumb]:pointer-events-auto [&::-moz-range-thumb]:size-4 [&::-moz-range-thumb]:cursor-pointer [&::-moz-range-thumb]:appearance-none [&::-moz-range-thumb]:rounded-full [&::-moz-range-thumb]:border-2 [&::-moz-range-thumb]:border-current [&::-moz-range-thumb]:bg-current [&::-webkit-slider-thumb]:pointer-events-auto [&::-webkit-slider-thumb]:size-4 [&::-webkit-slider-thumb]:cursor-pointer [&::-webkit-slider-thumb]:appearance-none [&::-webkit-slider-thumb]:rounded-full [&::-webkit-slider-thumb]:border-2 [&::-webkit-slider-thumb]:border-current [&::-webkit-slider-thumb]:bg-current" + /> + handleMaxChange(Number(e.target.value))} + className="text-accent pointer-events-none absolute w-full appearance-none bg-transparent [&::-moz-range-thumb]:pointer-events-auto [&::-moz-range-thumb]:size-4 [&::-moz-range-thumb]:cursor-pointer [&::-moz-range-thumb]:appearance-none [&::-moz-range-thumb]:rounded-full [&::-moz-range-thumb]:border-2 [&::-moz-range-thumb]:border-current [&::-moz-range-thumb]:bg-current [&::-webkit-slider-thumb]:pointer-events-auto [&::-webkit-slider-thumb]:size-4 [&::-webkit-slider-thumb]:cursor-pointer [&::-webkit-slider-thumb]:appearance-none [&::-webkit-slider-thumb]:rounded-full [&::-webkit-slider-thumb]:border-2 [&::-webkit-slider-thumb]:border-current [&::-webkit-slider-thumb]:bg-current" + /> +
    + +
    + {formatLabel(currentMin)} + {formatLabel(currentMax)} +
    +
    + ); +} diff --git a/components/DataTable/filters/__tests__/filterFns.test.ts b/components/DataTable/filters/__tests__/filterFns.test.ts new file mode 100644 index 000000000..bf9292771 --- /dev/null +++ b/components/DataTable/filters/__tests__/filterFns.test.ts @@ -0,0 +1,259 @@ +import { type Row } from '@tanstack/react-table'; +import { describe, expect, it } from 'vitest'; +import { + booleanFilterFn, + dateFilterFn, + facetedFilterFn, + operatorFilterFn, + rangeFilterFn, +} from '~/components/DataTable/filters/filterFns'; + +function mockRow(original: T, getValue?: (id: string) => unknown): Row { + return { + original, + getValue: getValue ?? (() => undefined), + } as unknown as Row; +} + +describe('rangeFilterFn', () => { + it('returns true when value is within range', () => { + const row = mockRow({}, () => 5); + expect(rangeFilterFn(row, 'col', { min: 1, max: 10 })).toBe(true); + }); + + it('returns false when value is outside range', () => { + const row = mockRow({}, () => 15); + expect(rangeFilterFn(row, 'col', { min: 1, max: 10 })).toBe(false); + }); + + it('returns true when value is at boundary', () => { + const rowMin = mockRow({}, () => 1); + const rowMax = mockRow({}, () => 10); + expect(rangeFilterFn(rowMin, 'col', { min: 1, max: 10 })).toBe(true); + expect(rangeFilterFn(rowMax, 'col', { min: 1, max: 10 })).toBe(true); + }); +}); + +describe('dateFilterFn', () => { + it('returns true when date is within range', () => { + const row = mockRow({}, () => new Date('2025-06-15')); + expect( + dateFilterFn(row, 'col', { from: '2025-06-01', to: '2025-06-30' }), + ).toBe(true); + }); + + it('returns false when date is outside range', () => { + const row = mockRow({}, () => new Date('2025-07-15')); + expect( + dateFilterFn(row, 'col', { from: '2025-06-01', to: '2025-06-30' }), + ).toBe(false); + }); + + it('returns true when date is at boundary (end of day)', () => { + const row = mockRow({}, () => new Date('2025-06-30T18:00:00')); + expect( + 
dateFilterFn(row, 'col', { from: '2025-06-01', to: '2025-06-30' }), + ).toBe(true); + }); + + it('returns false when date value is null', () => { + const row = mockRow({}, () => null); + expect( + dateFilterFn(row, 'col', { from: '2025-06-01', to: '2025-06-30' }), + ).toBe(false); + }); +}); + +describe('booleanFilterFn', () => { + it('returns true when filterValue is true and value is truthy', () => { + const row = mockRow({}, () => 'some value'); + expect(booleanFilterFn(row, 'col', true)).toBe(true); + }); + + it('returns false when filterValue is true and value is null', () => { + const row = mockRow({}, () => null); + expect(booleanFilterFn(row, 'col', true)).toBe(false); + }); + + it('returns true when filterValue is false and value is null', () => { + const row = mockRow({}, () => null); + expect(booleanFilterFn(row, 'col', false)).toBe(true); + }); + + it('returns false when filterValue is false and value is truthy', () => { + const row = mockRow({}, () => 'some value'); + expect(booleanFilterFn(row, 'col', false)).toBe(false); + }); +}); + +describe('facetedFilterFn', () => { + it('returns true when value is in the filter list', () => { + const row = mockRow({}, () => 'active'); + expect(facetedFilterFn(row, 'col', ['active', 'pending'])).toBe(true); + }); + + it('returns false when value is not in the filter list', () => { + const row = mockRow({}, () => 'archived'); + expect(facetedFilterFn(row, 'col', ['active', 'pending'])).toBe(false); + }); + + it('returns false when filter list is empty', () => { + const row = mockRow({}, () => 'active'); + expect(facetedFilterFn(row, 'col', [])).toBe(false); + }); +}); + +describe('operatorFilterFn', () => { + const networkData = { + network: { + nodes: [ + { type: 'person', count: 5 }, + { type: 'place', count: 3 }, + ], + edges: [{ type: 'knows', count: 7 }], + }, + }; + + it('returns true when gte condition passes', () => { + const row = mockRow(networkData); + expect( + operatorFilterFn(row, '', { + conditions: 
[ + { + entityType: 'person', + entityLabel: 'person (nodes)', + entityKind: 'nodes', + operator: 'gte', + value: 5, + }, + ], + }), + ).toBe(true); + }); + + it('returns false when gte condition fails', () => { + const row = mockRow(networkData); + expect( + operatorFilterFn(row, '', { + conditions: [ + { + entityType: 'person', + entityLabel: 'person (nodes)', + entityKind: 'nodes', + operator: 'gte', + value: 10, + }, + ], + }), + ).toBe(false); + }); + + it('treats missing entity type as count 0', () => { + const row = mockRow(networkData); + expect( + operatorFilterFn(row, '', { + conditions: [ + { + entityType: 'organization', + entityLabel: 'organization (nodes)', + entityKind: 'nodes', + operator: 'eq', + value: 0, + }, + ], + }), + ).toBe(true); + }); + + it('applies AND logic across multiple conditions', () => { + const row = mockRow(networkData); + expect( + operatorFilterFn(row, '', { + conditions: [ + { + entityType: 'person', + entityLabel: 'person (nodes)', + entityKind: 'nodes', + operator: 'gte', + value: 5, + }, + { + entityType: 'place', + entityLabel: 'place (nodes)', + entityKind: 'nodes', + operator: 'gt', + value: 10, + }, + ], + }), + ).toBe(false); + }); + + it('handles eq operator', () => { + const row = mockRow(networkData); + expect( + operatorFilterFn(row, '', { + conditions: [ + { + entityType: 'person', + entityLabel: 'person (nodes)', + entityKind: 'nodes', + operator: 'eq', + value: 5, + }, + ], + }), + ).toBe(true); + }); + + it('handles gt operator', () => { + const row = mockRow(networkData); + expect( + operatorFilterFn(row, '', { + conditions: [ + { + entityType: 'knows', + entityLabel: 'knows (edges)', + entityKind: 'edges', + operator: 'gt', + value: 6, + }, + ], + }), + ).toBe(true); + }); + + it('handles lt operator', () => { + const row = mockRow(networkData); + expect( + operatorFilterFn(row, '', { + conditions: [ + { + entityType: 'place', + entityLabel: 'place (nodes)', + entityKind: 'nodes', + operator: 'lt', + value: 
5, + }, + ], + }), + ).toBe(true); + }); + + it('handles lte operator', () => { + const row = mockRow(networkData); + expect( + operatorFilterFn(row, '', { + conditions: [ + { + entityType: 'knows', + entityLabel: 'knows (edges)', + entityKind: 'edges', + operator: 'lte', + value: 7, + }, + ], + }), + ).toBe(true); + }); +}); diff --git a/components/DataTable/filters/__tests__/types.test.ts b/components/DataTable/filters/__tests__/types.test.ts new file mode 100644 index 000000000..7f6e8d90d --- /dev/null +++ b/components/DataTable/filters/__tests__/types.test.ts @@ -0,0 +1,63 @@ +import { describe, expect, it } from 'vitest'; +import { ColumnFiltersStateSchema } from '~/components/DataTable/filters/types'; + +describe('ColumnFiltersStateSchema', () => { + it('accepts an empty array', () => { + const result = ColumnFiltersStateSchema.safeParse([]); + expect(result.success).toBe(true); + }); + + it('accepts a range filter value', () => { + const result = ColumnFiltersStateSchema.safeParse([ + { id: 'networkSize', value: { min: 0, max: 10 } }, + ]); + expect(result.success).toBe(true); + }); + + it('accepts a boolean filter value', () => { + const result = ColumnFiltersStateSchema.safeParse([ + { id: 'finished', value: true }, + ]); + expect(result.success).toBe(true); + }); + + it('accepts an operator filter value', () => { + const result = ColumnFiltersStateSchema.safeParse([ + { + id: 'degree', + value: { + conditions: [ + { + entityType: 'person', + entityKind: 'nodes', + operator: 'gte', + value: 3, + }, + ], + }, + }, + ]); + expect(result.success).toBe(true); + }); + + it('accepts multiple filters', () => { + const result = ColumnFiltersStateSchema.safeParse([ + { id: 'networkSize', value: { min: 0, max: 10 } }, + { id: 'finished', value: true }, + { id: 'protocol', value: ['abc', 'def'] }, + ]); + expect(result.success).toBe(true); + }); + + it('rejects a non-array', () => { + const result = ColumnFiltersStateSchema.safeParse('not-an-array'); + 
expect(result.success).toBe(false); + }); + + it('rejects entries without id', () => { + const result = ColumnFiltersStateSchema.safeParse([ + { value: { min: 0, max: 10 } }, + ]); + expect(result.success).toBe(false); + }); +}); diff --git a/components/DataTable/filters/filterFns.ts b/components/DataTable/filters/filterFns.ts new file mode 100644 index 000000000..a3ec3980f --- /dev/null +++ b/components/DataTable/filters/filterFns.ts @@ -0,0 +1,82 @@ +import { type Row } from '@tanstack/react-table'; +import { + type DateFilterValue, + type OperatorFilterValue, + type RangeFilterValue, +} from '~/components/DataTable/filters/types'; + +export function rangeFilterFn( + row: Row, + columnId: string, + filterValue: RangeFilterValue, +): boolean { + const value = row.getValue(columnId); + return value >= filterValue.min && value <= filterValue.max; +} + +export function dateFilterFn( + row: Row, + columnId: string, + filterValue: DateFilterValue, +): boolean { + const date = row.getValue(columnId); + if (!date) return false; + const from = new Date(filterValue.from); + const to = new Date(filterValue.to); + to.setHours(23, 59, 59, 999); + return date >= from && date <= to; +} + +export function booleanFilterFn( + row: Row, + columnId: string, + filterValue: boolean, +): boolean { + const value = row.getValue(columnId); + return filterValue ? 
!!value : !value; +} + +export function facetedFilterFn( + row: Row, + columnId: string, + filterValue: string[], +): boolean { + const value = row.getValue(columnId); + return filterValue.includes(value); +} + +export function operatorFilterFn< + TData extends { + network: { + nodes: { type: string; count: number }[]; + edges: { type: string; count: number }[]; + }; + }, +>( + row: Row, + _columnId: string, + filterValue: OperatorFilterValue, +): boolean { + const network = row.original.network; + + return filterValue.conditions.every((cond) => { + const entries = network[cond.entityKind]; + const entry = entries.find((e) => e.type === cond.entityType); + const count = entry?.count ?? 0; + + switch (cond.operator) { + case 'eq': + return count === cond.value; + case 'gt': + return count > cond.value; + case 'lt': + return count < cond.value; + case 'gte': + return count >= cond.value; + case 'lte': + return count <= cond.value; + default: + return true; + } + }); +} diff --git a/components/DataTable/filters/types.ts b/components/DataTable/filters/types.ts new file mode 100644 index 000000000..b324851e0 --- /dev/null +++ b/components/DataTable/filters/types.ts @@ -0,0 +1,89 @@ +import { z } from 'zod/mini'; +import { type Option } from '~/components/DataTable/types'; + +export type RangePreset = { + label: string; + min: number; + max: number; +}; + +export type RangeFilterConfig = { + type: 'range'; + min: number; + max: number; + step?: number; + presets?: RangePreset[]; + formatLabel?: (value: number) => string; +}; + +export type DateFilterConfig = { + type: 'date'; +}; + +export type BooleanFilterConfig = { + type: 'boolean'; + trueLabel: string; + falseLabel: string; +}; + +export type FacetedFilterConfig = { + type: 'faceted'; + options: Option[] | ((data: unknown[]) => Option[]); +}; + +export type OperatorFilterConfig = { + type: 'operator'; + operators: ('eq' | 'gt' | 'lt' | 'gte' | 'lte')[]; + entitySelector?: { + label: string; + getOptions: (data: 
unknown[]) => Option[]; + }; +}; + +export type FilterConfig = + | RangeFilterConfig + | DateFilterConfig + | BooleanFilterConfig + | FacetedFilterConfig + | OperatorFilterConfig; + +export type RangeFilterValue = { + min: number; + max: number; +}; + +export type DateFilterValue = { + from: string; + to: string; +}; + +export type BooleanFilterValue = boolean; + +export type FacetedFilterValue = string[]; + +export type OperatorCondition = { + entityType: string; + entityLabel: string; + entityKind: 'nodes' | 'edges'; + operator: 'eq' | 'gt' | 'lt' | 'gte' | 'lte'; + value: number; +}; + +export type OperatorFilterValue = { + conditions: OperatorCondition[]; +}; + +export type FilterValue = + | RangeFilterValue + | DateFilterValue + | BooleanFilterValue + | FacetedFilterValue + | OperatorFilterValue; + +// z.unknown() is intentional — each filter component validates its own value shape +export const ColumnFiltersStateSchema = z.array( + z.object({ + id: z.string(), + value: z.unknown(), + }), +); diff --git a/components/DataTable/nuqs/NuqsClearFilters.tsx b/components/DataTable/nuqs/NuqsClearFilters.tsx new file mode 100644 index 000000000..dd366201b --- /dev/null +++ b/components/DataTable/nuqs/NuqsClearFilters.tsx @@ -0,0 +1,78 @@ +'use client'; + +import { X } from 'lucide-react'; +import { parseAsString, useQueryStates } from 'nuqs'; +import { useMemo } from 'react'; +import { Button } from '~/components/ui/Button'; +import { nuqsTableUrlKey, useNuqsTable } from './NuqsTableProvider'; + +type NuqsClearFiltersProps = { + /** + * Logical param names (unprefixed) to clear. The provider's `prefix` will + * be applied to derive the URL keys. + */ + paramKeys: readonly string[]; + label?: string; +}; + +/** + * URL-backed "clear all filters" button for server-fetched tables. + * + * Hidden when none of the tracked params are set. 
Uses a string parser for + * all keys because the only thing we need to know is presence — the actual + * parsing for each key lives in its dedicated filter component. + */ +export default function NuqsClearFilters({ + paramKeys, + label = 'Clear Filters', +}: NuqsClearFiltersProps) { + const { prefix, startTransition } = useNuqsTable(); + + const parsers = useMemo(() => { + const entries = paramKeys.map( + (key) => [key, parseAsString.withOptions({ clearOnDefault: true })] as const, + ); + return Object.fromEntries(entries) as Record< + (typeof paramKeys)[number], + typeof parseAsString + >; + }, [paramKeys]); + + const urlKeys = useMemo(() => { + const entries = paramKeys.map( + (key) => [key, nuqsTableUrlKey(prefix, key)] as const, + ); + return Object.fromEntries(entries) as Record< + (typeof paramKeys)[number], + string + >; + }, [paramKeys, prefix]); + + const [values, setValues] = useQueryStates(parsers, { + urlKeys, + shallow: false, + startTransition, + }); + + const hasAnyFilter = Object.values(values).some( + (v) => v !== null && v !== '', + ); + if (!hasAnyFilter) return null; + + const cleared = Object.fromEntries(paramKeys.map((k) => [k, null])) as Record< + (typeof paramKeys)[number], + null + >; + + return ( + + ); +} diff --git a/components/DataTable/nuqs/NuqsFacetedFilter.tsx b/components/DataTable/nuqs/NuqsFacetedFilter.tsx new file mode 100644 index 000000000..8491f6359 --- /dev/null +++ b/components/DataTable/nuqs/NuqsFacetedFilter.tsx @@ -0,0 +1,75 @@ +'use client'; + +import { + parseAsArrayOf, + parseAsStringLiteral, + useQueryState, +} from 'nuqs'; +import { useMemo } from 'react'; +import ComboboxField from '~/lib/form/components/fields/Combobox/Combobox'; +import { nuqsTableUrlKey, useNuqsTable } from './NuqsTableProvider'; + +type NuqsFacetedFilterProps = { + /** Logical param name (unprefixed). The provider's `prefix` will be applied. */ + paramKey: string; + /** Whitelist of values this filter accepts. Used for URL parsing + options. 
*/ + values: readonly T[]; + /** Visible label for each option. Defaults to the value itself. */ + getLabel?: (value: T) => string; + placeholder?: string; + searchPlaceholder?: string; + emptyMessage?: string; + className?: string; +}; + +/** + * URL-backed multi-select filter for server-fetched tables. + * + * Values are parsed via `parseAsStringLiteral(values)` so unknown URL values + * are rejected. Writes go through the provider's `startTransition`, so the + * table body fades concurrently without unmounting. + */ +export default function NuqsFacetedFilter({ + paramKey, + values, + getLabel = (v) => v, + placeholder = 'Filter...', + searchPlaceholder = 'Search...', + emptyMessage = 'No options found.', + className, +}: NuqsFacetedFilterProps) { + const { prefix, startTransition } = useNuqsTable(); + const urlKey = nuqsTableUrlKey(prefix, paramKey); + + const [selected, setSelected] = useQueryState( + urlKey, + parseAsArrayOf(parseAsStringLiteral(values)).withOptions({ + shallow: false, + clearOnDefault: true, + startTransition, + }), + ); + + const options = useMemo( + () => values.map((v) => ({ value: v, label: getLabel(v) })), + [values, getLabel], + ); + + return ( + { + const next = (newValues as T[] | undefined) ?? []; + void setSelected(next.length > 0 ? next : null); + }} + showSelectAll + showDeselectAll + className={className ?? 
'w-auto shrink-0'} + /> + ); +} diff --git a/components/DataTable/nuqs/NuqsSearchFilter.tsx b/components/DataTable/nuqs/NuqsSearchFilter.tsx new file mode 100644 index 000000000..d04f765d7 --- /dev/null +++ b/components/DataTable/nuqs/NuqsSearchFilter.tsx @@ -0,0 +1,92 @@ +'use client'; + +import { debounce } from 'es-toolkit'; +import { Search } from 'lucide-react'; +import { parseAsString, useQueryState } from 'nuqs'; +import { useEffect, useMemo, useRef, useState } from 'react'; +import InputField from '~/lib/form/components/fields/InputField'; +import { nuqsTableUrlKey, useNuqsTable } from './NuqsTableProvider'; + +type NuqsSearchFilterProps = { + /** Logical param name (unprefixed). The provider's `prefix` will be applied. */ + paramKey: string; + placeholder?: string; + className?: string; + /** Delay between last keystroke and URL commit. Defaults to 300 ms. */ + debounceMs?: number; +}; + +/** + * URL-backed text filter for server-fetched tables. + * + * Holds a transient local buffer for instant keystroke feedback while the + * actual URL write is debounced and routed through the provider's + * `startTransition` — so the table body fades but the input never unmounts + * or loses focus, and concurrent typing isn't dropped. + */ +export default function NuqsSearchFilter({ + paramKey, + placeholder = 'Filter...', + className, + debounceMs = 300, +}: NuqsSearchFilterProps) { + const { prefix, startTransition } = useNuqsTable(); + const urlKey = nuqsTableUrlKey(prefix, paramKey); + + const [value, setValue] = useQueryState( + urlKey, + parseAsString.withOptions({ + shallow: false, + clearOnDefault: true, + startTransition, + }), + ); + + const [local, setLocal] = useState(value ?? ''); + + // Tracks the last value this component wrote to the URL. 
Distinguishes + // "my own debounced commit arrived back through nuqs" from "URL changed + // externally" (clear button, browser back, navigation), so the external + // case can cancel any in-flight debounce and adopt the new value without + // being clobbered when the pending commit fires. + const lastWrittenRef = useRef(value); + + const debouncedCommit = useMemo( + () => + debounce( + (next: string | null) => { + lastWrittenRef.current = next; + void setValue(next); + }, + debounceMs, + { edges: ['trailing'] }, + ), + [setValue, debounceMs], + ); + + useEffect(() => () => debouncedCommit.cancel(), [debouncedCommit]); + + useEffect(() => { + if (value !== lastWrittenRef.current) { + debouncedCommit.cancel(); + lastWrittenRef.current = value; + setLocal(value ?? ''); + } + }, [value, debouncedCommit]); + + return ( + } + name={urlKey} + className={className} + placeholder={placeholder} + value={local} + onChange={(next) => { + const v = next ?? ''; + setLocal(v); + debouncedCommit(v.length > 0 ? v : null); + }} + /> + ); +} diff --git a/components/DataTable/nuqs/NuqsTableProvider.tsx b/components/DataTable/nuqs/NuqsTableProvider.tsx new file mode 100644 index 000000000..ef59c4389 --- /dev/null +++ b/components/DataTable/nuqs/NuqsTableProvider.tsx @@ -0,0 +1,68 @@ +'use client'; + +import { + createContext, + useContext, + useMemo, + useTransition, + type ReactNode, + type TransitionStartFunction, +} from 'react'; + +/** + * Shared context for URL-driven server-fetched tables. + * + * Provides a single `useTransition` so every filter / pagination / sort + * control that writes to the URL funnels through one concurrent render, + * letting the table body fade consistently while fresh data is fetched. 
+ * + * Tables that want URL param namespacing (so multiple instances can coexist + * on the same page) pass a `prefix` — filter components then map their + * logical param name (`q`, `type`, `page`, …) to `${prefix}_${name}` in the + * URL, while programmatic state keeps the short name. + */ +type NuqsTableContextValue = { + prefix: string; + isPending: boolean; + startTransition: TransitionStartFunction; +}; + +const NuqsTableContext = createContext(null); + +export function useNuqsTable(): NuqsTableContextValue { + const ctx = useContext(NuqsTableContext); + if (!ctx) { + throw new Error('useNuqsTable must be used within a '); + } + return ctx; +} + +/** + * Resolve a logical param name to its URL key given a namespace prefix. + * Exported so row components that read state directly with nuqs hooks can + * stay in sync with the namespace their parent provider uses. + */ +export function nuqsTableUrlKey(prefix: string, paramKey: string): string { + return prefix ? `${prefix}_${paramKey}` : paramKey; +} + +type NuqsTableProviderProps = { + prefix?: string; + children: ReactNode; +}; + +export function NuqsTableProvider({ + prefix = '', + children, +}: NuqsTableProviderProps) { + const [isPending, startTransition] = useTransition(); + const value = useMemo( + () => ({ prefix, isPending, startTransition }), + [prefix, isPending], + ); + return ( + + {children} + + ); +} diff --git a/components/DataTable/types.ts b/components/DataTable/types.ts new file mode 100644 index 000000000..876f7cad3 --- /dev/null +++ b/components/DataTable/types.ts @@ -0,0 +1,28 @@ +import { type ColumnDef } from '@tanstack/react-table'; + +export type Option = { + label: string; + value: string; + icon?: React.ComponentType<{ className?: string }>; +}; + +/** + * A stricter `ColumnDef` that requires `sortingFn` on every sortable column. + * Columns that set `enableSorting: false` are exempt. 
+ */ +export type StrictColumnDef = + | (ColumnDef & { enableSorting: false }) + | (ColumnDef & { + sortingFn: NonNullable['sortingFn']>; + }); + +export type DataTableSearchableColumn = { + id: keyof TData | (string & {}); + title: string; +}; + +export type DataTableFilterableColumn = { + options: Option[]; +} & DataTableSearchableColumn; + +export const pageSizes = [10, 20, 50, 100] as const; diff --git a/components/ErrorDetails.tsx b/components/ErrorDetails.tsx deleted file mode 100644 index bcbae1e9a..000000000 --- a/components/ErrorDetails.tsx +++ /dev/null @@ -1,42 +0,0 @@ -import { type ReactNode, useState } from 'react'; -import { - Collapsible, - CollapsibleContent, - CollapsibleTrigger, -} from './ui/collapsible'; -import Heading from './ui/typography/Heading'; -import { ChevronDown, ChevronUp } from 'lucide-react'; -import CopyDebugInfoButton from './CopyDebugInfoButton'; - -export const ErrorDetails = ({ - errorText, - children, -}: { - errorText: string; - children: ReactNode; -}) => { - const [showStackTrace, setShowStackTrace] = useState(false); - - return ( - - - - {showStackTrace ? 'Hide' : 'Show'} debug information - - {showStackTrace ? ( - - ) : ( - - )} - - - {children} - - - - ); -}; diff --git a/components/ErrorReportNotifier.tsx b/components/ErrorReportNotifier.tsx deleted file mode 100644 index 7b352618d..000000000 --- a/components/ErrorReportNotifier.tsx +++ /dev/null @@ -1,95 +0,0 @@ -import { CheckIcon, Loader2, XCircle } from 'lucide-react'; -import { AnimatePresence, motion } from 'motion/react'; -import { useEffect, useRef, useState } from 'react'; -import trackEvent from '~/lib/analytics'; - -const labelAnimationVariants = { - hidden: { opacity: 0, y: '-100%' }, - visible: { opacity: 1, y: 0 }, - exit: { opacity: 0, y: '100%' }, -}; - -type ReportStates = 'idle' | 'loading' | 'success' | 'error'; - -function ReportNotifier({ state = 'idle' }: { state?: ReportStates }) { - return ( -
    - - {state === 'loading' && ( - - - Sending analytics data... - - )} - {state === 'success' && ( - - - Sent analytics data! - - )} - {state === 'error' && ( - - - Error sending analytics data. - - )} - -
    - ); -} - -export default function ErrorReportNotifier({ error }: { error: Error }) { - const initialized = useRef(false); - const [state, setState] = useState('idle'); - - useEffect(() => { - if (initialized.current) return; - setState('loading'); - - trackEvent({ - type: 'Error', - name: error.name, - message: error.message, - stack: error.stack, - metadata: { - path: window?.location?.pathname ?? 'unknown', - userAgent: window?.navigator?.userAgent ?? 'unknown', - }, - }) - .then((result) => { - if (!result.success) { - setState('error'); - return; - } - - setState('success'); - }) - .catch(() => { - setState('error'); - }); - initialized.current = true; - }, [error]); - - return ; -} diff --git a/components/ExportProgressProvider.tsx b/components/ExportProgressProvider.tsx new file mode 100644 index 000000000..38c74fe35 --- /dev/null +++ b/components/ExportProgressProvider.tsx @@ -0,0 +1,235 @@ +'use client'; + +import { Loader2 } from 'lucide-react'; +import posthog from 'posthog-js'; +import { createContext, useCallback, useContext, useRef } from 'react'; +import { updateExportTime } from '~/actions/interviews'; +import { deleteZipFromStorage } from '~/actions/uploadThing'; +import ProgressBar from '~/components/ui/ProgressBar'; +import { useToast } from '~/components/ui/Toast'; +import { useDownload } from '~/hooks/useDownload'; +import type { ExportEvent } from '~/lib/export/exportEvents'; +import type { ExportOptions } from '~/lib/network-exporters/utils/types'; +import { ensureError } from '~/utils/ensureError'; +import Spinner from './Spinner'; + +type ExportContextValue = { + startExport: (interviewIds: string[], exportOptions: ExportOptions) => void; +}; + +const ExportContext = createContext(null); + +export function useExportProgress() { + const ctx = useContext(ExportContext); + if (!ctx) { + throw new Error( + 'useExportProgress must be used within ExportProgressProvider', + ); + } + return ctx; +} + +function ExportProgressDescription({ + 
stage, + message, + current, + total, +}: { + stage: string; + message: string; + current?: number; + total?: number; +}) { + const showProgress = + stage === 'generating' && total !== undefined && total > 0; + const percent = + showProgress && current !== undefined + ? Math.round((current / total) * 100) + : 0; + + return ( +
    +
    +
    + {showProgress && ( + + )} +
    + ); +} + +export function ExportProgressProvider({ + children, +}: { + children: React.ReactNode; +}) { + const { add, update, close } = useToast(); + const download = useDownload(); + const abortControllers = useRef(new Map()); + + const abortExport = useCallback((toastId: string) => { + const controller = abortControllers.current.get(toastId); + controller?.abort(); + abortControllers.current.delete(toastId); + }, []); + + const startExport = useCallback( + (interviewIds: string[], exportOptions: ExportOptions) => { + const controller = new AbortController(); + + const toastId = add({ + icon: