From 0c7e602e74027e15e070244566726d04e54d1bf1 Mon Sep 17 00:00:00 2001 From: John Lindquist Date: Mon, 30 Mar 2026 17:03:10 -0600 Subject: [PATCH 01/17] Add cookbook section with 50 workflow pattern recipes Migrate the "Workflow API Explorer" decision tree concept from workflow-campaign-demos into useworkflow.dev docs as a Cookbook. Infrastructure: - docs/lib/cookbook-tree.ts: decision tree data, 50 recipe metadata entries, slug-to-category mapping - docs/components/geistdocs/cookbook-explorer.tsx: interactive "I want to..." decision tree UI with breadcrumb navigation - docs/content/docs/cookbook/index.mdx: landing page rendering CookbookExplorer component - docs/content/docs/cookbook/meta.json + 8 category meta.json files for sidebar nav - docs/content/docs/meta.json: added cookbook to docs nav between foundations and how-it-works - docs/app/[lang]/docs/[[...slug]]/page.tsx: registered CookbookExplorer component 50 recipe MDX files across 8 categories (payments, approvals, resilience, notifications, webhooks, data-processing, routing, observability), each with: - Frontmatter (title, description, type: guide, summary with use-case scenario) - Simplified code snippet (core pattern only, stripped of demo UI concerns) - Full implementation code snippet (exact source from campaign demos) - Key APIs section with links to API reference docs --- docs/app/[lang]/docs/[[...slug]]/page.tsx | 2 + .../geistdocs/cookbook-explorer.tsx | 211 +++++ .../cookbook/approvals/approval-chain.mdx | 244 ++++++ .../docs/cookbook/approvals/approval-gate.mdx | 222 +++++ .../cookbook/approvals/cancellable-export.mdx | 127 +++ .../content/docs/cookbook/approvals/meta.json | 9 + .../approvals/scheduler-agent-supervisor.mdx | 338 ++++++++ .../cookbook/data-processing/aggregator.mdx | 221 +++++ .../data-processing/batch-processor.mdx | 171 ++++ .../data-processing/competing-consumers.mdx | 194 +++++ .../cookbook/data-processing/map-reduce.mdx | 201 +++++ 
.../docs/cookbook/data-processing/meta.json | 14 + .../cookbook/data-processing/pipeline.mdx | 157 ++++ .../data-processing/priority-queue.mdx | 169 ++++ .../cookbook/data-processing/resequencer.mdx | 205 +++++ .../data-processing/scatter-gather.mdx | 254 ++++++ .../cookbook/data-processing/splitter.mdx | 240 ++++++ docs/content/docs/cookbook/index.mdx | 7 + docs/content/docs/cookbook/meta.json | 22 + .../docs/cookbook/notifications/fan-out.mdx | 298 +++++++ .../docs/cookbook/notifications/meta.json | 11 + .../notifications/onboarding-drip.mdx | 139 ++++ .../notifications/publish-subscribe.mdx | 211 +++++ .../cookbook/notifications/recipient-list.mdx | 264 ++++++ .../notifications/scheduled-digest.mdx | 234 ++++++ .../notifications/wakeable-reminder.mdx | 183 +++++ .../observability/correlation-identifier.mdx | 274 +++++++ .../cookbook/observability/event-sourcing.mdx | 307 +++++++ .../observability/message-history.mdx | 567 +++++++++++++ .../docs/cookbook/observability/meta.json | 10 + .../observability/namespaced-streams.mdx | 125 +++ .../docs/cookbook/observability/wire-tap.mdx | 259 ++++++ .../docs/cookbook/payments/choreography.mdx | 632 +++++++++++++++ .../cookbook/payments/guaranteed-delivery.mdx | 185 +++++ .../cookbook/payments/idempotent-receiver.mdx | 191 +++++ docs/content/docs/cookbook/payments/meta.json | 11 + .../cookbook/payments/process-manager.mdx | 624 +++++++++++++++ docs/content/docs/cookbook/payments/saga.mdx | 373 +++++++++ .../payments/transactional-outbox.mdx | 163 ++++ .../docs/cookbook/resilience/bulkhead.mdx | 222 +++++ .../cookbook/resilience/circuit-breaker.mdx | 226 ++++++ .../cookbook/resilience/dead-letter-queue.mdx | 197 +++++ .../cookbook/resilience/hedge-request.mdx | 170 ++++ .../docs/cookbook/resilience/meta.json | 12 + .../cookbook/resilience/retry-backoff.mdx | 180 +++++ .../resilience/retryable-rate-limit.mdx | 157 ++++ .../docs/cookbook/resilience/throttle.mdx | 195 +++++ .../cookbook/routing/content-based-router.mdx | 283 
+++++++ .../cookbook/routing/content-enricher.mdx | 360 +++++++++ docs/content/docs/cookbook/routing/detour.mdx | 201 +++++ .../docs/cookbook/routing/message-filter.mdx | 262 ++++++ .../cookbook/routing/message-translator.mdx | 253 ++++++ docs/content/docs/cookbook/routing/meta.json | 12 + .../docs/cookbook/routing/normalizer.mdx | 312 ++++++++ .../docs/cookbook/routing/routing-slip.mdx | 149 ++++ .../cookbook/webhooks/async-request-reply.mdx | 263 ++++++ .../docs/cookbook/webhooks/claim-check.mdx | 107 +++ .../docs/cookbook/webhooks/event-gateway.mdx | 146 ++++ docs/content/docs/cookbook/webhooks/meta.json | 11 + .../docs/cookbook/webhooks/request-reply.mdx | 213 +++++ .../docs/cookbook/webhooks/status-poller.mdx | 221 +++++ .../docs/cookbook/webhooks/webhook-basics.mdx | 137 ++++ docs/content/docs/meta.json | 1 + docs/lib/cookbook-tree.ts | 757 ++++++++++++++++++ 64 files changed, 13116 insertions(+) create mode 100644 docs/components/geistdocs/cookbook-explorer.tsx create mode 100644 docs/content/docs/cookbook/approvals/approval-chain.mdx create mode 100644 docs/content/docs/cookbook/approvals/approval-gate.mdx create mode 100644 docs/content/docs/cookbook/approvals/cancellable-export.mdx create mode 100644 docs/content/docs/cookbook/approvals/meta.json create mode 100644 docs/content/docs/cookbook/approvals/scheduler-agent-supervisor.mdx create mode 100644 docs/content/docs/cookbook/data-processing/aggregator.mdx create mode 100644 docs/content/docs/cookbook/data-processing/batch-processor.mdx create mode 100644 docs/content/docs/cookbook/data-processing/competing-consumers.mdx create mode 100644 docs/content/docs/cookbook/data-processing/map-reduce.mdx create mode 100644 docs/content/docs/cookbook/data-processing/meta.json create mode 100644 docs/content/docs/cookbook/data-processing/pipeline.mdx create mode 100644 docs/content/docs/cookbook/data-processing/priority-queue.mdx create mode 100644 docs/content/docs/cookbook/data-processing/resequencer.mdx create 
mode 100644 docs/content/docs/cookbook/data-processing/scatter-gather.mdx create mode 100644 docs/content/docs/cookbook/data-processing/splitter.mdx create mode 100644 docs/content/docs/cookbook/index.mdx create mode 100644 docs/content/docs/cookbook/meta.json create mode 100644 docs/content/docs/cookbook/notifications/fan-out.mdx create mode 100644 docs/content/docs/cookbook/notifications/meta.json create mode 100644 docs/content/docs/cookbook/notifications/onboarding-drip.mdx create mode 100644 docs/content/docs/cookbook/notifications/publish-subscribe.mdx create mode 100644 docs/content/docs/cookbook/notifications/recipient-list.mdx create mode 100644 docs/content/docs/cookbook/notifications/scheduled-digest.mdx create mode 100644 docs/content/docs/cookbook/notifications/wakeable-reminder.mdx create mode 100644 docs/content/docs/cookbook/observability/correlation-identifier.mdx create mode 100644 docs/content/docs/cookbook/observability/event-sourcing.mdx create mode 100644 docs/content/docs/cookbook/observability/message-history.mdx create mode 100644 docs/content/docs/cookbook/observability/meta.json create mode 100644 docs/content/docs/cookbook/observability/namespaced-streams.mdx create mode 100644 docs/content/docs/cookbook/observability/wire-tap.mdx create mode 100644 docs/content/docs/cookbook/payments/choreography.mdx create mode 100644 docs/content/docs/cookbook/payments/guaranteed-delivery.mdx create mode 100644 docs/content/docs/cookbook/payments/idempotent-receiver.mdx create mode 100644 docs/content/docs/cookbook/payments/meta.json create mode 100644 docs/content/docs/cookbook/payments/process-manager.mdx create mode 100644 docs/content/docs/cookbook/payments/saga.mdx create mode 100644 docs/content/docs/cookbook/payments/transactional-outbox.mdx create mode 100644 docs/content/docs/cookbook/resilience/bulkhead.mdx create mode 100644 docs/content/docs/cookbook/resilience/circuit-breaker.mdx create mode 100644 
docs/content/docs/cookbook/resilience/dead-letter-queue.mdx create mode 100644 docs/content/docs/cookbook/resilience/hedge-request.mdx create mode 100644 docs/content/docs/cookbook/resilience/meta.json create mode 100644 docs/content/docs/cookbook/resilience/retry-backoff.mdx create mode 100644 docs/content/docs/cookbook/resilience/retryable-rate-limit.mdx create mode 100644 docs/content/docs/cookbook/resilience/throttle.mdx create mode 100644 docs/content/docs/cookbook/routing/content-based-router.mdx create mode 100644 docs/content/docs/cookbook/routing/content-enricher.mdx create mode 100644 docs/content/docs/cookbook/routing/detour.mdx create mode 100644 docs/content/docs/cookbook/routing/message-filter.mdx create mode 100644 docs/content/docs/cookbook/routing/message-translator.mdx create mode 100644 docs/content/docs/cookbook/routing/meta.json create mode 100644 docs/content/docs/cookbook/routing/normalizer.mdx create mode 100644 docs/content/docs/cookbook/routing/routing-slip.mdx create mode 100644 docs/content/docs/cookbook/webhooks/async-request-reply.mdx create mode 100644 docs/content/docs/cookbook/webhooks/claim-check.mdx create mode 100644 docs/content/docs/cookbook/webhooks/event-gateway.mdx create mode 100644 docs/content/docs/cookbook/webhooks/meta.json create mode 100644 docs/content/docs/cookbook/webhooks/request-reply.mdx create mode 100644 docs/content/docs/cookbook/webhooks/status-poller.mdx create mode 100644 docs/content/docs/cookbook/webhooks/webhook-basics.mdx create mode 100644 docs/lib/cookbook-tree.ts diff --git a/docs/app/[lang]/docs/[[...slug]]/page.tsx b/docs/app/[lang]/docs/[[...slug]]/page.tsx index e1bca864c3..784d121949 100644 --- a/docs/app/[lang]/docs/[[...slug]]/page.tsx +++ b/docs/app/[lang]/docs/[[...slug]]/page.tsx @@ -4,6 +4,7 @@ import { createRelativeLink } from 'fumadocs-ui/mdx'; import type { Metadata } from 'next'; import { notFound } from 'next/navigation'; import { AgentTraces } from 
'@/components/custom/agent-traces'; +import { CookbookExplorer } from '@/components/geistdocs/cookbook-explorer'; import { FluidComputeCallout } from '@/components/custom/fluid-compute-callout'; import { AskAI } from '@/components/geistdocs/ask-ai'; import { CopyPage } from '@/components/geistdocs/copy-page'; @@ -76,6 +77,7 @@ const Page = async ({ params }: PageProps<'/[lang]/docs/[[...slug]]'>) => { ...AccordionComponents, Tabs, Tab, + CookbookExplorer: () => , // No-op for world MDX files (they redirect to /worlds/[id]) WorldTestingPerformance: WorldTestingPerformanceNoop, })} diff --git a/docs/components/geistdocs/cookbook-explorer.tsx b/docs/components/geistdocs/cookbook-explorer.tsx new file mode 100644 index 0000000000..e039afa192 --- /dev/null +++ b/docs/components/geistdocs/cookbook-explorer.tsx @@ -0,0 +1,211 @@ +'use client'; + +import { useCallback, useMemo, useState } from 'react'; +import Link from 'next/link'; +import { + tree, + recipes, + slugToCategory, + type Branch, + type TreeNode, +} from '@/lib/cookbook-tree'; + +type PathEntry = { nodeId: string; branchIndex: number }; + +export function CookbookExplorer({ lang }: { lang: string }) { + const [path, setPath] = useState([]); + + const { currentNode, resultSlugs } = useMemo(() => { + let node: TreeNode | undefined = tree; + let slugs: string[] | undefined; + + for (const entry of path) { + if (!node) break; + const branch: Branch = node.branches[entry.branchIndex]; + if (branch.slugs) { + slugs = branch.slugs; + node = undefined; + } else if (branch.next) { + node = branch.next; + } + } + + return { currentNode: node, resultSlugs: slugs }; + }, [path]); + + const chooseBranch = useCallback( + (branchIndex: number) => { + if (!currentNode) return; + setPath((prev) => [...prev, { nodeId: currentNode.id, branchIndex }]); + }, + [currentNode] + ); + + const goToStep = useCallback((stepIndex: number) => { + setPath((prev) => prev.slice(0, stepIndex)); + }, []); + + const restart = useCallback(() => 
setPath([]), []); + + const breadcrumbs = useMemo(() => { + const crumbs: { label: string; icon: string }[] = []; + let node: TreeNode | undefined = tree; + for (const entry of path) { + if (!node) break; + const branch: Branch = node.branches[entry.branchIndex]; + crumbs.push({ label: branch.label, icon: branch.icon }); + node = branch.next; + } + return crumbs; + }, [path]); + + const resultRecipes = useMemo(() => { + if (!resultSlugs) return []; + return resultSlugs.map((s) => recipes[s]).filter((r) => r != null); + }, [resultSlugs]); + + const recipeCount = Object.keys(recipes).length; + + return ( +
+ {/* Breadcrumb path */} + {breadcrumbs.length > 0 && ( +
+ + {breadcrumbs.map((crumb, i) => ( +
+ + +
+ ))} +
+ )} + + {/* Current question with branches */} + {currentNode && !resultSlugs && ( +
+

+ {currentNode.question} +

+ {currentNode.id === 'root' && ( +

+ Answer a few questions to find the right pattern from{' '} + {recipeCount} recipes. Each result includes a code example you can + copy. +

+ )} +
+ {currentNode.branches.map((branch, i) => ( + + ))} +
+
+ )} + + {/* Results */} + {resultSlugs && ( +
+

+ Here's what fits +

+

+ {resultRecipes.length} recipe + {resultRecipes.length !== 1 ? 's' : ''} match your path. +

+
+ {resultRecipes.map((recipe) => { + const category = slugToCategory[recipe.slug]; + return ( + +

+ {recipe.whenToUse} +

+
+
+

+ {recipe.title} +

+

+ {recipe.description} +

+
+ + + +
+ + ); + })} +
+ +
+ +
+
+ )} +
+ ); +} diff --git a/docs/content/docs/cookbook/approvals/approval-chain.mdx b/docs/content/docs/cookbook/approvals/approval-chain.mdx new file mode 100644 index 0000000000..1daf9013a8 --- /dev/null +++ b/docs/content/docs/cookbook/approvals/approval-chain.mdx @@ -0,0 +1,244 @@ +--- +title: Approval Chain +description: Route work through a sequence of approvers; advance only when each step signs off. +type: guide +summary: Purchase orders needing manager, director, VP sign-off with per-level escalation timeouts. +--- + +Use the approval chain pattern when a request must pass through multiple approval levels in sequence. Each level has its own timeout, and the chain advances only when an approver signs off. If an approver rejects, the chain stops immediately. + +## Pattern + +The workflow determines the required approval levels based on the request (e.g., amount thresholds), then iterates through each level. At each level, a hook is created for the approver and raced against a timeout. If the timeout fires, the chain escalates to the next level. If approved, the chain completes. If rejected, the workflow stops and returns the rejection. + +### Simplified + +```typescript lineNumbers +import { defineHook, sleep } from "workflow"; + +type ApprovalRole = "manager" | "director" | "vp"; + +declare function notifyApprover(expenseId: string, role: ApprovalRole): Promise; // @setup + +export async function approvalChain(expenseId: string, amount: number) { + "use workflow"; + + const levels: { role: ApprovalRole; timeout: string }[] = + amount < 500 ? [{ role: "manager", timeout: "10s" }] : + amount < 5000 ? 
[{ role: "manager", timeout: "10s" }, { role: "director", timeout: "8s" }] : + [{ role: "manager", timeout: "10s" }, { role: "director", timeout: "8s" }, { role: "vp", timeout: "6s" }]; + + for (const level of levels) { + await notifyApprover(expenseId, level.role); + + const levelHook = defineHook<{ approved: boolean; comment?: string }>(); + const hook = levelHook.create({ + token: `approval:${expenseId}:${level.role}`, + }); + + const result = await Promise.race([ + hook.then((p) => ({ type: "decision" as const, payload: p })), + sleep(level.timeout).then(() => ({ type: "timeout" as const })), + ]); + + if (result.type === "timeout") continue; // Escalate to next level + + if (!result.payload.approved) { + return { expenseId, status: "rejected", decidedBy: level.role }; + } + + return { expenseId, status: "approved", decidedBy: level.role }; + } + + return { expenseId, status: "expired" }; +} +``` + +### Full Implementation + +```typescript lineNumbers +import { defineHook, getWritable, sleep } from "workflow"; + +export type ApprovalRole = "manager" | "director" | "vp"; + +export type ApprovalSignal = { + approved: boolean; + comment?: string; + decidedBy?: string; +}; + +export type ChainEvent = + | { type: "submitted"; expenseId: string; amount: number; levels: ApprovalRole[] } + | { type: "level_waiting"; role: ApprovalRole; token: string; timeout: string } + | { type: "level_approved"; role: ApprovalRole; comment?: string } + | { type: "level_rejected"; role: ApprovalRole; comment?: string } + | { type: "level_timeout"; role: ApprovalRole } + | { type: "approved"; decidedBy: ApprovalRole; comment?: string } + | { type: "rejected"; decidedBy: ApprovalRole; comment?: string } + | { type: "expired" } + | { type: "done"; status: "approved" | "rejected" | "expired" }; + +type ApprovalLevel = { + role: ApprovalRole; + timeout: "10s" | "8s" | "6s"; +}; + +const LEVEL_CHAIN: readonly ApprovalLevel[] = [ + { role: "manager", timeout: "10s" }, + { role: "director", 
timeout: "8s" }, + { role: "vp", timeout: "6s" }, +] as const; + +export function getApprovalLevelsForAmount(amount: number): ApprovalLevel[] { + if (amount < 500) return [LEVEL_CHAIN[0]]; + if (amount < 5000) return [LEVEL_CHAIN[0], LEVEL_CHAIN[1]]; + return [...LEVEL_CHAIN]; +} + +export async function approvalChain(expenseId: string, amount: number) { + "use workflow"; + + const levels = getApprovalLevelsForAmount(amount); + + await submitExpense(expenseId, amount, levels.map((level) => level.role)); + + for (const level of levels) { + await notifyLevel(expenseId, level.role, level.timeout); + + const levelHook = defineHook(); + const hook = levelHook.create({ + token: `approval:${expenseId}:${level.role}`, + }); + + const result = await Promise.race([ + hook.then((payload) => ({ type: "decision" as const, payload })), + sleep(level.timeout).then(() => ({ type: "timeout" as const })), + ]); + + if (result.type === "timeout") { + await recordTimeout(expenseId, level.role); + continue; + } + + if (!result.payload.approved) { + await rejectExpense(expenseId, level.role, result.payload.comment); + await emitDone("rejected"); + return { + expenseId, + amount, + status: "rejected" as const, + decidedBy: level.role, + comment: result.payload.comment, + }; + } + + await approveExpense(expenseId, level.role, result.payload.comment); + await emitDone("approved"); + return { + expenseId, + amount, + status: "approved" as const, + decidedBy: level.role, + comment: result.payload.comment, + }; + } + + await expireExpense(expenseId); + await emitDone("expired"); + return { + expenseId, + amount, + status: "timed_out" as const, + }; +} + +async function submitExpense(expenseId: string, amount: number, levels: ApprovalRole[]) { + "use step"; + + const writer = getWritable().getWriter(); + try { + await writer.write({ type: "submitted", expenseId, amount, levels }); + } finally { + writer.releaseLock(); + } +} + +async function notifyLevel(expenseId: string, role: ApprovalRole, 
timeout: string) { + "use step"; + + const writer = getWritable().getWriter(); + try { + const token = `approval:${expenseId}:${role}`; + await writer.write({ type: "level_waiting", role, token, timeout }); + } finally { + writer.releaseLock(); + } +} + +async function recordTimeout(expenseId: string, role: ApprovalRole) { + "use step"; + + const writer = getWritable().getWriter(); + try { + await writer.write({ type: "level_timeout", role }); + } finally { + writer.releaseLock(); + } + console.info("[approval-chain] level_timeout", { expenseId, role }); +} + +async function approveExpense(expenseId: string, role: ApprovalRole, comment?: string) { + "use step"; + + const writer = getWritable().getWriter(); + try { + await writer.write({ type: "level_approved", role, comment }); + await writer.write({ type: "approved", decidedBy: role, comment }); + } finally { + writer.releaseLock(); + } +} + +async function rejectExpense(expenseId: string, role: ApprovalRole, comment?: string) { + "use step"; + + const writer = getWritable().getWriter(); + try { + await writer.write({ type: "level_rejected", role, comment }); + await writer.write({ type: "rejected", decidedBy: role, comment }); + } finally { + writer.releaseLock(); + } +} + +async function emitDone(status: "approved" | "rejected" | "expired") { + "use step"; + + const writer = getWritable().getWriter(); + try { + await writer.write({ type: "done", status }); + } finally { + writer.releaseLock(); + } +} + +async function expireExpense(expenseId: string) { + "use step"; + + const writer = getWritable().getWriter(); + try { + await writer.write({ type: "expired" }); + } finally { + writer.releaseLock(); + } + console.info("[approval-chain] expense_timed_out", { expenseId }); +} +``` + +## Key APIs + +- [`"use workflow"`](/docs/api-reference/workflow/use-workflow) — declares the orchestrator function +- [`"use step"`](/docs/api-reference/workflow/use-step) — declares step functions with full Node.js access +- 
[`defineHook()`](/docs/api-reference/workflow/define-hook) — creates typed hooks for each approval level +- [`sleep()`](/docs/api-reference/workflow/sleep) — per-level escalation timeout +- [`getWritable()`](/docs/api-reference/workflow/get-writable) — streams chain progress to the client diff --git a/docs/content/docs/cookbook/approvals/approval-gate.mdx b/docs/content/docs/cookbook/approvals/approval-gate.mdx new file mode 100644 index 0000000000..e323b4b0ef --- /dev/null +++ b/docs/content/docs/cookbook/approvals/approval-gate.mdx @@ -0,0 +1,222 @@ +--- +title: Approval Gate +description: Pause the workflow until a human approves or rejects, then resume or fail. +type: guide +summary: Content moderation hold — pause publishing until a reviewer clicks approve or reject. +--- + +Use the approval gate pattern when a workflow must pause and wait for human input before continuing. The workflow creates a hook that external systems (email links, Slack buttons, admin UIs) can call to resume execution. + +## Pattern + +The workflow creates a deterministic hook token and races it against a `sleep()` timeout. If the hook fires before the timeout, the workflow inspects the approval payload and either fulfills or cancels. If the timeout wins, the workflow auto-cancels. This pattern survives server restarts because both the hook and the sleep are durably persisted. 
+ +### Simplified + +```typescript lineNumbers +import { defineHook, sleep } from "workflow"; + +export const approvalHook = defineHook<{ approved: boolean; comment?: string }>(); + +declare function requestApproval(orderId: string): Promise; // @setup +declare function fulfillOrder(orderId: string): Promise; // @setup +declare function cancelOrder(orderId: string, reason: string): Promise; // @setup + +export async function approvalGate(orderId: string) { + "use workflow"; + + await requestApproval(orderId); + + const hook = approvalHook.create({ token: `order_approval:${orderId}` }); + + // Race: human approval vs. timeout + const result = await Promise.race([ + hook.then((payload) => ({ type: "approval" as const, payload })), + sleep("24h").then(() => ({ type: "timeout" as const })), + ]); + + if (result.type === "timeout") { + await cancelOrder(orderId, "Approval timed out"); + return { orderId, status: "timeout" }; + } + + if (result.payload.approved) { + await fulfillOrder(orderId); + return { orderId, status: "approved" }; + } else { + await cancelOrder(orderId, result.payload.comment || "Rejected"); + return { orderId, status: "rejected" }; + } +} +``` + +### Full Implementation + +```typescript lineNumbers +import { defineHook, getWritable, sleep } from "workflow"; +import type { StringValue } from "ms"; + +// Define the approval payload type +export interface ApprovalPayload { + approved: boolean; + comment?: string; + approvedBy?: string; +} + +// Define the hook for type-safe approval handling +export const orderApprovalHook = defineHook(); + +// Result type for the workflow +export interface ApprovalResult { + orderId: string; + status: "approved" | "rejected" | "timeout"; + comment?: string; + approvedBy?: string; +} + +// Typed events streamed to the UI via getWritable() +export type ApprovalEvent = + | { type: "request_sent"; orderId: string } + | { type: "waiting"; orderId: string; token: string; timeoutMs: number } + | { type: "approved"; orderId: 
string; approvedBy?: string; comment?: string } + | { type: "rejected"; orderId: string; approvedBy?: string; comment?: string } + | { type: "timeout"; orderId: string } + | { type: "fulfilling"; orderId: string } + | { type: "fulfilled"; orderId: string } + | { type: "cancelling"; orderId: string; reason: string } + | { type: "cancelled"; orderId: string; reason: string } + | { type: "done"; orderId: string; status: "approved" | "rejected" | "timeout" }; + +const TIMEOUT_MS: Record = { + "10s": 10_000, + "30s": 30_000, + "1m": 60_000, + "5m": 300_000, + "24h": 86_400_000, +}; + +/** + * Approval Gate Workflow + * + * Demonstrates the "Signal + timer" pattern: + * - Creates a deterministic hook token for external systems to resume + * - Uses Promise.race to implement timeout behavior + * - Waits for human approval or times out after specified duration + */ +export async function approvalGate( + orderId: string, + timeout: StringValue = "24h" +): Promise { + "use workflow"; + + // Request approval (e.g., send email, create ticket, notify Slack) + await requestApproval(orderId); + await emit({ type: "request_sent", orderId }); + + // Create hook with deterministic token based on orderId + const hook = orderApprovalHook.create({ + token: `order_approval:${orderId}`, + }); + + const timeoutMs = TIMEOUT_MS[timeout] ?? 
30_000; + await emit({ + type: "waiting", + orderId, + token: hook.token, + timeoutMs, + }); + + // Race between approval hook and timeout + const result = await Promise.race([ + hook.then((payload) => ({ + type: "approval" as const, + payload, + })), + sleep(timeout).then(() => ({ + type: "timeout" as const, + payload: null, + })), + ]); + + if (result.type === "timeout") { + await emit({ type: "timeout", orderId }); + await emit({ type: "cancelling", orderId, reason: "Approval timed out" }); + await cancelOrder(orderId, "Approval timed out"); + await emit({ type: "cancelled", orderId, reason: "Approval timed out" }); + await emit({ type: "done", orderId, status: "timeout" }); + return { orderId, status: "timeout" }; + } + + const { approved, comment, approvedBy } = result.payload!; + + if (approved) { + await emit({ type: "approved", orderId, approvedBy, comment }); + await emit({ type: "fulfilling", orderId }); + await fulfillOrder(orderId); + await emit({ type: "fulfilled", orderId }); + await emit({ type: "done", orderId, status: "approved" }); + return { orderId, status: "approved", comment, approvedBy }; + } else { + await emit({ type: "rejected", orderId, approvedBy, comment }); + await emit({ type: "cancelling", orderId, reason: comment || "Rejected by approver" }); + await cancelOrder(orderId, comment || "Rejected by approver"); + await emit({ type: "cancelled", orderId, reason: comment || "Rejected by approver" }); + await emit({ type: "done", orderId, status: "rejected" }); + return { orderId, status: "rejected", comment, approvedBy }; + } +} + +/** + * Step: Emit a single event to the UI stream. + * Re-acquires the writer inside the step so it survives durable suspension. 
+ */ +async function emit(event: T): Promise { + "use step"; + const writer = getWritable().getWriter(); + try { + await writer.write(event); + } finally { + writer.releaseLock(); + } +} + +/** + * Step: Request approval from external system + */ +async function requestApproval(orderId: string): Promise { + "use step"; + // Simulate sending approval request (email, ticket, Slack notification) + await delay(500); + console.log(`[Order ${orderId}] Approval request sent`); +} + +/** + * Step: Fulfill the order after approval + */ +async function fulfillOrder(orderId: string): Promise { + "use step"; + await delay(600); + console.log(`[Order ${orderId}] Order fulfilled successfully`); +} + +/** + * Step: Cancel the order (on rejection or timeout) + */ +async function cancelOrder(orderId: string, reason: string): Promise { + "use step"; + await delay(500); + console.log(`[Order ${orderId}] Order cancelled: ${reason}`); +} + +function delay(ms: number): Promise { + return new Promise((resolve) => setTimeout(resolve, ms)); +} +``` + +## Key APIs + +- [`"use workflow"`](/docs/api-reference/workflow/use-workflow) — declares the orchestrator function +- [`"use step"`](/docs/api-reference/workflow/use-step) — declares step functions with full Node.js access +- [`defineHook()`](/docs/api-reference/workflow/define-hook) — creates a typed hook for external signals +- [`sleep()`](/docs/api-reference/workflow/sleep) — durable timeout that survives restarts +- [`getWritable()`](/docs/api-reference/workflow/get-writable) — streams approval events to the client diff --git a/docs/content/docs/cookbook/approvals/cancellable-export.mdx b/docs/content/docs/cookbook/approvals/cancellable-export.mdx new file mode 100644 index 0000000000..8c6d5f1a0b --- /dev/null +++ b/docs/content/docs/cookbook/approvals/cancellable-export.mdx @@ -0,0 +1,127 @@ +--- +title: Cancellable Export +description: Long-running job that the user can cancel while steps are in flight. 
+type: guide +summary: User starts a 100k-row data export and hits Cancel mid-flight without waiting for completion. +--- + +Use this pattern when a long-running job should be cancellable by the user at any point. Each step in the loop acts as a cancellation checkpoint -- when `run.cancel()` is called, the workflow stops between steps. + +## Pattern + +The workflow iterates through sections of work, with each iteration awaiting a step. Between steps, the runtime checks for cancellation. If `run.cancel()` was called, the workflow terminates gracefully without executing further steps. No special cancellation logic is needed in the workflow code itself. + +### Simplified + +```typescript lineNumbers +declare function generateSection(index: number, title: string): Promise<{ title: string; status: string }>; // @setup + +const SECTIONS = [ + "Introduction", "Market Analysis", "Technical Architecture", + "Implementation Plan", "Risk Assessment", "Financial Projections", + "Timeline", "Team Structure", "Competitive Landscape", "Conclusion", +]; + +export async function generateReport(accountId: string) { + "use workflow"; + + // Each await is a cancellation checkpoint — + // run.cancel() takes effect between steps. 
+ const sections = []; + + for (let i = 0; i < SECTIONS.length; i++) { + const result = await generateSection(i, SECTIONS[i]); + sections.push(result); + } + + return { accountId, sections, status: "completed" }; +} +``` + +### Full Implementation + +```typescript lineNumbers +import { getWritable } from "workflow"; + +export type SectionEvent = + | { type: "section_start"; index: number; title: string } + | { type: "section_done"; index: number; title: string } + | { type: "complete"; total: number } + | { type: "cancelled"; completedCount: number; total: number }; + +const SECTION_DELAY_MS = 500; + +function delay(ms: number): Promise { + return new Promise((resolve) => setTimeout(resolve, ms)); +} + +const SECTION_TITLES = [ + "Introduction", + "Market Analysis", + "Technical Architecture", + "Implementation Plan", + "Risk Assessment", + "Financial Projections", + "Timeline", + "Team Structure", + "Competitive Landscape", + "Conclusion", +]; + +export async function generateReport( + accountId: string, + systemPrompt: string +) { + "use workflow"; + + // Each await is a cancellation checkpoint — + // run.cancel() takes effect between steps. 
+ const sections: { title: string; status: string }[] = []; + + for (let i = 0; i < SECTION_TITLES.length; i++) { + const title = SECTION_TITLES[i]; + const result = await generateSection(i, title, sections[i - 1]?.title); + sections.push(result); + } + + await emitDone(sections.length); + + return { accountId, sections, status: "completed" }; +} + +async function emitDone(total: number) { + "use step"; + const writer = getWritable().getWriter(); + try { + await writer.write({ type: "complete", total }); + } finally { + writer.releaseLock(); + } +} + +emitDone.maxRetries = 0; + +async function generateSection( + index: number, + title: string, + previousTitle?: string +) { + "use step"; + + const writer = getWritable().getWriter(); + try { + await writer.write({ type: "section_start", index, title }); + await delay(SECTION_DELAY_MS); + await writer.write({ type: "section_done", index, title }); + return { title, previous: previousTitle, status: "generated" }; + } finally { + writer.releaseLock(); + } +} +``` + +## Key APIs + +- [`"use workflow"`](/docs/api-reference/workflow/use-workflow) — declares the orchestrator function +- [`"use step"`](/docs/api-reference/workflow/use-step) — each step is a cancellation checkpoint +- [`getWritable()`](/docs/api-reference/workflow/get-writable) — streams section progress to the client diff --git a/docs/content/docs/cookbook/approvals/meta.json b/docs/content/docs/cookbook/approvals/meta.json new file mode 100644 index 0000000000..1c87530990 --- /dev/null +++ b/docs/content/docs/cookbook/approvals/meta.json @@ -0,0 +1,9 @@ +{ + "title": "Approvals", + "pages": [ + "approval-gate", + "cancellable-export", + "approval-chain", + "scheduler-agent-supervisor" + ] +} diff --git a/docs/content/docs/cookbook/approvals/scheduler-agent-supervisor.mdx b/docs/content/docs/cookbook/approvals/scheduler-agent-supervisor.mdx new file mode 100644 index 0000000000..6b49c38ebe --- /dev/null +++ 
b/docs/content/docs/cookbook/approvals/scheduler-agent-supervisor.mdx @@ -0,0 +1,338 @@ +--- +title: Scheduler-Agent-Supervisor +description: Scheduled triggers plus supervised agent/worker style execution. +type: guide +summary: Dispatch content generation to agents in sequence, checking quality thresholds with escalation. +--- + +Use the scheduler-agent-supervisor pattern when work should be dispatched to agents in a priority order, with a supervisor checking quality after each attempt. If an agent's output does not meet the threshold, the supervisor escalates to the next agent. + +## Pattern + +The workflow iterates through a list of agents (fast, thorough, premium). For each agent, it dispatches work and runs a quality check. If the quality passes, the content is published. If it fails, the workflow sleeps (cooldown) and escalates to the next agent. If all agents fail, the workflow reports failure. + +### Simplified + +```typescript lineNumbers +import { sleep } from "workflow"; + +type AgentId = "fast-model" | "thorough-model" | "premium-model"; + +declare function dispatchToAgent(agentId: AgentId, topic: string): Promise<{ draft: string; score: number }>; // @setup +declare function publishContent(agentId: AgentId, draft: string): Promise<{ publicationId: string }>; // @setup + +const AGENTS: AgentId[] = ["fast-model", "thorough-model", "premium-model"]; + +export async function schedulerAgentSupervisor( + topic: string, + requiredScore: number = 80 +) { + "use workflow"; + + for (const agentId of AGENTS) { + const { draft, score } = await dispatchToAgent(agentId, topic); + + if (score >= requiredScore) { + const { publicationId } = await publishContent(agentId, draft); + return { status: "published", publishedBy: agentId, publicationId }; + } + + // Cooldown before escalating to next agent + await sleep("2s"); + } + + return { status: "failed" }; +} +``` + +### Full Implementation + +```typescript lineNumbers +// getWritable is used here to stream demo UI 
events.
+// A production workflow wouldn't need these unless it has its own streaming UI.
+import { getWritable, sleep } from "workflow";
+
+const SUPERVISOR_AGENTS = [
+  { id: "fast-model", label: "Fast Model" },
+  { id: "thorough-model", label: "Thorough Model" },
+  { id: "premium-model", label: "Premium Model" },
+] as const;
+
+export type SupervisorAgentId = (typeof SUPERVISOR_AGENTS)[number]["id"];
+export type QualityThreshold = "low" | "medium" | "high";
+
+const QUALITY_THRESHOLD_SCORE: Record<QualityThreshold, number> = {
+  low: 65,
+  medium: 80,
+  high: 92,
+};
+
+const AGENT_QUALITY_SCORE: Record<SupervisorAgentId, number> = {
+  "fast-model": 68,
+  "thorough-model": 82,
+  "premium-model": 89,
+};
+
+// Demo: simulate real-world latency so the UI can show progress.
+const AGENT_GENERATION_MS: Record<SupervisorAgentId, number> = {
+  "fast-model": 1400,
+  "thorough-model": 2100,
+  "premium-model": 2800,
+};
+
+const QUALITY_CHECK_MS = 600;
+const PUBLISH_MS = 700;
+
+function delay(ms: number): Promise<void> {
+  return new Promise((resolve) => setTimeout(resolve, ms));
+}
+
+export type SupervisorEvent =
+  | { type: "agent_dispatched"; agentId: SupervisorAgentId; agentIndex: number; label: string }
+  | { type: "agent_generating"; agentId: SupervisorAgentId; progressPct: number }
+  | { type: "agent_generated"; agentId: SupervisorAgentId }
+  | { type: "quality_check"; agentId: SupervisorAgentId }
+  | { type: "quality_result"; agentId: SupervisorAgentId; score: number; requiredScore: number; passed: boolean }
+  | { type: "cooldown"; fromAgentId: SupervisorAgentId; toAgentId: SupervisorAgentId; reason: string }
+  | { type: "publishing"; agentId: SupervisorAgentId }
+  | { type: "done"; publishedBy: SupervisorAgentId; publicationId: string; qualityScore: number }
+  | { type: "failed"; reason: string };
+
+type DispatchResult = {
+  agentId: SupervisorAgentId;
+  topic: string;
+  draft: string;
+  estimatedScore: number;
+};
+
+type QualityGateResult = {
+  score: number;
+  requiredScore: number;
+  passed: boolean;
+  reason: string;
+};
+
+type
PublishResult = { + publicationId: string; +}; + +type RerouteRecord = { + from: SupervisorAgentId; + to: SupervisorAgentId; + reason: string; + cooldown: "2s"; +}; + +export type SchedulerAgentSupervisorResult = + | { + status: "published"; + topic: string; + threshold: QualityThreshold; + requiredScore: number; + qualityScore: number; + publishedBy: SupervisorAgentId; + publicationId: string; + reroutes: RerouteRecord[]; + } + | { + status: "failed"; + topic: string; + threshold: QualityThreshold; + requiredScore: number; + reroutes: RerouteRecord[]; + }; + +export async function schedulerAgentSupervisor( + topic: string, + threshold: QualityThreshold = "medium" +): Promise { + "use workflow"; + + const normalizedTopic = topic.trim(); + const requiredScore = QUALITY_THRESHOLD_SCORE[threshold]; + const reroutes: RerouteRecord[] = []; + + for (let agentIndex = 0; agentIndex < SUPERVISOR_AGENTS.length; agentIndex += 1) { + const agent = SUPERVISOR_AGENTS[agentIndex]; + + const draft = await dispatchToAgent(agent.id, normalizedTopic, agentIndex); + const quality = await checkQuality(draft, requiredScore); + + if (quality.passed) { + const published = await publishContent(draft, quality); + + return { + status: "published", + topic: normalizedTopic, + threshold, + requiredScore, + qualityScore: quality.score, + publishedBy: agent.id, + publicationId: published.publicationId, + reroutes, + }; + } + + const nextAgent = SUPERVISOR_AGENTS[agentIndex + 1]; + if (!nextAgent) { + break; + } + + const rerouteReason = `${agent.id} score ${quality.score} below ${quality.requiredScore}`; + reroutes.push({ + from: agent.id, + to: nextAgent.id, + reason: rerouteReason, + cooldown: "2s", + }); + + await emitCooldown(agent.id, nextAgent.id, rerouteReason); + await sleep("2s"); + } + + await emitFailed("all_agents_failed_quality"); + + return { + status: "failed", + topic: normalizedTopic, + threshold, + requiredScore, + reroutes, + }; +} + +async function dispatchToAgent( + agentId: 
SupervisorAgentId, + topic: string, + agentIndex: number +): Promise { + "use step"; + + const writer = getWritable().getWriter(); + const estimatedScore = AGENT_QUALITY_SCORE[agentId]; + const agentDef = SUPERVISOR_AGENTS.find((a) => a.id === agentId)!; + + try { + await writer.write({ + type: "agent_dispatched", + agentId, + agentIndex, + label: agentDef.label, + }); + + // Demo: simulate generation with progress ticks + const genMs = AGENT_GENERATION_MS[agentId]; + const tickCount = 5; + const tickMs = genMs / tickCount; + for (let tick = 1; tick <= tickCount; tick++) { + await delay(tickMs); + await writer.write({ + type: "agent_generating", + agentId, + progressPct: Math.round((tick / tickCount) * 100), + }); + } + + await writer.write({ type: "agent_generated", agentId }); + + const draft = `Draft ${agentIndex + 1} for topic "${topic}" by ${agentId}`; + return { agentId, topic, draft, estimatedScore }; + } finally { + writer.releaseLock(); + } +} + +async function checkQuality( + draft: DispatchResult, + requiredScore: number +): Promise { + "use step"; + + const writer = getWritable().getWriter(); + + try { + await writer.write({ type: "quality_check", agentId: draft.agentId }); + await delay(QUALITY_CHECK_MS); + + const score = draft.estimatedScore; + const passed = score >= requiredScore; + const reason = passed + ? 
`Score ${score} passed threshold ${requiredScore}` + : `Score ${score} below threshold ${requiredScore}`; + + await writer.write({ + type: "quality_result", + agentId: draft.agentId, + score, + requiredScore, + passed, + }); + + return { score, requiredScore, passed, reason }; + } finally { + writer.releaseLock(); + } +} + +async function publishContent( + draft: DispatchResult, + quality: QualityGateResult +): Promise { + "use step"; + + const writer = getWritable().getWriter(); + + try { + await writer.write({ type: "publishing", agentId: draft.agentId }); + await delay(PUBLISH_MS); + + const publicationId = `pub_${draft.agentId}_${Date.now().toString(36)}`; + + await writer.write({ + type: "done", + publishedBy: draft.agentId, + publicationId, + qualityScore: quality.score, + }); + + return { publicationId }; + } finally { + writer.releaseLock(); + } +} + +async function emitCooldown( + fromAgentId: SupervisorAgentId, + toAgentId: SupervisorAgentId, + reason: string +): Promise { + "use step"; + const writer = getWritable().getWriter(); + try { + await writer.write({ type: "cooldown", fromAgentId, toAgentId, reason }); + } finally { + writer.releaseLock(); + } +} + +emitCooldown.maxRetries = 0; + +async function emitFailed(reason: string): Promise { + "use step"; + const writer = getWritable().getWriter(); + try { + await writer.write({ type: "failed", reason }); + } finally { + writer.releaseLock(); + } +} + +emitFailed.maxRetries = 0; +``` + +## Key APIs + +- [`"use workflow"`](/docs/api-reference/workflow/use-workflow) — declares the orchestrator function +- [`"use step"`](/docs/api-reference/workflow/use-step) — declares step functions with full Node.js access +- [`sleep()`](/docs/api-reference/workflow/sleep) — durable cooldown between agent escalations +- [`getWritable()`](/docs/api-reference/workflow/get-writable) — streams agent progress to the client diff --git a/docs/content/docs/cookbook/data-processing/aggregator.mdx 
b/docs/content/docs/cookbook/data-processing/aggregator.mdx new file mode 100644 index 0000000000..2caeeae6c6 --- /dev/null +++ b/docs/content/docs/cookbook/data-processing/aggregator.mdx @@ -0,0 +1,221 @@ +--- +title: Aggregator +description: Merge many parallel outcomes into one combined result. +type: guide +summary: Collect inventory from multiple warehouses with a timeout so stragglers don't block checkout. +--- + +Collect inventory from multiple warehouses with a timeout so stragglers don't block checkout. Use this pattern when you need to gather signals from multiple sources and combine them, but cannot wait forever for all of them. + +## Pattern + +Create one hook per expected source, then race `Promise.all` (all sources responded) against `sleep` (deadline). Whether all signals arrive or the timeout fires first, aggregate whatever data you have into a single result. + +### Simplified + +```typescript lineNumbers +import { defineHook, sleep } from "workflow"; + +export const aggregatorSignal = defineHook<{ source: string; value: number }>(); + +declare function processBatch(batchId: string, received: Map): Promise<{ totalValue: number }>; // @setup + +const SOURCES = ["warehouse-a", "warehouse-b", "warehouse-c"] as const; + +export async function aggregator(batchId: string, timeoutMs: number = 8000) { + "use workflow"; + + const hooks = SOURCES.map((source) => + aggregatorSignal.create({ token: `${source}:${batchId}` }).then((payload) => ({ source, payload })) + ); + + const outcome = await Promise.race([ + Promise.all(hooks).then((results) => ({ type: "ready" as const, results })), + sleep(`${timeoutMs}ms`).then(() => ({ type: "timeout" as const, results: [] as { source: string; payload: { source: string; value: number } }[] })), + ]); + + const received = new Map(outcome.results.map(({ source, payload }) => [source, payload])); + const summary = await processBatch(batchId, received); + + return { batchId, status: outcome.type === "ready" ? 
"aggregated" : "partial", summary }; +} +``` + +### Full Implementation + +```typescript lineNumbers +import { defineHook, getWritable, sleep } from "workflow"; + +// --------------------------------------------------------------------------- +// Typed events streamed to the UI via getWritable() +// --------------------------------------------------------------------------- +export type AggregatorEvent = + | { type: "collecting"; batchId: string; tokens: Record; expectedCount: number; timeoutMs: number } + | { type: "signal_received"; batchId: string; source: string; value: number; receivedCount: number; expectedCount: number } + | { type: "all_collected"; batchId: string } + | { type: "timeout"; batchId: string; missing: string[]; received: string[] } + | { type: "processing"; batchId: string } + | { type: "done"; batchId: string; status: "aggregated" | "partial"; summary: AggregatorSummary }; + +export type AggregatorSummary = { + totalSignals: number; + receivedSignals: number; + totalValue: number; + sources: string[]; +}; + +// --------------------------------------------------------------------------- +// Hook definition — each source sends { source, value } +// --------------------------------------------------------------------------- +export type SignalPayload = { source: string; value: number }; + +export const aggregatorSignal = defineHook(); + +const SOURCES = ["warehouse-a", "warehouse-b", "warehouse-c"] as const; +export type SourceId = (typeof SOURCES)[number]; + +// --------------------------------------------------------------------------- +// Workflow: collect N signals with a timeout, then aggregate +// --------------------------------------------------------------------------- +export async function aggregator( + batchId: string, + timeoutMs: number = 8000 +): Promise<{ batchId: string; status: "aggregated" | "partial"; summary: AggregatorSummary }> { + "use workflow"; + + // Create one hook per source with deterministic tokens + const tokens: 
Record<string, string> = {};
+  const hooks = SOURCES.map((source) => {
+    const token = `${source}:${batchId}`;
+    tokens[source] = token;
+    return { source, hook: aggregatorSignal.create({ token }), token };
+  });
+
+  await emit({
+    type: "collecting",
+    batchId,
+    tokens,
+    expectedCount: SOURCES.length,
+    timeoutMs,
+  });
+
+  // Track received signals
+  const received = new Map<SourceId, SignalPayload>();
+
+  const signalPromises = hooks.map(({ source, hook }) =>
+    hook.then((payload) => {
+      received.set(source, payload);
+      return { source, payload };
+    })
+  );
+
+  // Race: collect all signals OR timeout
+  const outcome = await Promise.race([
+    Promise.all(signalPromises).then((results) => ({
+      type: "ready" as const,
+      results,
+    })),
+    sleep(`${timeoutMs}ms`).then(() => ({
+      type: "timeout" as const,
+      results: [] as { source: string; payload: SignalPayload }[],
+    })),
+  ]);
+
+  // Snapshot received signals at the timeout boundary so late arrivals
+  // cannot mutate the summary or trigger additional events.
+  const receivedSnapshot = new Map(received);
+
+  // Emit signal_received events for signals that arrived
+  for (const { source, payload } of outcome.results) {
+    await emit({
+      type: "signal_received",
+      batchId,
+      source,
+      value: payload.value,
+      receivedCount: receivedSnapshot.size,
+      expectedCount: SOURCES.length,
+    });
+  }
+
+  if (outcome.type === "timeout") {
+    const receivedSources = [...receivedSnapshot.keys()];
+    const missing = SOURCES.filter((s) => !receivedSnapshot.has(s));
+
+    // Emit signal_received for signals that arrived before timeout but
+    // were not part of the Promise.all resolution (partial arrivals).
+    for (const [source, payload] of receivedSnapshot) {
+      if (!outcome.results.some((r) => r.source === source)) {
+        await emit({
+          type: "signal_received",
+          batchId,
+          source,
+          value: payload.value,
+          receivedCount: receivedSnapshot.size,
+          expectedCount: SOURCES.length,
+        });
+      }
+    }
+
+    await emit({ type: "timeout", batchId, missing, received: receivedSources });
+    const summary = await processBatch(batchId, receivedSnapshot);
+    await emit({ type: "done", batchId, status: "partial", summary });
+    return { batchId, status: "partial" as const, summary };
+  }
+
+  await emit({ type: "all_collected", batchId });
+  const summary = await processBatch(batchId, receivedSnapshot);
+  await emit({ type: "done", batchId, status: "aggregated", summary });
+  return { batchId, status: "aggregated" as const, summary };
+}
+
+// ---------------------------------------------------------------------------
+// Step: emit a single event to the UI stream
+// ---------------------------------------------------------------------------
+async function emit<T extends AggregatorEvent>(event: T): Promise<void> {
+  "use step";
+  const writer = getWritable().getWriter();
+  try {
+    await writer.write(event);
+  } finally {
+    writer.releaseLock();
+  }
+}
+
+// ---------------------------------------------------------------------------
+// Step: process collected signals into an aggregated result
+// ---------------------------------------------------------------------------
+async function processBatch(
+  batchId: string,
+  received: ReadonlyMap<SourceId, SignalPayload>
+): Promise<AggregatorSummary> {
+  "use step";
+
+  const writer = getWritable().getWriter();
+  try {
+    await writer.write({ type: "processing", batchId });
+  } finally {
+    writer.releaseLock();
+  }
+
+  // Simulate processing delay
+  await new Promise((resolve) => setTimeout(resolve, 600));
+
+  const sources = [...received.keys()];
+  const totalValue = [...received.values()].reduce((sum, p) => sum + p.value, 0);
+
+  return {
+    totalSignals: SOURCES.length,
+    receivedSignals: received.size,
+    totalValue,
sources, + }; +} +``` + +## Key APIs + +- [`"use workflow"`](/docs/foundations/workflows-and-steps) — marks the orchestrator function +- [`"use step"`](/docs/foundations/workflows-and-steps) — marks functions with full Node.js access +- [`defineHook()`](/docs/api-reference/workflow/define-hook) — creates a named hook that suspends until signaled +- [`sleep()`](/docs/api-reference/workflow/sleep) — durable timer that survives restarts +- [`getWritable()`](/docs/api-reference/workflow/get-writable) — streams events to the client diff --git a/docs/content/docs/cookbook/data-processing/batch-processor.mdx b/docs/content/docs/cookbook/data-processing/batch-processor.mdx new file mode 100644 index 0000000000..d4c656124e --- /dev/null +++ b/docs/content/docs/cookbook/data-processing/batch-processor.mdx @@ -0,0 +1,171 @@ +--- +title: Batch Processor +description: Collect items over time or up to a size, then process them as a single batch. +type: guide +summary: Process a large CSV import in batches, auto-resuming from the last completed batch after a crash. +--- + +Process a large CSV import in batches, auto-resuming from the last completed batch after a crash. Use this pattern for bulk operations where processing the entire dataset in one step would be too slow or risky. + +## Pattern + +Divide the total work into fixed-size batches and process each as its own step. Because each step is recorded in the event log, a crash mid-way automatically resumes from the last completed batch on replay. 
+ +### Simplified + +```typescript lineNumbers +declare function processBatch(batch: number, start: number, end: number): Promise; // @setup + +export async function batchProcessor( + total: number = 10_000, + batchSize: number = 1_000 +) { + "use workflow"; + + const totalBatches = Math.ceil(total / batchSize); + + for (let batch = 1; batch <= totalBatches; batch++) { + const start = (batch - 1) * batchSize + 1; + const end = Math.min(total, batch * batchSize); + await processBatch(batch, start, end); + } + + return { total, batchSize, status: "done" }; +} +``` + +### Full Implementation + +```typescript lineNumbers +// getWritable is used here to stream demo UI events. +// A production workflow wouldn't need it unless it has its own streaming UI. +import { getWritable } from "workflow"; + +export type BatchEvent = + | { type: "batch_start"; batch: number; start: number; end: number; label: string } + | { type: "batch_done"; batch: number; start: number; end: number; label: string } + | { type: "crash"; afterBatch: number; message: string } + | { type: "resume"; fromBatch: number } + | { type: "complete"; totalBatches: number; processedRecords: number } + | { type: "done"; status: "done"; totalBatches: number; processedRecords: number }; + +// Demo: >= 500ms per step (timing rules) +const BATCH_STEP_MS = 650; + +const numberFmt = new Intl.NumberFormat("en-US"); + +function formatNumber(n: number): string { + return numberFmt.format(n); +} + +function delay(ms: number): Promise { + return new Promise((resolve) => setTimeout(resolve, ms)); +} + +export async function batchProcessor( + total: number = 10_000, + batchSize: number = 1_000, + crashAfterBatches: number | null = null +) { + "use workflow"; + + const totalBatches = Math.ceil(total / batchSize); + + for (let batch = 1; batch <= totalBatches; batch++) { + const start = (batch - 1) * batchSize + 1; + const end = Math.min(total, batch * batchSize); + + // Crash simulation: after the specified batch, emit crash 
+ pause + resume + if (crashAfterBatches !== null && batch === crashAfterBatches + 1) { + await emitCrashAndResume(crashAfterBatches, batch, batchSize); + } + + await processBatch(batch, start, end); + } + + await emitComplete(totalBatches, total); + await emitDone(totalBatches, total); + + return { total, batchSize, status: "done" as const }; +} + +async function processBatch( + batch: number, + start: number, + end: number +) { + "use step"; + + const writer = getWritable().getWriter(); + const label = `${formatNumber(start)}\u2013${formatNumber(end)}`; + + try { + await writer.write({ type: "batch_start", batch, start, end, label }); + + // Demo: simulate processing time for visualization + await delay(BATCH_STEP_MS); + + await writer.write({ type: "batch_done", batch, start, end, label }); + } finally { + writer.releaseLock(); + } +} + +async function emitCrashAndResume( + crashAfterBatch: number, + resumeFromBatch: number, + batchSize: number +) { + "use step"; + + const writer = getWritable().getWriter(); + + try { + const nextRecord = crashAfterBatch * batchSize + 1; + await writer.write({ + type: "crash", + afterBatch: crashAfterBatch, + message: `Simulated crash after batch ${crashAfterBatch}. 
Resume continues at record ${formatNumber(nextRecord)}.`, + }); + + // Demo: brief pause to simulate downtime + await delay(800); + + await writer.write({ + type: "resume", + fromBatch: resumeFromBatch, + }); + } finally { + writer.releaseLock(); + } +} + +async function emitDone(totalBatches: number, processedRecords: number) { + "use step"; + + const writer = getWritable().getWriter(); + try { + await writer.write({ type: "done", status: "done", totalBatches, processedRecords }); + } finally { + writer.releaseLock(); + } +} + +async function emitComplete(totalBatches: number, processedRecords: number) { + "use step"; + + const writer = getWritable().getWriter(); + + try { + await writer.write({ type: "complete", totalBatches, processedRecords }); + } finally { + writer.releaseLock(); + } +} +``` + +## Key APIs + +- [`"use workflow"`](/docs/foundations/workflows-and-steps) — marks the orchestrator function +- [`"use step"`](/docs/foundations/workflows-and-steps) — marks functions with full Node.js access +- [`getWritable()`](/docs/api-reference/workflow/get-writable) — streams events to the client diff --git a/docs/content/docs/cookbook/data-processing/competing-consumers.mdx b/docs/content/docs/cookbook/data-processing/competing-consumers.mdx new file mode 100644 index 0000000000..4247e133b6 --- /dev/null +++ b/docs/content/docs/cookbook/data-processing/competing-consumers.mdx @@ -0,0 +1,194 @@ +--- +title: Competing Consumers +description: Multiple workers consume the same kind of work for throughput and scale-out. +type: guide +summary: Multiple workflow instances race to claim items from a shared queue — only one wins each item. +--- + +Multiple workflow instances race to claim items from a shared queue — only one wins each item. Use this pattern when you need exactly-once processing with multiple parallel consumers. + +## Pattern + +For each item in the queue, multiple consumers attempt to claim it. 
The runtime's deterministic workflow IDs ensure that duplicate starts are no-ops — only the first consumer to claim an item processes it. + +### Simplified + +```typescript lineNumbers +declare function processItem(itemId: string, consumers: string[]): Promise<{ itemId: string; claimedBy: string }>; // @setup +declare function recordResults(results: { itemId: string; claimedBy: string }[]): Promise<{ processed: number }>; // @setup + +export async function competingConsumers(items: string[], consumers: string[]) { + "use workflow"; + + const results = []; + + for (const itemId of items) { + const result = await processItem(itemId, consumers); + results.push(result); + } + + return recordResults(results); +} +``` + +### Full Implementation + +```typescript lineNumbers +// getWritable + getStepMetadata are used here to stream demo UI events. +// A production workflow wouldn't need these unless it has its own streaming UI. +import { getStepMetadata, getWritable } from "workflow"; + +export type ItemId = string; +export type ConsumerId = string; + +export type CCEvent = + | { type: "claiming"; itemId: string; consumerId: string } + | { type: "claimed"; itemId: string; consumerId: string } + | { type: "duplicate"; itemId: string; consumerId: string; wonBy: string } + | { type: "processing"; itemId: string; consumerId: string } + | { type: "processed"; itemId: string; consumerId: string } + | { type: "done"; summary: { processed: number; duplicatesBlocked: number } }; + +type ItemResult = { + itemId: string; + claimedBy: string; + duplicateAttempts: number; + status: "processed"; +}; + +type QueueReport = { + status: "done"; + results: ItemResult[]; + summary: { + processed: number; + duplicatesBlocked: number; + }; +}; + +// Demo: simulated processing latency so the UI can show progress +const CLAIM_DELAY_MS = 400; +const PROCESS_DELAY_MS = 800; +const SUMMARY_DELAY_MS = 500; + +function delay(ms: number): Promise { + return new Promise((resolve) => setTimeout(resolve, 
ms)); +} + +// Each item is processed exactly once. Multiple consumers attempt to claim +// the same item, but deterministic workflow IDs mean duplicate starts are +// no-ops — the runtime deduplicates at the execution level. +export async function competingConsumers( + items: string[], + consumers: string[] +): Promise { + "use workflow"; + + const results: ItemResult[] = []; + + // Simulate consumers racing to claim each item. In production, + // start() with id: `process-item-${itemId}` deduplicates automatically. + for (const itemId of items) { + const result = await processItem(itemId, consumers); + results.push(result); + } + + return recordResults(results); +} + +async function processItem( + itemId: string, + consumers: string[] +): Promise { + "use step"; + + const writer = getWritable().getWriter(); + const { attempt } = getStepMetadata(); + + try { + // Simulate multiple consumers trying to claim the same item. + // The first consumer wins; the rest are blocked as duplicates. 
+ const winnerIndex = Math.abs(hashCode(itemId)) % consumers.length; + const winner = consumers[winnerIndex]; + let duplicateAttempts = 0; + + for (let i = 0; i < consumers.length; i++) { + const consumerId = consumers[i]; + await writer.write({ type: "claiming", itemId, consumerId }); + await delay(CLAIM_DELAY_MS); + + if (i === winnerIndex) { + await writer.write({ type: "claimed", itemId, consumerId }); + } else { + duplicateAttempts++; + await writer.write({ + type: "duplicate", + itemId, + consumerId, + wonBy: winner, + }); + } + } + + // The winning consumer processes the item + await writer.write({ type: "processing", itemId, consumerId: winner }); + await delay(PROCESS_DELAY_MS); + await writer.write({ type: "processed", itemId, consumerId: winner }); + + return { + itemId, + claimedBy: winner, + duplicateAttempts, + status: "processed", + }; + } finally { + writer.releaseLock(); + } +} + +async function recordResults( + results: ItemResult[] +): Promise { + "use step"; + + const writer = getWritable().getWriter(); + + try { + await delay(SUMMARY_DELAY_MS); + + const processed = results.length; + const duplicatesBlocked = results.reduce( + (sum, r) => sum + r.duplicateAttempts, + 0 + ); + + const report: QueueReport = { + status: "done", + results, + summary: { processed, duplicatesBlocked }, + }; + + await writer.write({ type: "done", summary: report.summary }); + return report; + } finally { + writer.releaseLock(); + } +} + +// Simple deterministic hash for assigning items to consumers +function hashCode(str: string): number { + let hash = 0; + for (let i = 0; i < str.length; i++) { + const char = str.charCodeAt(i); + hash = (hash << 5) - hash + char; + hash |= 0; + } + return hash; +} +``` + +## Key APIs + +- [`"use workflow"`](/docs/foundations/workflows-and-steps) — marks the orchestrator function +- [`"use step"`](/docs/foundations/workflows-and-steps) — marks functions with full Node.js access +- 
[`getStepMetadata()`](/docs/api-reference/workflow/get-step-metadata) — access step attempt number and metadata +- [`getWritable()`](/docs/api-reference/workflow/get-writable) — streams events to the client diff --git a/docs/content/docs/cookbook/data-processing/map-reduce.mdx b/docs/content/docs/cookbook/data-processing/map-reduce.mdx new file mode 100644 index 0000000000..74abbd5ee7 --- /dev/null +++ b/docs/content/docs/cookbook/data-processing/map-reduce.mdx @@ -0,0 +1,201 @@ +--- +title: Map-Reduce +description: Map work in parallel, then reduce partial results into a single answer. +type: guide +summary: Partition a large analytics dataset into chunks, process in parallel, and merge into one report. +--- + +Partition a large analytics dataset into chunks, process in parallel, and merge into one report. Use this pattern when you can split work into independent partitions that are combined at the end. + +## Pattern + +Partition the input, fan out with `Promise.all()` so each partition runs as a parallel step, then pass all partial results into a reduce step that merges them into a single output. 
+ +### Simplified + +```typescript lineNumbers +declare function mapPartition(index: number, chunk: number[]): Promise<{ sum: number; count: number }>; // @setup +declare function reduceResults(jobId: string, results: { sum: number; count: number }[]): Promise<{ totalSum: number; average: number }>; // @setup + +export async function mapReduce( + jobId: string, + items: number[] = [10, 20, 30, 40, 50, 60, 70, 80, 90], + chunkSize: number = 3 +) { + "use workflow"; + + // Partition + const partitions: number[][] = []; + for (let i = 0; i < items.length; i += chunkSize) { + partitions.push(items.slice(i, i + chunkSize)); + } + + // Map in parallel + const results = await Promise.all( + partitions.map((chunk, i) => mapPartition(i, chunk)) + ); + + // Reduce + return reduceResults(jobId, results); +} +``` + +### Full Implementation + +```typescript lineNumbers +// getWritable + getStepMetadata are used here to stream demo UI events. +// A production workflow wouldn't need these unless it has its own streaming UI. +import { getWritable } from "workflow"; + +export type MapReduceEvent = + | { type: "partitioning"; totalItems: number; chunkCount: number } + | { type: "partition_created"; partitionIndex: number; itemCount: number } + | { type: "mapping"; partitionIndex: number } + | { type: "mapped"; partitionIndex: number; partialSum: number; partialCount: number } + | { type: "reducing" } + | { type: "done"; summary: { totalSum: number; totalCount: number; average: number } }; + +type PartitionResult = { + partitionIndex: number; + sum: number; + count: number; +}; + +type MapReduceReport = { + jobId: string; + status: "done"; + partitions: PartitionResult[]; + summary: { + totalSum: number; + totalCount: number; + average: number; + }; +}; + +// Demo: simulate processing latency so the UI can show progress. +// In production, these delays would be replaced by actual computation. 
+const PARTITION_DELAY_MS = 400; +const MAP_DELAY_MS = 600; +const REDUCE_DELAY_MS = 500; + +function delay(ms: number): Promise { + return new Promise((resolve) => setTimeout(resolve, ms)); +} + +const DEFAULT_CHUNK_SIZE = 3; + +export function partitionInput(items: number[], chunkSize: number = DEFAULT_CHUNK_SIZE): number[][] { + const chunks: number[][] = []; + for (let i = 0; i < items.length; i += chunkSize) { + chunks.push(items.slice(i, i + chunkSize)); + } + return chunks; +} + +// Demo entry point. Partitions input, maps partitions in parallel with +// Promise.all(), and reduces into a single aggregate result. +export async function mapReduce( + jobId: string, + items: number[] = [10, 20, 30, 40, 50, 60, 70, 80, 90], + chunkSize: number = DEFAULT_CHUNK_SIZE +): Promise { + "use workflow"; + + const writer = getWritable().getWriter(); + + try { + const partitions = partitionInput(items, chunkSize); + + await writer.write({ + type: "partitioning", + totalItems: items.length, + chunkCount: partitions.length, + }); + + await delay(PARTITION_DELAY_MS); + + for (let i = 0; i < partitions.length; i++) { + await writer.write({ + type: "partition_created", + partitionIndex: i, + itemCount: partitions[i].length, + }); + } + + // Map: process all partitions in parallel with Promise.all() + const partitionResults = await Promise.all( + partitions.map((chunk, index) => mapPartition(index, chunk)) + ); + + // Reduce: combine all partition results into a final aggregate + return reduceResults(jobId, partitionResults); + } finally { + writer.releaseLock(); + } +} + +async function mapPartition( + partitionIndex: number, + chunk: number[] +): Promise { + "use step"; + + const writer = getWritable().getWriter(); + + try { + await writer.write({ type: "mapping", partitionIndex }); + await delay(MAP_DELAY_MS); + + const sum = chunk.reduce((acc, val) => acc + val, 0); + const count = chunk.length; + + await writer.write({ + type: "mapped", + partitionIndex, + partialSum: sum, 
+ partialCount: count, + }); + + return { partitionIndex, sum, count }; + } finally { + writer.releaseLock(); + } +} + +async function reduceResults( + jobId: string, + partitionResults: PartitionResult[] +): Promise { + "use step"; + + const writer = getWritable().getWriter(); + + try { + await writer.write({ type: "reducing" }); + await delay(REDUCE_DELAY_MS); + + const totalSum = partitionResults.reduce((acc, r) => acc + r.sum, 0); + const totalCount = partitionResults.reduce((acc, r) => acc + r.count, 0); + const average = totalCount > 0 ? totalSum / totalCount : 0; + + const report: MapReduceReport = { + jobId, + status: "done", + partitions: partitionResults, + summary: { totalSum, totalCount, average }, + }; + + await writer.write({ type: "done", summary: report.summary }); + + return report; + } finally { + writer.releaseLock(); + } +} +``` + +## Key APIs + +- [`"use workflow"`](/docs/foundations/workflows-and-steps) — marks the orchestrator function +- [`"use step"`](/docs/foundations/workflows-and-steps) — marks functions with full Node.js access +- [`getWritable()`](/docs/api-reference/workflow/get-writable) — streams events to the client diff --git a/docs/content/docs/cookbook/data-processing/meta.json b/docs/content/docs/cookbook/data-processing/meta.json new file mode 100644 index 0000000000..ac5bfda779 --- /dev/null +++ b/docs/content/docs/cookbook/data-processing/meta.json @@ -0,0 +1,14 @@ +{ + "title": "Data Processing", + "pages": [ + "pipeline", + "batch-processor", + "map-reduce", + "scatter-gather", + "aggregator", + "splitter", + "resequencer", + "competing-consumers", + "priority-queue" + ] +} diff --git a/docs/content/docs/cookbook/data-processing/pipeline.mdx b/docs/content/docs/cookbook/data-processing/pipeline.mdx new file mode 100644 index 0000000000..d9a24c85ba --- /dev/null +++ b/docs/content/docs/cookbook/data-processing/pipeline.mdx @@ -0,0 +1,157 @@ +--- +title: Pipeline +description: Linear chain of stages — each step's output 
feeds the next. +type: guide +summary: Run a 4-stage ETL (extract, transform, validate, load) with live progress streaming. +--- + +Run a 4-stage ETL (extract, transform, validate, load) with live progress streaming. Use this pattern when work must flow through a fixed sequence of stages where each stage depends on the previous one. + +## Pattern + +Define an ordered list of stages and loop through them sequentially. Each stage runs as its own step, so a failure at any point is retried independently without re-running earlier stages. + +### Simplified + +```typescript lineNumbers +declare function runPipelineStep(name: string, index: number, total: number): Promise; // @setup + +export async function pipeline(documentId: string) { + "use workflow"; + + const steps = ["Extract", "Transform", "Validate", "Load"]; + + for (let i = 0; i < steps.length; i++) { + await runPipelineStep(steps[i], i, steps.length); + } + + return { status: "completed", steps: steps.length }; +} +``` + +### Full Implementation + +```typescript lineNumbers +import { getWritable } from "workflow"; + +export type PipelineEvent = + | { type: "step_start"; step: string; index: number; total: number } + | { type: "step_progress"; step: string; percent: number; message: string } + | { + type: "step_done"; + step: string; + index: number; + total: number; + durationMs: number; + } + | { type: "pipeline_done"; totalMs: number }; + +export async function pipeline( + documentId: string +): Promise<{ status: "completed"; steps: number }> { + "use workflow"; + + void documentId; + const steps = ["Extract", "Transform", "Validate", "Load"]; + const startMs = Date.now(); + + for (let i = 0; i < steps.length; i++) { + await runPipelineStep(steps[i], i, steps.length); + } + + await emitPipelineDone(startMs); + + return { status: "completed", steps: steps.length }; +} + +async function emitPipelineDone(startMs: number): Promise { + "use step"; + + const writer = getWritable().getWriter(); + try { + await 
writer.write({ type: "pipeline_done", totalMs: Date.now() - startMs });
+  } finally {
+    writer.releaseLock();
+  }
+}
+
+async function runPipelineStep(
+  name: string,
+  index: number,
+  total: number
+): Promise<void> {
+  "use step";
+
+  const writer = getWritable().getWriter();
+  const startMs = Date.now();
+
+  try {
+    await writer.write({ type: "step_start", step: name, index, total });
+
+    // Simulate work with progress updates
+    for (let pct = 0; pct <= 100; pct += 20) {
+      await new Promise((r) => setTimeout(r, 150));
+      await writer.write({
+        type: "step_progress",
+        step: name,
+        percent: pct,
+        message: getProgressMessage(name, pct),
+      });
+    }
+
+    await writer.write({
+      type: "step_done",
+      step: name,
+      index,
+      total,
+      durationMs: Date.now() - startMs,
+    });
+  } finally {
+    writer.releaseLock();
+  }
+}
+
+function getProgressMessage(step: string, pct: number): string {
+  const messages: Record<string, string[]> = {
+    Extract: [
+      "Connecting to source...",
+      "Reading metadata...",
+      "Parsing fields...",
+      "Extracting content...",
+      "Buffering records...",
+      "Extract complete",
+    ],
+    Transform: [
+      "Initializing rules...",
+      "Mapping schemas...",
+      "Converting types...",
+      "Normalizing values...",
+      "Applying transforms...",
+      "Transform complete",
+    ],
+    Validate: [
+      "Loading constraints...",
+      "Checking required fields...",
+      "Validating types...",
+      "Running business rules...",
+      "Final validation...",
+      "Validation complete",
+    ],
+    Load: [
+      "Connecting to target...",
+      "Preparing batch...",
+      "Writing records...",
+      "Updating indexes...",
+      "Flushing buffers...",
+      "Load complete",
+    ],
+  };
+  return messages[step]?.[pct / 20] ??
"Processing..."; +} +``` + +## Key APIs + +- [`"use workflow"`](/docs/foundations/workflows-and-steps) — marks the orchestrator function +- [`"use step"`](/docs/foundations/workflows-and-steps) — marks functions with full Node.js access +- [`getWritable()`](/docs/api-reference/workflow/get-writable) — streams events to the client diff --git a/docs/content/docs/cookbook/data-processing/priority-queue.mdx b/docs/content/docs/cookbook/data-processing/priority-queue.mdx new file mode 100644 index 0000000000..2787705bc2 --- /dev/null +++ b/docs/content/docs/cookbook/data-processing/priority-queue.mdx @@ -0,0 +1,169 @@ +--- +title: Priority Queue +description: Prefer higher-priority work when multiple items are waiting. +type: guide +summary: Process enterprise-tier jobs before free-tier jobs when the queue is backed up. +--- + +Process enterprise-tier jobs before free-tier jobs when the queue is backed up. Use this pattern when work items have different urgency levels and higher-priority items should be processed first. + +## Pattern + +Accept a list of tasks with priority labels, sort them by priority in a step, then process each task sequentially in priority order. The sort step ensures the highest-priority items are always handled first. 
+
+### Simplified
+
+```typescript lineNumbers
+type Priority = "urgent" | "high" | "medium" | "low";
+type TaskItem = { id: string; label: string; priority: Priority };
+
+declare function sortTasks(tasks: TaskItem[]): Promise<TaskItem[]>; // @setup
+declare function processTask(task: TaskItem, position: number): Promise<void>; // @setup
+
+export async function priorityQueueFlow(tasks: TaskItem[]) {
+  "use workflow";
+
+  const sorted = await sortTasks(tasks);
+
+  for (let i = 0; i < sorted.length; i++) {
+    await processTask(sorted[i], i + 1);
+  }
+
+  return { processed: sorted.length };
+}
+```
+
+### Full Implementation
+
+```typescript lineNumbers
+import { getWritable, sleep } from "workflow";
+
+export type Priority = "urgent" | "high" | "medium" | "low";
+
+export type QueueEvent =
+  | { type: "tasks_received"; count: number; priorities: Record<Priority, number> }
+  | { type: "sorting"; strategy: string }
+  | { type: "sorted"; order: string[] }
+  | { type: "processing_task"; taskId: string; priority: Priority; position: number }
+  | { type: "task_complete"; taskId: string; priority: Priority; result: string }
+  | { type: "done"; processed: number; summary: Record<Priority, number> };
+
+export interface PriorityQueueResult {
+  processed: number;
+  summary: Record<Priority, number>;
+}
+
+export type TaskItem = {
+  id: string;
+  label: string;
+  priority: Priority;
+};
+
+const PRIORITY_ORDER: Record<Priority, number> = {
+  urgent: 0,
+  high: 1,
+  medium: 2,
+  low: 3,
+};
+
+// Demo timing
+const SORT_DELAY_MS = 600;
+const PROCESS_DELAY_MS = 500;
+
+function delay(ms: number): Promise<void> {
+  return new Promise((resolve) => setTimeout(resolve, ms));
+}
+
+function sortByPriority(tasks: TaskItem[]): TaskItem[] {
+  return [...tasks].sort(
+    (a, b) => PRIORITY_ORDER[a.priority] - PRIORITY_ORDER[b.priority]
+  );
+}
+
+export async function priorityQueueFlow(
+  tasks: TaskItem[]
+): Promise<PriorityQueueResult> {
+  "use workflow";
+
+  // Step 1: Receive tasks and tally priorities
+  const priorities: Record<Priority, number> = { urgent: 0, high: 0, medium: 0, low: 0 };
+  for (const task of
tasks) { + priorities[task.priority] += 1; + } + await emitEvent({ type: "tasks_received", count: tasks.length, priorities }); + + // Step 2: Sort tasks by priority + const sorted = await sortTasks(tasks); + + // Step 3: Process each task in priority order + const summary: Record = { urgent: 0, high: 0, medium: 0, low: 0 }; + for (let i = 0; i < sorted.length; i++) { + await processTask(sorted[i], i + 1); + summary[sorted[i].priority] += 1; + } + + // Step 4: Emit completion + await emitEvent({ type: "done", processed: sorted.length, summary }); + + return { processed: sorted.length, summary }; +} + +async function sortTasks(tasks: TaskItem[]): Promise { + "use step"; + + const writer = getWritable().getWriter(); + try { + await writer.write({ type: "sorting", strategy: "priority-weighted" }); + await delay(SORT_DELAY_MS); + + const sorted = sortByPriority(tasks); + await writer.write({ type: "sorted", order: sorted.map((t) => t.id) }); + return sorted; + } finally { + writer.releaseLock(); + } +} + +async function processTask( + task: TaskItem, + position: number +): Promise { + "use step"; + + const writer = getWritable().getWriter(); + try { + await writer.write({ + type: "processing_task", + taskId: task.id, + priority: task.priority, + position, + }); + await delay(PROCESS_DELAY_MS); + + await writer.write({ + type: "task_complete", + taskId: task.id, + priority: task.priority, + result: `${task.label} completed`, + }); + } finally { + writer.releaseLock(); + } +} + +async function emitEvent(event: QueueEvent): Promise { + "use step"; + const writer = getWritable().getWriter(); + try { + await writer.write(event); + } finally { + writer.releaseLock(); + } +} +``` + +## Key APIs + +- [`"use workflow"`](/docs/foundations/workflows-and-steps) — marks the orchestrator function +- [`"use step"`](/docs/foundations/workflows-and-steps) — marks functions with full Node.js access +- [`getWritable()`](/docs/api-reference/workflow/get-writable) — streams events to the 
client diff --git a/docs/content/docs/cookbook/data-processing/resequencer.mdx b/docs/content/docs/cookbook/data-processing/resequencer.mdx new file mode 100644 index 0000000000..8639a3f9c5 --- /dev/null +++ b/docs/content/docs/cookbook/data-processing/resequencer.mdx @@ -0,0 +1,205 @@ +--- +title: Resequencer +description: Buffer and reorder out-of-order messages before the next stage. +type: guide +summary: Buffer out-of-order webhook fragments and release them in the correct sequence. +--- + +Buffer out-of-order webhook fragments and release them in the correct sequence. Use this pattern when messages arrive out of order but downstream processing requires them in sequence. + +## Pattern + +Create one hook per expected fragment. As fragments arrive (in any order), buffer them. When the next expected sequence number arrives, release it and drain any contiguous buffered fragments. This guarantees in-order delivery regardless of arrival order. + +### Simplified + +```typescript lineNumbers +import { defineHook, FatalError } from "workflow"; + +export const fragmentHook = defineHook<{ seq: number; payload: string }>(); + +export async function resequencer(batchId: string, expectedCount: number) { + "use workflow"; + + const hooks = []; + for (let i = 1; i <= expectedCount; i++) { + hooks.push({ seq: i, hook: fragmentHook.create({ token: `resequencer:${batchId}:${i}` }) }); + } + + const buffer = new Map(); + const ordered: string[] = []; + let nextExpected = 1; + + const pending = new Map( + hooks.map(({ seq, hook }) => [seq, hook.then((data) => ({ seq, payload: data.payload }))]) + ); + + while (ordered.length < expectedCount) { + const result = await Promise.race([...pending.values()]); + pending.delete(result.seq); + + if (result.seq === nextExpected) { + ordered.push(result.payload); + nextExpected++; + while (buffer.has(nextExpected)) { + ordered.push(buffer.get(nextExpected)!); + buffer.delete(nextExpected); + nextExpected++; + } + } else { + 
buffer.set(result.seq, result.payload);
+    }
+  }
+
+  return { batchId, ordered, status: "complete" };
+}
+```
+
+### Full Implementation
+
+```typescript lineNumbers
+import { defineHook, getWritable, FatalError } from "workflow";
+
+// Typed events streamed to the UI via getWritable()
+export type ResequencerEvent =
+  | { type: "waiting"; batchId: string; expectedCount: number; tokens: string[] }
+  | { type: "fragment_received"; batchId: string; seq: number; payload: string }
+  | { type: "fragment_buffered"; batchId: string; seq: number; bufferSize: number }
+  | { type: "fragment_released"; batchId: string; seq: number; payload: string; nextExpected: number }
+  | { type: "error"; batchId: string; message: string }
+  | { type: "done"; batchId: string; ordered: string[] };
+
+export type FragmentPayload = {
+  seq: number;
+  payload: string;
+};
+
+export const fragmentHook = defineHook<FragmentPayload>();
+
+export async function resequencer(
+  batchId: string,
+  expectedCount: number
+) {
+  "use workflow";
+
+  // Create one hook per expected fragment
+  const tokens: string[] = [];
+  const hooks = [];
+  for (let i = 1; i <= expectedCount; i++) {
+    const token = `resequencer:${batchId}:${i}`;
+    tokens.push(token);
+    hooks.push({ seq: i, hook: fragmentHook.create({ token }), token });
+  }
+
+  await emit({
+    type: "waiting",
+    batchId,
+    expectedCount,
+    tokens,
+  });
+
+  // Buffer for out-of-order fragments
+  const buffer = new Map<number, string>();
+  const ordered: string[] = [];
+  let nextExpected = 1;
+
+  // Wait for all fragments — they can arrive in any order
+  const pending = new Map<number, Promise<{ seq: number; payload: string }>>(
+    hooks.map(({ seq, hook }) => [seq, hook.then((data) => ({ seq, payload: data.payload }))])
+  );
+
+  while (ordered.length < expectedCount) {
+    // Race all still-pending hooks
+    const result = await Promise.race([...pending.values()]);
+    pending.delete(result.seq);
+
+    await emit({
+      type: "fragment_received",
+      batchId,
+      seq: result.seq,
+      payload: result.payload,
+    });
+
+    // Guard: duplicate sequence (already
released or buffered) + if (ordered[result.seq - 1] !== undefined || buffer.has(result.seq)) { + throw new FatalError( + `Duplicate sequence ${result.seq} in batch ${batchId}` + ); + } + + // Guard: sequence out of range + if (result.seq < 1 || result.seq > expectedCount) { + throw new FatalError( + `Sequence ${result.seq} out of range [1, ${expectedCount}] in batch ${batchId}` + ); + } + + if (result.seq === nextExpected) { + // Fragment is the one we need — release immediately + ordered.push(result.payload); + nextExpected++; + + await emit({ + type: "fragment_released", + batchId, + seq: result.seq, + payload: result.payload, + nextExpected, + }); + + // Drain any contiguous buffered fragments + while (buffer.has(nextExpected)) { + const bufferedPayload = buffer.get(nextExpected)!; + buffer.delete(nextExpected); + ordered.push(bufferedPayload); + + await emit({ + type: "fragment_released", + batchId, + seq: nextExpected, + payload: bufferedPayload, + nextExpected: nextExpected + 1, + }); + + nextExpected++; + } + } else { + // Out of order — buffer it + buffer.set(result.seq, result.payload); + + await emit({ + type: "fragment_buffered", + batchId, + seq: result.seq, + bufferSize: buffer.size, + }); + } + } + + await emit({ type: "done", batchId, ordered }); + + return { batchId, ordered, status: "complete" as const }; +} + +/** + * Step: Emit a single event to the UI stream. + * Re-acquires the writer inside the step so it survives durable suspension. 
+ */
+async function emit<T extends ResequencerEvent>(event: T): Promise<void> {
+  "use step";
+  const writer = getWritable().getWriter();
+  try {
+    await writer.write(event);
+  } finally {
+    writer.releaseLock();
+  }
+}
+```
+
+## Key APIs
+
+- [`"use workflow"`](/docs/foundations/workflows-and-steps) — marks the orchestrator function
+- [`"use step"`](/docs/foundations/workflows-and-steps) — marks functions with full Node.js access
+- [`defineHook()`](/docs/api-reference/workflow/define-hook) — creates a named hook that suspends until signaled
+- [`FatalError`](/docs/api-reference/workflow/fatal-error) — prevents automatic retries on permanent failures
+- [`getWritable()`](/docs/api-reference/workflow/get-writable) — streams events to the client
diff --git a/docs/content/docs/cookbook/data-processing/scatter-gather.mdx b/docs/content/docs/cookbook/data-processing/scatter-gather.mdx
new file mode 100644
index 0000000000..7853e9b4ee
--- /dev/null
+++ b/docs/content/docs/cookbook/data-processing/scatter-gather.mdx
@@ -0,0 +1,254 @@
+---
+title: Scatter-Gather
+description: Fan out to many workers, then collect and merge their replies.
+type: guide
+summary: Query 4 shipping providers for quotes in parallel and pick the cheapest one that responds.
+---
+
+Query 4 shipping providers for quotes in parallel and pick the cheapest one that responds. Use this pattern when you need to fan out the same request to multiple providers and select the best result.
+
+## Pattern
+
+Launch one step per provider using `Promise.allSettled()` so failures don't cancel the others. A final gather step picks the winner from the successful results.
+ +### Simplified + +```typescript lineNumbers +declare function fetchFedExQuote(packageId: string): Promise<{ provider: string; price: number; days: number }>; // @setup +declare function fetchUpsQuote(packageId: string): Promise<{ provider: string; price: number; days: number }>; // @setup +declare function fetchDhlQuote(packageId: string): Promise<{ provider: string; price: number; days: number }>; // @setup +declare function fetchUspsQuote(packageId: string): Promise<{ provider: string; price: number; days: number }>; // @setup + +export async function scatterGather(packageId: string) { + "use workflow"; + + const settled = await Promise.allSettled([ + fetchFedExQuote(packageId), + fetchUpsQuote(packageId), + fetchDhlQuote(packageId), + fetchUspsQuote(packageId), + ]); + + const quotes = settled + .filter((r): r is PromiseFulfilledResult<{ provider: string; price: number; days: number }> => r.status === "fulfilled") + .map((r) => r.value); + + const winner = quotes.length > 0 + ? quotes.reduce((best, cur) => (cur.price < best.price ? cur : best)) + : null; + + return { packageId, winner }; +} +``` + +### Full Implementation + +```typescript lineNumbers +// getWritable is used here to stream demo UI events. +// A production workflow wouldn't need this unless it has its own streaming UI. 
+import { getWritable } from "workflow"; + +export type ProviderId = "fedex" | "ups" | "dhl" | "usps"; + +export type ProviderEvent = + | { type: "provider_querying"; provider: string } + | { type: "provider_quoted"; provider: string; price: number; days: number } + | { type: "provider_failed"; provider: string; error: string } + | { type: "gathering" } + | { type: "done"; winner: { provider: string; price: number; days: number } | null }; + +type ProviderQuote = { + provider: ProviderId; + price: number; + days: number; +}; + +type ProviderResult = { + provider: ProviderId; + status: "quoted" | "failed"; + price?: number; + days?: number; + error?: string; +}; + +type ScatterGatherResult = { + packageId: string; + status: "done"; + results: ProviderResult[]; + winner: ProviderQuote | null; +}; + +// Demo: simulate real-world network latency so the UI can show progress. +const PROVIDER_DELAY_MS: Record = { + fedex: 700, + ups: 900, + dhl: 1100, + usps: 1300, +}; + +const PROVIDER_QUOTES: Record = { + fedex: { price: 24.99, days: 2 }, + ups: { price: 19.50, days: 3 }, + dhl: { price: 31.00, days: 4 }, + usps: { price: 12.75, days: 5 }, +}; + +function delay(ms: number): Promise { + return new Promise((resolve) => setTimeout(resolve, ms)); +} + +export async function scatterGather( + packageId: string, + failProviders: ProviderId[] = [] +): Promise { + "use workflow"; + + const providers: Array<{ + provider: ProviderId; + fetch: () => Promise; + }> = [ + { provider: "fedex", fetch: () => fetchFedExQuote(packageId, failProviders) }, + { provider: "ups", fetch: () => fetchUpsQuote(packageId, failProviders) }, + { provider: "dhl", fetch: () => fetchDhlQuote(packageId, failProviders) }, + { provider: "usps", fetch: () => fetchUspsQuote(packageId, failProviders) }, + ]; + + const settled = await Promise.allSettled( + providers.map((p) => p.fetch()) + ); + + const results: ProviderResult[] = settled.map((result, index) => { + const provider = providers[index].provider; + + 
if (result.status === "fulfilled") { + return { + provider, + status: "quoted", + price: result.value.price, + days: result.value.days, + }; + } + + return { + provider, + status: "failed", + error: result.reason instanceof Error ? result.reason.message : "Unknown error", + }; + }); + + return gatherBestQuote(packageId, results); +} + +async function fetchProviderQuote( + provider: ProviderId, + packageId: string, + failProviders: ProviderId[] +): Promise { + // Demo: stream progress events to the UI via getWritable() + const writer = getWritable().getWriter(); + + try { + await writer.write({ type: "provider_querying", provider }); + await delay(PROVIDER_DELAY_MS[provider]); + + if (failProviders.includes(provider)) { + const error = `${provider.toUpperCase()} service unavailable`; + await writer.write({ type: "provider_failed", provider, error }); + throw new Error(error); + } + + const quote = PROVIDER_QUOTES[provider]; + await writer.write({ + type: "provider_quoted", + provider, + price: quote.price, + days: quote.days, + }); + + return { provider, price: quote.price, days: quote.days }; + } finally { + writer.releaseLock(); + } +} + +async function fetchFedExQuote( + packageId: string, + failProviders: ProviderId[] +): Promise { + "use step"; + return fetchProviderQuote("fedex", packageId, failProviders); +} + +async function fetchUpsQuote( + packageId: string, + failProviders: ProviderId[] +): Promise { + "use step"; + return fetchProviderQuote("ups", packageId, failProviders); +} + +async function fetchDhlQuote( + packageId: string, + failProviders: ProviderId[] +): Promise { + "use step"; + return fetchProviderQuote("dhl", packageId, failProviders); +} + +async function fetchUspsQuote( + packageId: string, + failProviders: ProviderId[] +): Promise { + "use step"; + return fetchProviderQuote("usps", packageId, failProviders); +} + +async function gatherBestQuote( + packageId: string, + results: ProviderResult[] +): Promise { + "use step"; + const writer = 
getWritable().getWriter(); + + try { + await writer.write({ type: "gathering" }); + await delay(500); + + const quotes = results.filter( + (r): r is ProviderResult & { price: number; days: number } => + r.status === "quoted" && r.price !== undefined && r.days !== undefined + ); + + const winner = + quotes.length > 0 + ? quotes.reduce((best, current) => + current.price < best.price ? current : best + ) + : null; + + await writer.write({ + type: "done", + winner: winner + ? { provider: winner.provider, price: winner.price, days: winner.days } + : null, + }); + + return { + packageId, + status: "done", + results, + winner: winner + ? { provider: winner.provider, price: winner.price, days: winner.days } + : null, + }; + } finally { + writer.releaseLock(); + } +} +``` + +## Key APIs + +- [`"use workflow"`](/docs/foundations/workflows-and-steps) — marks the orchestrator function +- [`"use step"`](/docs/foundations/workflows-and-steps) — marks functions with full Node.js access +- [`getWritable()`](/docs/api-reference/workflow/get-writable) — streams events to the client diff --git a/docs/content/docs/cookbook/data-processing/splitter.mdx b/docs/content/docs/cookbook/data-processing/splitter.mdx new file mode 100644 index 0000000000..ae3f102af6 --- /dev/null +++ b/docs/content/docs/cookbook/data-processing/splitter.mdx @@ -0,0 +1,240 @@ +--- +title: Splitter +description: Break one compound message into many smaller messages for downstream steps. +type: guide +summary: Split a multi-item order into individual line items for independent validation and fulfillment. +--- + +Split a multi-item order into individual line items for independent validation and fulfillment. Use this pattern when a single input contains multiple items that should be processed independently. + +## Pattern + +Iterate over the items in the composite message, processing each as its own step. Each item goes through validation, reservation, and fulfillment independently. 
Failures in one item do not prevent the others from completing.
+
+### Simplified
+
+```typescript lineNumbers
+import { FatalError } from "workflow";
+
+type LineItem = { sku: string; name: string; quantity: number; warehouse: string };
+type Order = { orderId: string; items: LineItem[] };
+
+declare function processLineItem(orderId: string, item: LineItem, index: number): Promise<{ sku: string; status: "fulfilled" | "failed" }>; // @setup
+
+export async function orderSplitter(order: Order) {
+  "use workflow";
+
+  const results = [];
+
+  for (let i = 0; i < order.items.length; i++) {
+    const result = await processLineItem(order.orderId, order.items[i], i);
+    results.push(result);
+  }
+
+  const fulfilled = results.filter((r) => r.status === "fulfilled").length;
+  return { orderId: order.orderId, fulfilled, failed: results.length - fulfilled };
+}
+```
+
+### Full Implementation
+
+```typescript lineNumbers
+// getWritable is used here to stream demo UI events.
+// A production workflow wouldn't need this unless it has its own streaming UI.
+import { getWritable } from "workflow";
+
+// Local FatalError — prevents the SDK's automatic retry for permanent failures.
+// NOTE(review): the Resequencer recipe imports FatalError from "workflow" — if your SDK version exports it, import it instead of defining it locally here.
+class FatalError extends Error { + constructor(message: string) { + super(message); + this.name = "FatalError"; + } +} + +export type LineItem = { + sku: string; + name: string; + quantity: number; + warehouse: string; +}; + +export type Order = { + orderId: string; + items: LineItem[]; +}; + +export type SplitterEvent = + | { type: "splitting"; orderId: string; itemCount: number } + | { type: "item_processing"; index: number; sku: string; name: string } + | { type: "item_validated"; index: number; sku: string } + | { type: "item_reserved"; index: number; sku: string; warehouse: string } + | { type: "item_fulfilled"; index: number; sku: string; hookToken: string } + | { type: "item_failed"; index: number; sku: string; error: string } + | { type: "aggregating" } + | { + type: "done"; + summary: { fulfilled: number; failed: number; total: number }; + }; + +type ItemResult = { + index: number; + sku: string; + status: "fulfilled" | "failed"; + hookToken?: string; + error?: string; +}; + +type SplitterReport = { + orderId: string; + status: "done"; + results: ItemResult[]; + summary: { fulfilled: number; failed: number; total: number }; +}; + +// Demo: configures which item indices should fail for the interactive UI. +export type DemoFailures = { + failIndices: number[]; +}; + +const NO_FAILURES: DemoFailures = { failIndices: [] }; + +const ITEM_DELAY_MS = 600; +const AGGREGATE_DELAY_MS = 400; + +function delay(ms: number): Promise { + return new Promise((resolve) => setTimeout(resolve, ms)); +} + +function emit(writer: WritableStreamDefaultWriter) { + return async (event: SplitterEvent) => { + await writer.write(event); + }; +} + +// The splitter pattern: receives a composite order, splits it into +// individual line items, and processes each one through validation, +// reservation, and fulfillment steps. 
+export async function orderSplitter( + order: Order, + failures: DemoFailures = NO_FAILURES +): Promise { + "use workflow"; + + const writer = getWritable().getWriter(); + const send = emit(writer); + + try { + await send({ + type: "splitting", + orderId: order.orderId, + itemCount: order.items.length, + }); + + // Split: process each line item as its own step sequence + const results: ItemResult[] = []; + for (let i = 0; i < order.items.length; i++) { + const item = order.items[i]; + const shouldFail = failures.failIndices.includes(i); + const result = await processLineItem( + order.orderId, + item, + i, + shouldFail + ); + results.push(result); + } + + // Aggregate results + await send({ type: "aggregating" }); + await delay(AGGREGATE_DELAY_MS); + + const fulfilled = results.filter((r) => r.status === "fulfilled").length; + const failed = results.length - fulfilled; + const summary = { fulfilled, failed, total: results.length }; + + await send({ type: "done", summary }); + + return { + orderId: order.orderId, + status: "done", + results, + summary, + }; + } finally { + writer.releaseLock(); + } +} + +async function processLineItem( + orderId: string, + item: LineItem, + index: number, + shouldFail: boolean +): Promise { + "use step"; + + const writer = getWritable().getWriter(); + const send = emit(writer); + + try { + await send({ + type: "item_processing", + index, + sku: item.sku, + name: item.name, + }); + await delay(ITEM_DELAY_MS); + + // Validate + await send({ type: "item_validated", index, sku: item.sku }); + await delay(ITEM_DELAY_MS / 2); + + // Simulate failure for demo + if (shouldFail) { + const error = `Insufficient stock for ${item.sku} at ${item.warehouse}`; + await send({ type: "item_failed", index, sku: item.sku, error }); + throw new FatalError(error); + } + + // Reserve inventory + await send({ + type: "item_reserved", + index, + sku: item.sku, + warehouse: item.warehouse, + }); + await delay(ITEM_DELAY_MS / 2); + + // Fulfill — 
deterministic hook token based on orderId + itemIndex + const hookToken = `${orderId}_item_${index}_${item.sku}`; + await send({ + type: "item_fulfilled", + index, + sku: item.sku, + hookToken, + }); + + return { index, sku: item.sku, status: "fulfilled", hookToken }; + } catch (err) { + if (err instanceof FatalError) { + return { + index, + sku: item.sku, + status: "failed", + error: err.message, + }; + } + throw err; + } finally { + writer.releaseLock(); + } +} +``` + +## Key APIs + +- [`"use workflow"`](/docs/foundations/workflows-and-steps) — marks the orchestrator function +- [`"use step"`](/docs/foundations/workflows-and-steps) — marks functions with full Node.js access +- [`FatalError`](/docs/api-reference/workflow/fatal-error) — prevents automatic retries on permanent failures +- [`getWritable()`](/docs/api-reference/workflow/get-writable) — streams events to the client diff --git a/docs/content/docs/cookbook/index.mdx b/docs/content/docs/cookbook/index.mdx new file mode 100644 index 0000000000..b0b70d20cd --- /dev/null +++ b/docs/content/docs/cookbook/index.mdx @@ -0,0 +1,7 @@ +--- +title: Cookbook +description: Find the right workflow pattern for your use case. Browse 50 recipes with code examples. 
+type: overview +--- + + diff --git a/docs/content/docs/cookbook/meta.json b/docs/content/docs/cookbook/meta.json new file mode 100644 index 0000000000..c640701cee --- /dev/null +++ b/docs/content/docs/cookbook/meta.json @@ -0,0 +1,22 @@ +{ + "title": "Cookbook", + "defaultOpen": true, + "pages": [ + "---Payments & Orders---", + "payments", + "---Approvals---", + "approvals", + "---Resilience---", + "resilience", + "---Notifications---", + "notifications", + "---Webhooks & Callbacks---", + "webhooks", + "---Data Processing---", + "data-processing", + "---Routing---", + "routing", + "---Observability---", + "observability" + ] +} diff --git a/docs/content/docs/cookbook/notifications/fan-out.mdx b/docs/content/docs/cookbook/notifications/fan-out.mdx new file mode 100644 index 0000000000..bd375e48fb --- /dev/null +++ b/docs/content/docs/cookbook/notifications/fan-out.mdx @@ -0,0 +1,298 @@ +--- +title: Fan-Out +description: One trigger fans out to parallel branches (often paired with gather/aggregate). +type: guide +summary: Broadcast an incident alert to Slack, email, SMS, and PagerDuty in parallel. +--- + +Broadcast an incident alert to Slack, email, SMS, and PagerDuty in parallel. + +## Pattern + +The workflow defines one step per notification channel and launches them all with `Promise.allSettled()`. Each channel runs independently — a failure in one does not block the others. Results are aggregated after all channels settle. 
+ +### Simplified + +```typescript lineNumbers +declare function sendSlackAlert(incidentId: string, message: string): Promise<{ providerId: string }>; // @setup +declare function sendEmailAlert(incidentId: string, message: string): Promise<{ providerId: string }>; // @setup +declare function sendSmsAlert(incidentId: string, message: string): Promise<{ providerId: string }>; // @setup +declare function sendPagerDutyAlert(incidentId: string, message: string): Promise<{ providerId: string }>; // @setup + +export async function incidentFanOut(incidentId: string, message: string) { + "use workflow"; + + const channels = [ + { name: "slack", send: () => sendSlackAlert(incidentId, message) }, + { name: "email", send: () => sendEmailAlert(incidentId, message) }, + { name: "sms", send: () => sendSmsAlert(incidentId, message) }, + { name: "pagerduty", send: () => sendPagerDutyAlert(incidentId, message) }, + ]; + + const settled = await Promise.allSettled( + channels.map((ch) => ch.send()) + ); + + const ok = settled.filter((r) => r.status === "fulfilled").length; + return { incidentId, ok, failed: settled.length - ok }; +} +``` + +### Full Implementation + +```typescript lineNumbers +// getWritable + getStepMetadata are used here to stream demo UI events. +// A production workflow wouldn't need these unless it has its own streaming UI. +import { getStepMetadata, getWritable } from "workflow"; + +// Local FatalError — prevents the SDK's automatic retry for permanent failures. +// The workflow package does not export this class, so we define it here. +class FatalError extends Error { + constructor(message: string) { + super(message); + this.name = "FatalError"; + } +} + +export type NotificationChannel = "slack" | "email" | "sms" | "pagerduty"; + +// Demo-only: configures which channels should fail (and how) in the +// interactive UI. In a real workflow you'd remove this entirely — your +// steps would call real APIs and failures would be organic. 
+export type DemoFailures = { + transient: NotificationChannel[]; + permanent: NotificationChannel[]; +}; + +export type ChannelEvent = + | { type: "channel_sending"; channel: string } + | { type: "channel_sent"; channel: string; providerId: string } + | { type: "channel_failed"; channel: string; error: string; attempt: number } + | { type: "channel_retrying"; channel: string; attempt: number } + | { type: "aggregating" } + | { type: "done"; summary: { ok: number; failed: number } }; + +type ChannelResult = { + channel: NotificationChannel; + status: "sent" | "failed"; + providerId?: string; + error?: string; +}; + +type IncidentReport = { + incidentId: string; + message: string; + status: "done"; + deliveries: ChannelResult[]; + summary: { + ok: number; + failed: number; + }; +}; + +const CHANNEL_ERROR_MESSAGES: Record = { + slack: "Slack API rate limit exceeded", + email: "Email provider returned 503", + sms: "SMS delivery failed: invalid number", + pagerduty: "PagerDuty integration is not configured", +}; + +// Demo: simulate real-world network latency so the UI can show progress. +// In production, these delays would be replaced by actual API calls. +const CHANNEL_DELAY_MS: Record = { + slack: 650, + pagerduty: 750, + email: 900, + sms: 1150, +}; + +const AGGREGATE_DELAY_MS = 500; + +// setTimeout is available here because delay() is only called from +// "use step" functions, which have full Node.js runtime access. +function delay(ms: number): Promise { + return new Promise((resolve) => setTimeout(resolve, ms)); +} + +const NO_FAILURES: DemoFailures = { transient: [], permanent: [] }; + +// Demo entry point. The `failures` parameter is only used by the interactive +// UI to let users toggle simulated failures — strip it out when adapting +// this workflow for production use. 
+export async function incidentFanOut( + incidentId: string, + message: string, + failures: DemoFailures = NO_FAILURES +): Promise { + "use workflow"; + + const fanOutTargets = [ + { + channel: "slack" as const, + send: () => sendSlackAlert(incidentId, message, failures), + }, + { + channel: "email" as const, + send: () => sendEmailAlert(incidentId, message, failures), + }, + { + channel: "sms" as const, + send: () => sendSmsAlert(incidentId, message, failures), + }, + { + channel: "pagerduty" as const, + send: () => sendPagerDutyAlert(incidentId, message, failures), + }, + ]; + + const settled = await Promise.allSettled( + fanOutTargets.map((target) => target.send()) + ); + + const deliveries: ChannelResult[] = settled.map((result, index) => { + const channel = fanOutTargets[index].channel; + + if (result.status === "fulfilled") { + return { + channel, + status: "sent", + providerId: result.value.providerId, + }; + } + + return { + channel, + status: "failed", + error: `${channel}: ${errorMessage(result.reason)}`, + }; + }); + + return aggregateResults(incidentId, message, deliveries); +} + +function errorMessage(reason: unknown): string { + if (reason instanceof Error) return reason.message; + if (typeof reason === "string") return reason; + return "Unknown delivery failure"; +} + +// Demo: shared implementation for all channel steps. In production you'd +// replace the delay + simulated failures with a real API call per channel. +// The getWritable() streaming and getStepMetadata() calls are also demo-only +// — they power the live execution log in the UI. 
+async function sendChannelAlert( + channel: NotificationChannel, + incidentId: string, + message: string, + failures: DemoFailures +): Promise<{ providerId: string }> { + const writer = getWritable().getWriter(); + const { attempt } = getStepMetadata(); + + try { + if (attempt > 1) { + await writer.write({ type: "channel_retrying", channel, attempt }); + } + + await writer.write({ type: "channel_sending", channel }); + await delay(CHANNEL_DELAY_MS[channel]); + + // Permanent failure — FatalError prevents the SDK's automatic retry, + // so the channel stays failed in Promise.allSettled(). + if (failures.permanent.includes(channel)) { + const error = CHANNEL_ERROR_MESSAGES[channel]; + await writer.write({ type: "channel_failed", channel, error, attempt }); + throw new FatalError(error); + } + + // Transient failure — throws a regular Error on attempt 1 so the SDK + // auto-retries. The retry will succeed, showing the recovery path. + if (attempt === 1 && failures.transient.includes(channel)) { + throw new Error(CHANNEL_ERROR_MESSAGES[channel]); + } + + const providerId = `${channel}_${incidentId}_${message.length}_${attempt}`; + await writer.write({ type: "channel_sent", channel, providerId }); + + return { providerId }; + } finally { + writer.releaseLock(); + } +} + +async function sendSlackAlert( + incidentId: string, + message: string, + failures: DemoFailures +): Promise<{ providerId: string }> { + "use step"; + return sendChannelAlert("slack", incidentId, message, failures); +} + +async function sendEmailAlert( + incidentId: string, + message: string, + failures: DemoFailures +): Promise<{ providerId: string }> { + "use step"; + return sendChannelAlert("email", incidentId, message, failures); +} + +async function sendSmsAlert( + incidentId: string, + message: string, + failures: DemoFailures +): Promise<{ providerId: string }> { + "use step"; + return sendChannelAlert("sms", incidentId, message, failures); +} + +async function sendPagerDutyAlert( + incidentId: 
string, + message: string, + failures: DemoFailures +): Promise<{ providerId: string }> { + "use step"; + return sendChannelAlert("pagerduty", incidentId, message, failures); +} + +async function aggregateResults( + incidentId: string, + message: string, + deliveries: ChannelResult[] +): Promise { + "use step"; + // Demo: stream aggregation progress to the UI + const writer = getWritable().getWriter(); + + try { + await writer.write({ type: "aggregating" }); + await delay(AGGREGATE_DELAY_MS); + + const ok = deliveries.filter((delivery) => delivery.status === "sent").length; + const failed = deliveries.length - ok; + const report: IncidentReport = { + incidentId, + message, + status: "done", + deliveries, + summary: { ok, failed }, + }; + + await writer.write({ type: "done", summary: report.summary }); + + return report; + } finally { + writer.releaseLock(); + } +} +``` + +## Key APIs + +- [`"use workflow"`](/docs/foundations/workflows-and-steps) — marks the orchestrator function +- [`"use step"`](/docs/foundations/workflows-and-steps) — marks functions that run with full Node.js access +- [`Promise.allSettled()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise/allSettled) — fans out to all channels, isolating failures +- [`FatalError`](/docs/api-reference/workflow/fatal-error) — prevents automatic retry for permanent failures +- [`getStepMetadata()`](/docs/api-reference/step/get-step-metadata) — provides the current attempt number +- [`getWritable()`](/docs/api-reference/step/get-writable) — streams events to the caller diff --git a/docs/content/docs/cookbook/notifications/meta.json b/docs/content/docs/cookbook/notifications/meta.json new file mode 100644 index 0000000000..872bd3d190 --- /dev/null +++ b/docs/content/docs/cookbook/notifications/meta.json @@ -0,0 +1,11 @@ +{ + "title": "Notifications", + "pages": [ + "fan-out", + "publish-subscribe", + "recipient-list", + "onboarding-drip", + "wakeable-reminder", + 
"scheduled-digest" + ] +} diff --git a/docs/content/docs/cookbook/notifications/onboarding-drip.mdx b/docs/content/docs/cookbook/notifications/onboarding-drip.mdx new file mode 100644 index 0000000000..1ea32ac489 --- /dev/null +++ b/docs/content/docs/cookbook/notifications/onboarding-drip.mdx @@ -0,0 +1,139 @@ +--- +title: Onboarding Drip +description: Time-delayed sequence with durable waits between steps. +type: guide +summary: Send a welcome email on signup, a tips email after 2 days, and a check-in after a week. +--- + +Send a welcome email on signup, a tips email after 2 days, and a check-in after a week. + +## Pattern + +The workflow sends emails at scheduled intervals using `sleep()` between each step. Because `sleep()` is durable, the workflow survives cold starts and restarts — even across days or weeks of waiting. + +### Simplified + +```typescript lineNumbers +import { sleep } from "workflow"; + +declare function sendEmail(email: string, template: string): Promise; // @setup + +export async function onboardingDrip(email: string) { + "use workflow"; + + await sendEmail(email, "welcome"); + + await sleep("1d"); + await sendEmail(email, "getting-started-tips"); + + await sleep("2d"); + await sendEmail(email, "feature-highlights"); + + await sleep("4d"); + await sendEmail(email, "follow-up"); + + return { email, status: "completed", totalDays: 7 }; +} +``` + +### Full Implementation + +```typescript lineNumbers +import { getWritable, sleep } from "workflow"; + +export type DripEvent = + | { type: "email_sending"; day: number; label: string } + | { type: "email_sent"; day: number; label: string } + | { type: "sleeping"; duration: string; fromDay: number; toDay: number } + | { type: "done" }; + +const SEND_DELAY_MS = 600; + +function delay(ms: number): Promise { + return new Promise((resolve) => setTimeout(resolve, ms)); +} + +export async function runOnboardingDrip(email: string) { + "use workflow"; + + // Day 0: Welcome email + await sendWelcomeEmail(email); 
+ + // Day 1: Getting started tips + await sleep("1d"); + await sendGettingStartedEmail(email); + + // Day 3: Feature highlights + await sleep("2d"); + await sendFeatureHighlightsEmail(email); + + // Day 7: Follow-up + await sleep("4d"); + await sendFollowUpEmail(email); + + return { email, status: "completed", totalDays: 7 }; +} + +async function sendWelcomeEmail(email: string) { + "use step"; + const writer = getWritable().getWriter(); + try { + await writer.write({ type: "email_sending", day: 0, label: "Welcome Email" }); + await delay(SEND_DELAY_MS); + await writer.write({ type: "email_sent", day: 0, label: "Welcome Email" }); + await writer.write({ type: "sleeping", duration: "1d", fromDay: 0, toDay: 1 }); + return { sent: true, day: 0 }; + } finally { + writer.releaseLock(); + } +} + +async function sendGettingStartedEmail(email: string) { + "use step"; + const writer = getWritable().getWriter(); + try { + await writer.write({ type: "email_sending", day: 1, label: "Getting Started Tips" }); + await delay(SEND_DELAY_MS); + await writer.write({ type: "email_sent", day: 1, label: "Getting Started Tips" }); + await writer.write({ type: "sleeping", duration: "2d", fromDay: 1, toDay: 3 }); + return { sent: true, day: 1 }; + } finally { + writer.releaseLock(); + } +} + +async function sendFeatureHighlightsEmail(email: string) { + "use step"; + const writer = getWritable().getWriter(); + try { + await writer.write({ type: "email_sending", day: 3, label: "Feature Highlights" }); + await delay(SEND_DELAY_MS); + await writer.write({ type: "email_sent", day: 3, label: "Feature Highlights" }); + await writer.write({ type: "sleeping", duration: "4d", fromDay: 3, toDay: 7 }); + return { sent: true, day: 3 }; + } finally { + writer.releaseLock(); + } +} + +async function sendFollowUpEmail(email: string) { + "use step"; + const writer = getWritable().getWriter(); + try { + await writer.write({ type: "email_sending", day: 7, label: "Follow-up & Feedback" }); + await 
delay(SEND_DELAY_MS); + await writer.write({ type: "email_sent", day: 7, label: "Follow-up & Feedback" }); + await writer.write({ type: "done" }); + return { sent: true, day: 7 }; + } finally { + writer.releaseLock(); + } +} +``` + +## Key APIs + +- [`"use workflow"`](/docs/foundations/workflows-and-steps) — marks the orchestrator function +- [`"use step"`](/docs/foundations/workflows-and-steps) — marks functions that run with full Node.js access +- [`sleep()`](/docs/api-reference/workflow/sleep) — durable wait between drip emails (days/weeks) +- [`getWritable()`](/docs/api-reference/step/get-writable) — streams events to the caller diff --git a/docs/content/docs/cookbook/notifications/publish-subscribe.mdx b/docs/content/docs/cookbook/notifications/publish-subscribe.mdx new file mode 100644 index 0000000000..81ba433c2e --- /dev/null +++ b/docs/content/docs/cookbook/notifications/publish-subscribe.mdx @@ -0,0 +1,211 @@ +--- +title: Publish-Subscribe +description: One publisher, many subscribers — broadcast-style distribution. +type: guide +summary: A product-update event triggers email, push notification, and analytics subscribers independently. +--- + +A product-update event triggers email, push notification, and analytics subscribers independently. + +## Pattern + +The workflow looks up subscribers from a registry, filters by topic, then delivers the message to each matching subscriber. Non-matching subscribers are skipped. This decouples the publisher from the subscriber list — adding a new subscriber is a registry change, not a code change. 
+ +### Simplified + +```typescript lineNumbers +type Subscriber = { id: string; name: string; topics: string[] }; + +declare function registerSubscribers(): Promise; // @setup +declare function deliverToSubscriber(subscriberId: string, topic: string, payload: string): Promise; // @setup + +export async function publishSubscribe(topic: string, payload: string) { + "use workflow"; + + const subscribers = await registerSubscribers(); + const matched = subscribers.filter((sub) => sub.topics.includes(topic)); + + for (const sub of matched) { + await deliverToSubscriber(sub.id, topic, payload); + } + + return { + topic, + delivered: matched.length, + skipped: subscribers.length - matched.length, + }; +} +``` + +### Full Implementation + +```typescript lineNumbers +import { getWritable, sleep } from "workflow"; + +export type Topic = "orders" | "inventory" | "shipping" | "analytics"; + +export type Subscriber = { + id: string; + name: string; + topics: Topic[]; +}; + +export type PubSubEvent = + | { type: "subscribers_registered"; subscribers: Subscriber[] } + | { type: "message_published"; topic: Topic; payload: string } + | { type: "filtering"; topic: Topic; total: number; matched: number } + | { type: "delivering"; subscriberId: string; subscriberName: string; topic: Topic } + | { type: "delivered"; subscriberId: string; subscriberName: string; topic: Topic } + | { type: "subscriber_skipped"; subscriberId: string; subscriberName: string; topic: Topic } + | { type: "done"; topic: Topic; delivered: number; skipped: number }; + +export interface PubSubResult { + topic: Topic; + delivered: number; + skipped: number; +} + +// Simulated subscriber registry — each subscriber listens to specific topics. +// In production this would come from a database or configuration service. 
+const SUBSCRIBER_REGISTRY: Subscriber[] = [ + { id: "sub-1", name: "Order Service", topics: ["orders", "inventory"] }, + { id: "sub-2", name: "Warehouse API", topics: ["inventory", "shipping"] }, + { id: "sub-3", name: "Email Notifier", topics: ["orders", "shipping"] }, + { id: "sub-4", name: "Analytics Pipeline", topics: ["orders", "inventory", "shipping", "analytics"] }, + { id: "sub-5", name: "Billing Service", topics: ["orders"] }, +]; + +// Demo timing +const REGISTER_DELAY_MS = 400; +const FILTER_DELAY_MS = 500; +const DELIVER_DELAY_MS = 600; + +function delay(ms: number): Promise { + return new Promise((resolve) => setTimeout(resolve, ms)); +} + +export async function publishSubscribeFlow( + topic: Topic, + payload: string +): Promise { + "use workflow"; + + // Step 1: Register subscribers from the registry + const subscribers = await registerSubscribers(); + + // Step 2: Publish message and filter by topic subscription + const matched = await filterSubscribers(topic, payload, subscribers); + + // Step 3: Deliver to each matching subscriber + const delivered = await deliverToSubscribers(topic, matched); + + // Step 4: Summarize results + return summarizeDelivery(topic, delivered, subscribers.length - matched.length); +} + +async function registerSubscribers(): Promise { + "use step"; + const writer = getWritable().getWriter(); + try { + await delay(REGISTER_DELAY_MS); + await writer.write({ + type: "subscribers_registered", + subscribers: SUBSCRIBER_REGISTRY, + }); + return SUBSCRIBER_REGISTRY; + } finally { + writer.releaseLock(); + } +} + +async function filterSubscribers( + topic: Topic, + payload: string, + subscribers: Subscriber[] +): Promise { + "use step"; + const writer = getWritable().getWriter(); + try { + await writer.write({ type: "message_published", topic, payload }); + await delay(FILTER_DELAY_MS); + + const matched = subscribers.filter((sub) => sub.topics.includes(topic)); + + await writer.write({ + type: "filtering", + topic, + total: 
subscribers.length, + matched: matched.length, + }); + + // Emit skip events for non-matching subscribers + for (const sub of subscribers) { + if (!sub.topics.includes(topic)) { + await writer.write({ + type: "subscriber_skipped", + subscriberId: sub.id, + subscriberName: sub.name, + topic, + }); + } + } + + return matched; + } finally { + writer.releaseLock(); + } +} + +async function deliverToSubscribers( + topic: Topic, + subscribers: Subscriber[] +): Promise { + "use step"; + const writer = getWritable().getWriter(); + try { + let delivered = 0; + + for (const sub of subscribers) { + await writer.write({ + type: "delivering", + subscriberId: sub.id, + subscriberName: sub.name, + topic, + }); + await delay(DELIVER_DELAY_MS); + await writer.write({ + type: "delivered", + subscriberId: sub.id, + subscriberName: sub.name, + topic, + }); + delivered += 1; + } + + return delivered; + } finally { + writer.releaseLock(); + } +} + +async function summarizeDelivery( + topic: Topic, + delivered: number, + skipped: number +): Promise { + "use step"; + const writer = getWritable().getWriter(); + try { + await writer.write({ type: "done", topic, delivered, skipped }); + return { topic, delivered, skipped }; + } finally { + writer.releaseLock(); + } +} +``` + +## Key APIs + +- [`"use workflow"`](/docs/foundations/workflows-and-steps) — marks the orchestrator function +- [`"use step"`](/docs/foundations/workflows-and-steps) — marks functions that run with full Node.js access +- [`getWritable()`](/docs/api-reference/step/get-writable) — streams events to the caller diff --git a/docs/content/docs/cookbook/notifications/recipient-list.mdx b/docs/content/docs/cookbook/notifications/recipient-list.mdx new file mode 100644 index 0000000000..f335df187f --- /dev/null +++ b/docs/content/docs/cookbook/notifications/recipient-list.mdx @@ -0,0 +1,264 @@ +--- +title: Recipient List +description: Same logical message delivered to a list of recipients (static or dynamic). 
+type: guide +summary: Evaluate severity rules at runtime and alert matching channels (Slack, email, PagerDuty). +--- + +Evaluate severity rules at runtime and alert matching channels (Slack, email, PagerDuty). + +## Pattern + +The workflow evaluates routing rules against the alert severity to build a dynamic recipient list. Matched channels receive the alert in parallel via `Promise.allSettled()`, while unmatched channels are skipped. This lets you add or change routing rules without modifying the delivery logic. + +### Simplified + +```typescript lineNumbers +type Severity = "info" | "warning" | "critical"; + +const RULES = [ + { channel: "slack", match: () => true }, + { channel: "email", match: (s: Severity) => s === "warning" || s === "critical" }, + { channel: "pagerduty", match: (s: Severity) => s === "critical" }, +]; + +declare function deliverToChannel(channel: string, alertId: string, message: string): Promise; // @setup + +export async function recipientList( + alertId: string, + message: string, + severity: Severity +) { + "use workflow"; + + const matched = RULES.filter((r) => r.match(severity)).map((r) => r.channel); + + await Promise.allSettled( + matched.map((channel) => deliverToChannel(channel, alertId, message)) + ); + + return { alertId, severity, matched }; +} +``` + +### Full Implementation + +```typescript lineNumbers +// getWritable + getStepMetadata are used here to stream demo UI events. +// A production workflow wouldn't need these unless it has its own streaming UI. +import { getStepMetadata, getWritable } from "workflow"; + +// Local FatalError — prevents the SDK's automatic retry for permanent failures. +// The workflow package does not export this class, so we define it here. 
+class FatalError extends Error { + constructor(message: string) { + super(message); + this.name = "FatalError"; + } +} + +export type RecipientChannel = "slack" | "email" | "pagerduty" | "webhook"; +export type Severity = "info" | "warning" | "critical"; + +export type RoutingRule = { + channel: RecipientChannel; + match: (severity: Severity) => boolean; +}; + +// Demo-only: configures which channels should fail (and how) in the +// interactive UI. In a real workflow you'd remove this entirely — your +// steps would call real APIs and failures would be organic. +export type DemoFailures = { + transient: RecipientChannel[]; + permanent: RecipientChannel[]; +}; + +export type RecipientEvent = + | { type: "rules_evaluated"; matched: string[]; skipped: string[] } + | { type: "delivering"; channel: string } + | { type: "delivered"; channel: string; durationMs: number } + | { type: "delivery_failed"; channel: string; error: string; attempt: number } + | { type: "delivery_retrying"; channel: string; attempt: number } + | { type: "done"; summary: { delivered: number; failed: number; skipped: number } }; + +// ── Routing rules (evaluated at runtime) ─────────────────────────────── +export const RULES: RoutingRule[] = [ + { channel: "slack", match: () => true }, + { channel: "email", match: (s) => s === "warning" || s === "critical" }, + { channel: "pagerduty", match: (s) => s === "critical" }, + { channel: "webhook", match: (s) => s !== "info" }, +]; + +const CHANNEL_ERROR_MESSAGES: Record = { + slack: "Slack API rate limit exceeded", + email: "Email provider returned 503", + pagerduty: "PagerDuty integration is not configured", + webhook: "Webhook endpoint timed out", +}; + +// Demo: simulate real-world network latency so the UI can show progress. +// In production, these delays would be replaced by actual API calls. 
+const CHANNEL_DELAY_MS: Record = { + slack: 650, + email: 900, + pagerduty: 750, + webhook: 1100, +}; + +// setTimeout is available here because delay() is only called from +// "use step" functions, which have full Node.js runtime access. +function delay(ms: number): Promise { + return new Promise((resolve) => setTimeout(resolve, ms)); +} + +type DeliveryResult = { + channel: RecipientChannel; + status: "delivered" | "failed"; + durationMs?: number; + error?: string; +}; + +export type RecipientListReport = { + alertId: string; + message: string; + severity: Severity; + status: "done"; + matched: RecipientChannel[]; + skipped: RecipientChannel[]; + deliveries: DeliveryResult[]; + summary: { delivered: number; failed: number; skipped: number }; +}; + +const NO_FAILURES: DemoFailures = { transient: [], permanent: [] }; + +// Demo entry point. The `failures` parameter is only used by the interactive +// UI to let users toggle simulated failures — strip it out when adapting +// this workflow for production use. 
+export async function recipientList( + alertId: string, + message: string, + severity: Severity = "warning", + failures: DemoFailures = NO_FAILURES +): Promise { + "use workflow"; + + const matched = RULES.filter((r) => r.match(severity)).map((r) => r.channel); + const skipped = RULES.filter((r) => !r.match(severity)).map((r) => r.channel); + + const writer = getWritable().getWriter(); + try { + await writer.write({ type: "rules_evaluated", matched, skipped }); + } finally { + writer.releaseLock(); + } + + const settled = await Promise.allSettled( + matched.map((channel) => deliverToRecipient(channel, alertId, message, failures)) + ); + + const deliveries: DeliveryResult[] = settled.map((result, index) => { + const channel = matched[index]; + if (result.status === "fulfilled") { + return { channel, status: "delivered", durationMs: result.value.durationMs }; + } + return { channel, status: "failed", error: `${channel}: ${errorMessage(result.reason)}` }; + }); + + return aggregateResults(alertId, message, severity, matched, skipped, deliveries); +} + +function errorMessage(reason: unknown): string { + if (reason instanceof Error) return reason.message; + if (typeof reason === "string") return reason; + return "Unknown delivery failure"; +} + +// Demo: shared delivery implementation for all channel steps. In production +// you'd replace the delay + simulated failures with a real API call per channel. +// The getWritable() streaming and getStepMetadata() calls are also demo-only +// — they power the live execution log in the UI. 
+async function deliverToRecipient( + channel: RecipientChannel, + alertId: string, + message: string, + failures: DemoFailures +): Promise<{ durationMs: number }> { + "use step"; + + const writer = getWritable().getWriter(); + const { attempt } = getStepMetadata(); + const start = Date.now(); + + try { + if (attempt > 1) { + await writer.write({ type: "delivery_retrying", channel, attempt }); + } + + await writer.write({ type: "delivering", channel }); + await delay(CHANNEL_DELAY_MS[channel]); + + // Permanent failure — FatalError prevents the SDK's automatic retry, + // so the channel stays failed in Promise.allSettled(). + if (failures.permanent.includes(channel)) { + const error = CHANNEL_ERROR_MESSAGES[channel]; + await writer.write({ type: "delivery_failed", channel, error, attempt }); + throw new FatalError(error); + } + + // Transient failure — throws a regular Error on attempt 1 so the SDK + // auto-retries. The retry will succeed, showing the recovery path. + if (attempt === 1 && failures.transient.includes(channel)) { + throw new Error(CHANNEL_ERROR_MESSAGES[channel]); + } + + const durationMs = Date.now() - start; + await writer.write({ type: "delivered", channel, durationMs }); + return { durationMs }; + } finally { + writer.releaseLock(); + } +} + +async function aggregateResults( + alertId: string, + message: string, + severity: Severity, + matched: RecipientChannel[], + skipped: RecipientChannel[], + deliveries: DeliveryResult[] +): Promise { + "use step"; + + const writer = getWritable().getWriter(); + + try { + const delivered = deliveries.filter((d) => d.status === "delivered").length; + const failed = deliveries.length - delivered; + const summary = { delivered, failed, skipped: skipped.length }; + + await writer.write({ type: "done", summary }); + + return { + alertId, + message, + severity, + status: "done", + matched, + skipped, + deliveries, + summary, + }; + } finally { + writer.releaseLock(); + } +} +``` + +## Key APIs + +- [`"use 
workflow"`](/docs/foundations/workflows-and-steps) — marks the orchestrator function +- [`"use step"`](/docs/foundations/workflows-and-steps) — marks functions that run with full Node.js access +- [`Promise.allSettled()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise/allSettled) — delivers to all matched recipients in parallel +- [`FatalError`](/docs/api-reference/workflow/fatal-error) — prevents automatic retry for permanent failures +- [`getStepMetadata()`](/docs/api-reference/step/get-step-metadata) — provides the current attempt number +- [`getWritable()`](/docs/api-reference/step/get-writable) — streams events to the caller diff --git a/docs/content/docs/cookbook/notifications/scheduled-digest.mdx b/docs/content/docs/cookbook/notifications/scheduled-digest.mdx new file mode 100644 index 0000000000..08376aa1ce --- /dev/null +++ b/docs/content/docs/cookbook/notifications/scheduled-digest.mdx @@ -0,0 +1,234 @@ +--- +title: Scheduled Digest +description: Accumulate activity and emit a summary on a schedule. +type: guide +summary: Open a 1-hour collection window for events, then email a digest when the window closes. +--- + +Open a 1-hour collection window for events, then email a digest when the window closes. + +## Pattern + +The workflow opens a time window using `sleep()` and races it against incoming events from a `defineHook`. Events accumulate in an array until the window closes. After the window, if any events were collected, a digest email is sent. The hook can receive multiple events during the window because it is awaited in a loop. 
+ +### Simplified + +```typescript lineNumbers +import { sleep, defineHook } from "workflow"; + +type EventPayload = { type: string; message: string }; + +declare function sendDigestEmail(userId: string, events: EventPayload[]): Promise; // @setup + +export const digestEvent = defineHook(); + +export async function collectAndSendDigest( + digestId: string, + userId: string, + windowMs: number = 3_600_000 +) { + "use workflow"; + + const hook = digestEvent.create({ token: `digest:${digestId}` }); + const windowClosed = sleep(`${windowMs}ms`).then(() => ({ + kind: "window_closed" as const, + })); + const events: EventPayload[] = []; + + while (true) { + const outcome = await Promise.race([ + hook.then((payload) => ({ kind: "event" as const, payload })), + windowClosed, + ]); + + if (outcome.kind === "window_closed") break; + events.push(outcome.payload); + } + + if (events.length > 0) { + await sendDigestEmail(userId, events); + } + + return { digestId, status: events.length > 0 ? "sent" : "empty", eventCount: events.length }; +} +``` + +### Full Implementation + +```typescript lineNumbers +import { sleep, defineHook, getWritable } from "workflow"; + +export type DigestEventPayload = { + type: string; + message: string; +}; + +export const digestEvent = defineHook(); + +export type DigestStreamEvent = + | { type: "window_open"; token: string; windowMs: number } + | { type: "event_received"; event: DigestEventPayload; eventCount: number } + | { type: "sleep_tick" } + | { type: "window_closed"; eventCount: number } + | { type: "sending_digest"; eventCount: number } + | { type: "digest_sent"; eventCount: number } + | { type: "digest_empty" } + | { type: "done"; status: "sent" | "empty"; eventCount: number }; + +export interface DigestResult { + digestId: string; + userId: string; + token: string; + status: "sent" | "empty"; + eventCount: number; + windowMs: number; +} + +const DEMO_WINDOW_MS = 6_000; + +export async function collectAndSendDigest( + digestId: string, + 
userId: string, + windowMs: number = DEMO_WINDOW_MS +): Promise { + "use workflow"; + + const token = `digest:${digestId}`; + const hook = digestEvent.create({ token }); + const windowClosed = sleep(`${windowMs}ms`).then(() => ({ + kind: "window_closed" as const, + })); + const events: DigestEventPayload[] = []; + + await emitWindowOpen(token, windowMs); + + while (true) { + const outcome = await Promise.race([ + hook.then((payload) => ({ + kind: "event" as const, + payload, + })), + windowClosed, + ]); + + if (outcome.kind === "window_closed") { + await emitWindowClosed(events.length); + break; + } + + events.push(outcome.payload); + await emitEventReceived(outcome.payload, events.length); + } + + if (events.length === 0) { + await emitDigestEmpty(); + return { + digestId, + userId, + token, + status: "empty", + eventCount: 0, + windowMs, + }; + } + + await sendDigestEmail(userId, events); + + await emitDone("sent", events.length); + + return { + digestId, + userId, + token, + status: "sent", + eventCount: events.length, + windowMs, + }; +} + +async function emitWindowOpen(token: string, windowMs: number) { + "use step"; + const writer = getWritable().getWriter(); + try { + await writer.write({ type: "window_open", token, windowMs }); + } finally { + writer.releaseLock(); + } +} + +async function emitEventReceived(event: DigestEventPayload, eventCount: number) { + "use step"; + const writer = getWritable().getWriter(); + try { + await writer.write({ type: "event_received", event, eventCount }); + } finally { + writer.releaseLock(); + } +} + +async function emitWindowClosed(eventCount: number) { + "use step"; + const writer = getWritable().getWriter(); + try { + await writer.write({ type: "window_closed", eventCount }); + } finally { + writer.releaseLock(); + } +} + +async function emitDigestEmpty() { + "use step"; + const writer = getWritable().getWriter(); + try { + await writer.write({ type: "digest_empty" }); + await writer.write({ type: "done", status: 
"empty", eventCount: 0 }); + } finally { + writer.releaseLock(); + } +} + +async function emitDone(status: "sent" | "empty", eventCount: number) { + "use step"; + const writer = getWritable().getWriter(); + try { + await writer.write({ type: "done", status, eventCount }); + } finally { + writer.releaseLock(); + } +} + +function delay(ms: number): Promise { + return new Promise((resolve) => setTimeout(resolve, ms)); +} + +async function sendDigestEmail( + userId: string, + events: DigestEventPayload[] +): Promise { + "use step"; + + const writer = getWritable().getWriter(); + try { + await writer.write({ type: "sending_digest", eventCount: events.length }); + await delay(500); + await writer.write({ type: "digest_sent", eventCount: events.length }); + } finally { + writer.releaseLock(); + } + + console.info("[scheduled-digest] send_digest", { + userId, + eventCount: events.length, + types: events.map((e) => e.type), + }); +} +``` + +## Key APIs + +- [`"use workflow"`](/docs/foundations/workflows-and-steps) — marks the orchestrator function +- [`"use step"`](/docs/foundations/workflows-and-steps) — marks functions that run with full Node.js access +- [`sleep()`](/docs/api-reference/workflow/sleep) — durable timer for the collection window +- [`defineHook`](/docs/api-reference/workflow/define-hook) — receives events from external systems during the window +- [`Promise.race()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise/race) — races incoming events against the window closing +- [`getWritable()`](/docs/api-reference/step/get-writable) — streams events to the caller diff --git a/docs/content/docs/cookbook/notifications/wakeable-reminder.mdx b/docs/content/docs/cookbook/notifications/wakeable-reminder.mdx new file mode 100644 index 0000000000..7e1eae2d54 --- /dev/null +++ b/docs/content/docs/cookbook/notifications/wakeable-reminder.mdx @@ -0,0 +1,183 @@ +--- +title: Wakeable Reminder +description: Sleep until a deadline or 
wake early when an external event arrives. +type: guide +summary: Schedule a payment reminder for 3 days out, but let the user cancel, snooze, or pay early via webhook. +--- + +Schedule a payment reminder for 3 days out, but let the user cancel, snooze, or pay early via webhook. + +## Pattern + +The workflow races a durable `sleep()` against a `defineHook` that listens for external events. If the hook fires before the sleep completes, the workflow handles the action (cancel, snooze, or send now). This lets external systems interrupt a long-running wait without polling. + +### Simplified + +```typescript lineNumbers +import { defineHook, sleep } from "workflow"; + +type ReminderAction = + | { type: "cancel" } + | { type: "send_now" } + | { type: "snooze"; seconds: number }; + +declare function sendReminderEmail(userId: string): Promise; // @setup + +export const reminderActionHook = defineHook(); + +export async function scheduleReminder(userId: string, delayMs: number) { + "use workflow"; + + let sendAt = new Date(Date.now() + delayMs); + const action = reminderActionHook.create({ token: `reminder:${userId}` }); + + const outcome = await Promise.race([ + sleep(sendAt).then(() => ({ kind: "time" as const })), + action.then((payload) => ({ kind: "action" as const, payload })), + ]); + + if (outcome.kind === "action") { + if (outcome.payload.type === "cancel") { + return { userId, status: "cancelled" }; + } + if (outcome.payload.type === "snooze") { + sendAt = new Date(Date.now() + outcome.payload.seconds * 1000); + await sleep(sendAt); + } + } + + await sendReminderEmail(userId); + return { userId, status: "sent" }; +} +``` + +### Full Implementation + +```typescript lineNumbers +import { defineHook, getWritable, sleep } from "workflow"; + +export type ReminderAction = + | { type: "cancel" } + | { type: "send_now" } + | { type: "snooze"; seconds: number }; + +export type ReminderResult = { + userId: string; + status: "sent" | "cancelled"; + sentAt?: string; + token: 
string; +}; + +export type ReminderEvent = + | { type: "scheduled"; userId: string; sendAtMs: number; token: string; metadata: { userId: string; initialSendAt: string; channel: string } } + | { type: "sleeping"; sendAtMs: number } + | { type: "action_received"; action: ReminderAction } + | { type: "snoozed"; sendAtMs: number } + | { type: "woke" } + | { type: "sending" } + | { type: "sent" } + | { type: "cancelled" } + | { type: "done"; status: "sent" | "cancelled" }; + +export const reminderActionHook = defineHook(); + +export async function scheduleReminder( + userId: string, + delayMs: number +): Promise { + "use workflow"; + + let sendAt = new Date(Date.now() + delayMs); + + const action = reminderActionHook.create({ + token: `reminder:${userId}`, + metadata: { + userId, + initialSendAt: sendAt.toISOString(), + channel: "email", + }, + }); + + await emit({ + type: "scheduled", + userId, + sendAtMs: sendAt.getTime(), + token: action.token, + metadata: { + userId, + initialSendAt: sendAt.toISOString(), + channel: "email", + }, + }); + await emit({ type: "sleeping", sendAtMs: sendAt.getTime() }); + + const outcome = await Promise.race([ + sleep(sendAt).then(() => ({ kind: "time" as const })), + action.then((payload) => ({ kind: "action" as const, payload })), + ]); + + if (outcome.kind === "action") { + await emit({ type: "action_received", action: outcome.payload }); + + if (outcome.payload.type === "cancel") { + await emit({ type: "cancelled" }); + await emit({ type: "done", status: "cancelled" }); + return { userId, status: "cancelled", token: action.token }; + } + + if (outcome.payload.type === "snooze") { + sendAt = new Date(Date.now() + outcome.payload.seconds * 1000); + await emit({ type: "snoozed", sendAtMs: sendAt.getTime() }); + await sleep(sendAt); + } + + if (outcome.payload.type === "send_now") { + await emit({ type: "woke" }); + } + } + + await emit({ type: "sending" }); + await sendReminderEmail(userId, sendAt); + await emit({ type: "sent" }); + 
await emit({ type: "done", status: "sent" });
+
+  return {
+    userId,
+    status: "sent",
+    sentAt: sendAt.toISOString(),
+    token: action.token,
+  };
+}
+
+/**
+ * Step: Emit a single event to the UI stream.
+ * Re-acquires the writer inside the step so it survives durable suspension.
+ */
+async function emit<T extends ReminderEvent>(event: T): Promise<void> {
+  "use step";
+  const writer = getWritable().getWriter();
+  try {
+    await writer.write(event);
+  } finally {
+    writer.releaseLock();
+  }
+}
+
+async function sendReminderEmail(userId: string, sendAt: Date) {
+  "use step";
+  await new Promise((resolve) => setTimeout(resolve, 500));
+  console.info("[wakeable-reminder] send_email", {
+    userId,
+    scheduledFor: sendAt.toISOString(),
+  });
+}
+```
+
+## Key APIs
+
+- [`"use workflow"`](/docs/foundations/workflows-and-steps) — marks the orchestrator function
+- [`"use step"`](/docs/foundations/workflows-and-steps) — marks functions that run with full Node.js access
+- [`sleep()`](/docs/api-reference/workflow/sleep) — durable wait until a deadline
+- [`defineHook`](/docs/api-reference/workflow/define-hook) — creates a hook that external systems can trigger
+- [`Promise.race()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise/race) — races the sleep against the hook
+- [`getWritable()`](/docs/api-reference/step/get-writable) — streams events to the caller
diff --git a/docs/content/docs/cookbook/observability/correlation-identifier.mdx b/docs/content/docs/cookbook/observability/correlation-identifier.mdx
new file mode 100644
index 0000000000..5a45ec370a
--- /dev/null
+++ b/docs/content/docs/cookbook/observability/correlation-identifier.mdx
@@ -0,0 +1,274 @@
+---
+title: Correlation Identifier
+description: Tie outbound requests to the right workflow run when async replies arrive.
+type: guide
+summary: Tag outbound API calls with a correlation ID so async responses match back to the right order.
+--- + +When your workflow sends requests to external services that respond asynchronously, you need a way to match responses back to the originating request. A correlation identifier tags each outbound call with a unique ID that the response carries back. + +## Pattern + +The workflow generates a unique correlation ID, attaches it to the outbound request, then durably waits for the async response using `sleep()`. When the response arrives, the correlation ID is verified to match the original request before delivering the result. + +### Simplified + +```typescript lineNumbers +import { sleep } from "workflow"; + +declare function generateCorrelationId(requestId: string): Promise; // @setup +declare function sendRequest(requestId: string, correlationId: string, service: string, payload: string): Promise; // @setup +declare function awaitResponse(requestId: string, correlationId: string): Promise; // @setup +declare function matchAndDeliver(requestId: string, correlationId: string, service: string): Promise; // @setup + +export async function correlationIdentifierFlow( + requestId: string, + service: string, + payload: string, +) { + "use workflow"; + + const correlationId = await generateCorrelationId(requestId); + await sendRequest(requestId, correlationId, service, payload); + + const matched = await awaitResponse(requestId, correlationId); + if (!matched) { + return { requestId, correlationId, status: "timeout" }; + } + + await matchAndDeliver(requestId, correlationId, service); + return { requestId, correlationId, status: "delivered" }; +} +``` + +### Full Implementation + +```typescript lineNumbers +import { getWritable, sleep } from "workflow"; + +export type RequestStatus = + | "pending" + | "sent" + | "awaiting_response" + | "matched" + | "delivered" + | "timeout"; + +export type CorrelationEvent = + | { type: "correlation_id_generated"; requestId: string; correlationId: string } + | { type: "request_sent"; requestId: string; correlationId: string; service: 
string } + | { type: "awaiting_response"; requestId: string; correlationId: string; timeoutMs: number } + | { type: "response_received"; requestId: string; correlationId: string; responseService: string; latencyMs: number } + | { type: "correlation_matched"; requestId: string; correlationId: string; requestPayloadHash: string; responsePayloadHash: string } + | { type: "delivery_complete"; requestId: string; correlationId: string; destination: string } + | { type: "timeout_expired"; requestId: string; correlationId: string } + | { type: "done"; requestId: string; correlationId: string; status: RequestStatus; totalSteps: number }; + +export interface CorrelationIdentifierResult { + requestId: string; + correlationId: string; + status: RequestStatus; + totalSteps: number; +} + +// Simulated external services +const SERVICES = ["payment-api", "inventory-api", "shipping-api", "notification-api"] as const; +export type ServiceName = (typeof SERVICES)[number]; + +// Demo timing +const GENERATE_DELAY_MS = 400; +const SEND_DELAY_MS = 600; +const RESPONSE_MIN_MS = 500; +const RESPONSE_MAX_MS = 1500; +const MATCH_DELAY_MS = 300; +const DELIVER_DELAY_MS = 400; +const RESPONSE_TIMEOUT_MS = 5000; + +function delay(ms: number): Promise { + return new Promise((resolve) => setTimeout(resolve, ms)); +} + +function generateId(): string { + return `corr-${Date.now().toString(36)}-${Math.random().toString(36).slice(2, 8)}`; +} + +function hashPayload(payload: string): string { + let hash = 0; + for (let i = 0; i < payload.length; i++) { + hash = ((hash << 5) - hash + payload.charCodeAt(i)) | 0; + } + return Math.abs(hash).toString(16).padStart(8, "0"); +} + +export async function correlationIdentifierFlow( + requestId: string, + service: ServiceName, + payload: string +): Promise { + "use workflow"; + + // Step 1: Generate a unique correlation ID for this request + const correlationId = await generateCorrelationId(requestId); + + // Step 2: Send the request with correlation ID attached 
+ await sendRequest(requestId, correlationId, service, payload); + + // Step 3: Await and match the async response using correlation ID + const matched = await awaitResponse(requestId, correlationId, service, payload); + + if (!matched) { + await emitEvent({ + type: "done", + requestId, + correlationId, + status: "timeout", + totalSteps: 3, + }); + return { requestId, correlationId, status: "timeout", totalSteps: 3 }; + } + + // Step 4: Deliver the matched response to the caller + await matchAndDeliver(requestId, correlationId, service, payload); + + await emitEvent({ + type: "done", + requestId, + correlationId, + status: "delivered", + totalSteps: 4, + }); + + return { requestId, correlationId, status: "delivered", totalSteps: 4 }; +} + +async function generateCorrelationId(requestId: string): Promise { + "use step"; + + const writer = getWritable().getWriter(); + try { + await delay(GENERATE_DELAY_MS); + const correlationId = generateId(); + await writer.write({ + type: "correlation_id_generated", + requestId, + correlationId, + }); + return correlationId; + } finally { + writer.releaseLock(); + } +} + +async function sendRequest( + requestId: string, + correlationId: string, + service: ServiceName, + _payload: string +): Promise { + "use step"; + + const writer = getWritable().getWriter(); + try { + await delay(SEND_DELAY_MS); + await writer.write({ + type: "request_sent", + requestId, + correlationId, + service, + }); + } finally { + writer.releaseLock(); + } +} + +async function awaitResponse( + requestId: string, + correlationId: string, + _service: ServiceName, + _payload: string +): Promise { + "use step"; + + const writer = getWritable().getWriter(); + try { + await writer.write({ + type: "awaiting_response", + requestId, + correlationId, + timeoutMs: RESPONSE_TIMEOUT_MS, + }); + + // Simulate waiting for async response with durable sleep + const responseLatency = + RESPONSE_MIN_MS + Math.random() * (RESPONSE_MAX_MS - RESPONSE_MIN_MS); + + await 
sleep(`${Math.round(responseLatency)}ms`); + + // Simulate: response arrives before timeout (demo always succeeds) + await writer.write({ + type: "response_received", + requestId, + correlationId, + responseService: _service, + latencyMs: Math.round(responseLatency), + }); + + return true; + } finally { + writer.releaseLock(); + } +} + +async function matchAndDeliver( + requestId: string, + correlationId: string, + service: ServiceName, + payload: string +): Promise { + "use step"; + + const writer = getWritable().getWriter(); + try { + // Match: verify correlation ID on response matches the original request + const requestHash = hashPayload(payload); + const responseHash = hashPayload(`response-for-${payload}`); + + await delay(MATCH_DELAY_MS); + await writer.write({ + type: "correlation_matched", + requestId, + correlationId, + requestPayloadHash: requestHash, + responsePayloadHash: responseHash, + }); + + // Deliver the matched response to the caller + await delay(DELIVER_DELAY_MS); + await writer.write({ + type: "delivery_complete", + requestId, + correlationId, + destination: `${service}-callback`, + }); + } finally { + writer.releaseLock(); + } +} + +async function emitEvent(event: CorrelationEvent): Promise { + "use step"; + const writer = getWritable().getWriter(); + try { + await writer.write(event); + } finally { + writer.releaseLock(); + } +} +``` + +## Key APIs + +- [`"use workflow"`](/docs/api-reference/directives/use-workflow) — marks the orchestrator function +- [`"use step"`](/docs/api-reference/directives/use-step) — marks each phase as a durable step +- [`sleep()`](/docs/api-reference/workflow/sleep) — durably waits for the async response +- [`getWritable()`](/docs/api-reference/step/get-writable) — streams correlation events to the client diff --git a/docs/content/docs/cookbook/observability/event-sourcing.mdx b/docs/content/docs/cookbook/observability/event-sourcing.mdx new file mode 100644 index 0000000000..d3ddcec749 --- /dev/null +++ 
b/docs/content/docs/cookbook/observability/event-sourcing.mdx @@ -0,0 +1,307 @@ +--- +title: Event Sourcing +description: Drive behavior from an append-only event log; rebuild or audit state from history. +type: guide +summary: Append domain events to an immutable log and replay them to detect bugs or migrate projections. +--- + +When you need a complete, auditable record of every state change -- and the ability to rebuild state by replaying history -- use event sourcing. Commands are validated against the current projection, domain events are appended to an immutable log, and the projection is rebuilt from scratch to verify consistency. + +## Pattern + +The workflow accepts a sequence of commands. A step function validates each command against the current projection, converts valid commands to domain events, and appends them to the log. A second step replays the entire event log from scratch to rebuild the projection and verify it matches. + +### Simplified + +```typescript lineNumbers +type CommandType = "CreateOrder" | "AuthorizePayment" | "ReserveInventory" | "ShipOrder"; +type DomainEvent = { kind: string; orderId: string; timestamp: number; [key: string]: unknown }; +type Projection = { orderId: string; status: string }; + +declare function processCommands(id: string, commands: CommandType[]): Promise<{ eventLog: DomainEvent[]; projection: Projection }>; // @setup +declare function replayEventLog(id: string, eventLog: DomainEvent[]): Promise<{ projection: Projection }>; // @setup + +export async function eventSourcing( + aggregateId: string, + commands: CommandType[], +) { + "use workflow"; + + const result = await processCommands(aggregateId, commands); + const replay = await replayEventLog(aggregateId, result.eventLog); + + return { aggregateId, eventLog: result.eventLog, projection: replay.projection }; +} +``` + +### Full Implementation + +```typescript lineNumbers +// getWritable is used here to stream demo UI events. 
+// A production workflow wouldn't need these unless it has its own streaming UI. +import { getWritable } from "workflow"; + +export type CommandType = + | "CreateOrder" + | "AuthorizePayment" + | "ReserveInventory" + | "ShipOrder" + | "CancelOrder"; + +export type DomainEvent = + | { kind: "OrderCreated"; orderId: string; timestamp: number } + | { kind: "PaymentAuthorized"; orderId: string; amount: number; timestamp: number } + | { kind: "InventoryReserved"; orderId: string; sku: string; timestamp: number } + | { kind: "OrderShipped"; orderId: string; trackingId: string; timestamp: number } + | { kind: "OrderCancelled"; orderId: string; reason: string; timestamp: number }; + +export type Projection = { + orderId: string; + status: "none" | "created" | "authorized" | "reserved" | "shipped" | "cancelled"; + paymentAuthorized: boolean; + inventoryReserved: boolean; + trackingId: string | null; +}; + +export type ESEvent = + | { type: "command_endpoint_ready"; aggregateId: string } + | { type: "command_received"; command: CommandType; aggregateId: string } + | { type: "event_appended"; event: DomainEvent; index: number } + | { type: "projection_updated"; projection: Projection } + | { type: "invalid_command"; command: CommandType; reason: string } + | { type: "replay_started"; eventCount: number } + | { type: "replay_progress"; index: number; event: DomainEvent; projection: Projection } + | { type: "replay_completed"; projection: Projection } + | { type: "done"; eventLog: DomainEvent[]; projection: Projection }; + +type AggregateReport = { + status: "done"; + aggregateId: string; + eventLog: DomainEvent[]; + projection: Projection; +}; + +// Demo: simulated processing latency so the UI can show progress +const COMMAND_DELAY_MS = 300; +const REPLAY_STEP_DELAY_MS = 400; + +function delay(ms: number): Promise { + return new Promise((resolve) => setTimeout(resolve, ms)); +} + +function emptyProjection(orderId: string): Projection { + return { + orderId, + status: "none", 
+ paymentAuthorized: false, + inventoryReserved: false, + trackingId: null, + }; +} + +export function applyDomainEvent(projection: Projection, event: DomainEvent): Projection { + switch (event.kind) { + case "OrderCreated": + return { ...projection, status: "created" }; + case "PaymentAuthorized": + return { ...projection, status: "authorized", paymentAuthorized: true }; + case "InventoryReserved": + return { ...projection, status: "reserved", inventoryReserved: true }; + case "OrderShipped": + return { ...projection, status: "shipped", trackingId: event.trackingId }; + case "OrderCancelled": + return { ...projection, status: "cancelled" }; + default: + return projection; + } +} + +export function validateCommand( + command: CommandType, + projection: Projection +): { valid: true } | { valid: false; reason: string } { + switch (command) { + case "CreateOrder": + if (projection.status !== "none") + return { valid: false, reason: "Order already exists" }; + return { valid: true }; + case "AuthorizePayment": + if (projection.status !== "created") + return { valid: false, reason: "Order must be created first" }; + return { valid: true }; + case "ReserveInventory": + if (projection.status !== "authorized") + return { valid: false, reason: "Payment must be authorized first" }; + return { valid: true }; + case "ShipOrder": + if (projection.status !== "reserved") + return { valid: false, reason: "Inventory must be reserved first" }; + return { valid: true }; + case "CancelOrder": + if (projection.status === "none") + return { valid: false, reason: "No order to cancel" }; + if (projection.status === "shipped") + return { valid: false, reason: "Cannot cancel a shipped order" }; + if (projection.status === "cancelled") + return { valid: false, reason: "Order already cancelled" }; + return { valid: true }; + default: + return { valid: false, reason: `Unknown command: ${command}` }; + } +} + +function commandToEvent(command: CommandType, orderId: string): DomainEvent { + const 
timestamp = Date.now(); + switch (command) { + case "CreateOrder": + return { kind: "OrderCreated", orderId, timestamp }; + case "AuthorizePayment": + return { kind: "PaymentAuthorized", orderId, amount: 99.99, timestamp }; + case "ReserveInventory": + return { kind: "InventoryReserved", orderId, sku: "SKU-001", timestamp }; + case "ShipOrder": + return { kind: "OrderShipped", orderId, trackingId: `TRK-${Date.now()}`, timestamp }; + case "CancelOrder": + return { kind: "OrderCancelled", orderId, reason: "Customer requested", timestamp }; + } +} + +// The workflow accepts a sequence of commands and processes them against +// an append-only event log with projection rebuild. +export async function eventSourcing( + aggregateId: string, + commands: CommandType[] +): Promise { + "use workflow"; + + const eventLog: DomainEvent[] = []; + let projection = emptyProjection(aggregateId); + + // Process each command against the current projection + const processResult = await processCommands( + aggregateId, + commands, + eventLog, + projection + ); + projection = processResult.projection; + + // Replay: rebuild projection from the event log to verify consistency + const replayResult = await replayEventLog(aggregateId, processResult.eventLog); + + return finalizeAggregate(aggregateId, processResult.eventLog, replayResult.projection); +} + +async function processCommands( + aggregateId: string, + commands: CommandType[], + eventLog: DomainEvent[], + projection: Projection +): Promise<{ eventLog: DomainEvent[]; projection: Projection }> { + "use step"; + + const writer = getWritable().getWriter(); + + try { + await writer.write({ type: "command_endpoint_ready", aggregateId }); + + for (const command of commands) { + await writer.write({ type: "command_received", command, aggregateId }); + await delay(COMMAND_DELAY_MS); + + const validation = validateCommand(command, projection); + + if (!validation.valid) { + await writer.write({ + type: "invalid_command", + command, + reason: 
validation.reason, + }); + continue; + } + + const domainEvent = commandToEvent(command, aggregateId); + eventLog.push(domainEvent); + + await writer.write({ + type: "event_appended", + event: domainEvent, + index: eventLog.length - 1, + }); + + projection = applyDomainEvent(projection, domainEvent); + + await writer.write({ + type: "projection_updated", + projection, + }); + } + + return { eventLog: [...eventLog], projection }; + } finally { + writer.releaseLock(); + } +} + +async function replayEventLog( + aggregateId: string, + eventLog: DomainEvent[] +): Promise<{ projection: Projection }> { + "use step"; + + const writer = getWritable().getWriter(); + + try { + await writer.write({ + type: "replay_started", + eventCount: eventLog.length, + }); + + let projection = emptyProjection(aggregateId); + + for (let i = 0; i < eventLog.length; i++) { + await delay(REPLAY_STEP_DELAY_MS); + projection = applyDomainEvent(projection, eventLog[i]); + await writer.write({ + type: "replay_progress", + index: i, + event: eventLog[i], + projection, + }); + } + + await writer.write({ + type: "replay_completed", + projection, + }); + + return { projection }; + } finally { + writer.releaseLock(); + } +} + +async function finalizeAggregate( + aggregateId: string, + eventLog: DomainEvent[], + projection: Projection +): Promise { + "use step"; + + const writer = getWritable().getWriter(); + + try { + await delay(200); + await writer.write({ type: "done", eventLog, projection }); + return { status: "done", aggregateId, eventLog, projection }; + } finally { + writer.releaseLock(); + } +} +``` + +## Key APIs + +- [`"use workflow"`](/docs/api-reference/directives/use-workflow) — marks the orchestrator function +- [`"use step"`](/docs/api-reference/directives/use-step) — marks command processing, replay, and finalization as durable steps +- [`getWritable()`](/docs/api-reference/step/get-writable) — streams command and replay progress to the client diff --git 
a/docs/content/docs/cookbook/observability/message-history.mdx b/docs/content/docs/cookbook/observability/message-history.mdx new file mode 100644 index 0000000000..8637811543 --- /dev/null +++ b/docs/content/docs/cookbook/observability/message-history.mdx @@ -0,0 +1,567 @@ +--- +title: Message History +description: Keep an audit trail of what passed through the flow and in what order. +type: guide +summary: Track a support ticket through normalize, classify, route, dispatch with full history at each step. +--- + +When you need a complete audit trail showing every transformation a message underwent and in what order, use message history. Each step appends an entry to a history array carried alongside the payload, so the final result contains the full processing record. + +## Pattern + +The workflow passes an envelope containing both the payload and a history array through each step. Every step appends its action, timestamp, and outcome to the history before returning the updated envelope. On failure, the error is recorded in the history before the workflow completes with a failed status. 
+ +### Simplified + +```typescript lineNumbers +type HistoryEntry = { step: string; action: string; timestamp: string; correlationId: string }; +type Envelope = { payload: Record; history: HistoryEntry[]; status: string }; + +declare function normalizeTicket(envelope: Envelope): Promise; // @setup +declare function classifySeverity(envelope: Envelope): Promise; // @setup +declare function chooseRoute(envelope: Envelope): Promise; // @setup +declare function dispatchTicket(envelope: Envelope): Promise; // @setup + +export async function messageHistory( + correlationId: string, + subject: string, + body: string, +) { + "use workflow"; + + let envelope: Envelope = { + payload: { correlationId, subject, body }, + history: [], + status: "processing", + }; + + envelope = await normalizeTicket(envelope); + envelope = await classifySeverity(envelope); + envelope = await chooseRoute(envelope); + envelope = await dispatchTicket(envelope); + + return { ...envelope, status: "completed" }; +} +``` + +### Full Implementation + +```typescript lineNumbers +// getWritable is used here to stream demo UI events. +// A production workflow wouldn't need this unless it has its own streaming UI. 
+import { getWritable } from "workflow"; + +// ── Types ─────────────────────────────────────────────────────────────── + +export type Severity = "low" | "medium" | "high" | "critical"; +export type Route = "self-service" | "tier-1" | "tier-2" | "escalation"; + +export type HistoryEntry = { + step: string; + action: "started" | "succeeded" | "failed" | "decision"; + timestamp: string; + attempt: number; + correlationId: string; + detail?: Record; + error?: ApiError; +}; + +export type ApiError = { + code: string; + message: string; +}; + +export type TicketEnvelope = { + payload: { + correlationId: string; + subject: string; + body: string; + severity: Severity | null; + route: Route | null; + dispatchedTo: string | null; + }; + history: HistoryEntry[]; + status: "processing" | "completed" | "failed"; +}; + +export type HistoryEvent = + | { type: "step_started"; step: string; message: string } + | { type: "step_succeeded"; step: string; message: string } + | { + type: "step_failed"; + step: string; + message: string; + error: ApiError; + } + | { + type: "decision"; + step: string; + message: string; + detail: Record; + } + | { type: "done"; envelope: TicketEnvelope }; + +// ── Helpers ───────────────────────────────────────────────────────────── + +function delay(ms: number): Promise { + return new Promise((resolve) => setTimeout(resolve, ms)); +} + +function appendHistory( + envelope: TicketEnvelope, + entry: Omit +): TicketEnvelope { + return { + ...envelope, + history: [ + ...envelope.history, + { ...entry, timestamp: new Date().toISOString() }, + ], + }; +} + +// ── Demo configuration ────────────────────────────────────────────────── + +const NORMALIZE_DELAY_MS = 400; +const CLASSIFY_DELAY_MS = 600; +const ROUTE_DELAY_MS = 300; +const DISPATCH_DELAY_MS = 700; +const FINALIZE_DELAY_MS = 200; + +const SEVERITY_KEYWORDS: Record = { + crash: "critical", + down: "critical", + outage: "critical", + urgent: "high", + broken: "high", + error: "medium", + bug: 
"medium", + slow: "low", + question: "low", +}; + +const ROUTE_MAP: Record = { + low: "self-service", + medium: "tier-1", + high: "tier-2", + critical: "escalation", +}; + +const DISPATCH_TARGETS: Record = { + "self-service": "Knowledge Base Bot", + "tier-1": "Support Agent Pool", + "tier-2": "Senior Engineer On-Call", + escalation: "Incident Commander", +}; + +// ── Workflow ───────────────────────────────────────────────────────────── + +export async function messageHistory( + correlationId: string, + subject: string, + body: string, + failAtStep: string | null = null +): Promise { + "use workflow"; + + let envelope: TicketEnvelope = { + payload: { + correlationId, + subject, + body, + severity: null, + route: null, + dispatchedTo: null, + }, + history: [], + status: "processing", + }; + + try { + envelope = await createEnvelope(envelope); + envelope = await normalizeTicket(envelope, failAtStep); + envelope = await classifySeverity(envelope, failAtStep); + envelope = await chooseRoute(envelope, failAtStep); + envelope = await dispatchTicket(envelope, failAtStep); + envelope = await finalizeSuccess(envelope); + } catch (err) { + const apiError: ApiError = + err instanceof Error + ? 
{ code: "WORKFLOW_FAILED", message: err.message } + : { code: "UNKNOWN", message: "Unknown error" }; + envelope = await finalizeFailure(envelope, apiError); + } + + return envelope; +} + +// ── Steps ──────────────────────────────────────────────────────────────── + +async function createEnvelope( + envelope: TicketEnvelope +): Promise { + "use step"; + const writer = getWritable().getWriter(); + + try { + await writer.write({ + type: "step_started", + step: "createEnvelope", + message: `Creating envelope for ticket ${envelope.payload.correlationId}`, + }); + + const result = appendHistory(envelope, { + step: "createEnvelope", + action: "succeeded", + attempt: 1, + correlationId: envelope.payload.correlationId, + detail: { correlationId: envelope.payload.correlationId }, + }); + + await writer.write({ + type: "step_succeeded", + step: "createEnvelope", + message: `Envelope created with correlationId ${envelope.payload.correlationId}`, + }); + + return result; + } finally { + writer.releaseLock(); + } +} + +async function normalizeTicket( + envelope: TicketEnvelope, + failAtStep: string | null +): Promise { + "use step"; + const writer = getWritable().getWriter(); + + try { + await writer.write({ + type: "step_started", + step: "normalizeTicket", + message: "Normalizing ticket text", + }); + + await delay(NORMALIZE_DELAY_MS); + + if (failAtStep === "normalizeTicket") { + const apiError: ApiError = { + code: "SERVICE_UNAVAILABLE", + message: "Normalization service unavailable", + }; + const failed = appendHistory(envelope, { + step: "normalizeTicket", + action: "failed", + attempt: 1, + correlationId: envelope.payload.correlationId, + error: apiError, + }); + await writer.write({ + type: "step_failed", + step: "normalizeTicket", + message: "Normalization service unavailable", + error: apiError, + }); + throw new Error(apiError.message); + } + + const normalizedSubject = envelope.payload.subject.trim().toLowerCase(); + const normalizedBody = 
envelope.payload.body.trim().toLowerCase(); + + const result = appendHistory( + { + ...envelope, + payload: { + ...envelope.payload, + subject: normalizedSubject, + body: normalizedBody, + }, + }, + { + step: "normalizeTicket", + action: "succeeded", + attempt: 1, + correlationId: envelope.payload.correlationId, + detail: { normalizedSubject }, + } + ); + + await writer.write({ + type: "step_succeeded", + step: "normalizeTicket", + message: `Normalized: "${normalizedSubject}"`, + }); + + return result; + } finally { + writer.releaseLock(); + } +} + +async function classifySeverity( + envelope: TicketEnvelope, + failAtStep: string | null +): Promise { + "use step"; + const writer = getWritable().getWriter(); + + try { + await writer.write({ + type: "step_started", + step: "classifySeverity", + message: "Classifying ticket severity", + }); + + await delay(CLASSIFY_DELAY_MS); + + if (failAtStep === "classifySeverity") { + const apiError: ApiError = { + code: "MODEL_TIMEOUT", + message: "Classification model timeout", + }; + appendHistory(envelope, { + step: "classifySeverity", + action: "failed", + attempt: 1, + correlationId: envelope.payload.correlationId, + error: apiError, + }); + await writer.write({ + type: "step_failed", + step: "classifySeverity", + message: "Classification model timeout", + error: apiError, + }); + throw new Error(apiError.message); + } + + const combined = `${envelope.payload.subject} ${envelope.payload.body}`; + let severity: Severity = "low"; + for (const [keyword, level] of Object.entries(SEVERITY_KEYWORDS)) { + if (combined.includes(keyword)) { + severity = level; + break; + } + } + + const result = appendHistory( + { ...envelope, payload: { ...envelope.payload, severity } }, + { + step: "classifySeverity", + action: "decision", + attempt: 1, + correlationId: envelope.payload.correlationId, + detail: { severity, matchedText: combined.slice(0, 80) }, + } + ); + + await writer.write({ + type: "decision", + step: "classifySeverity", + 
message: `Classified as ${severity}`, + detail: { severity }, + }); + + return result; + } finally { + writer.releaseLock(); + } +} + +async function chooseRoute( + envelope: TicketEnvelope, + failAtStep: string | null +): Promise { + "use step"; + const writer = getWritable().getWriter(); + + try { + await writer.write({ + type: "step_started", + step: "chooseRoute", + message: "Choosing routing destination", + }); + + await delay(ROUTE_DELAY_MS); + + if (failAtStep === "chooseRoute") { + const apiError: ApiError = { + code: "ROUTING_UNAVAILABLE", + message: "Routing table unavailable", + }; + appendHistory(envelope, { + step: "chooseRoute", + action: "failed", + attempt: 1, + correlationId: envelope.payload.correlationId, + error: apiError, + }); + await writer.write({ + type: "step_failed", + step: "chooseRoute", + message: "Routing table unavailable", + error: apiError, + }); + throw new Error(apiError.message); + } + + const route = ROUTE_MAP[envelope.payload.severity ?? "low"]; + + const result = appendHistory( + { ...envelope, payload: { ...envelope.payload, route } }, + { + step: "chooseRoute", + action: "decision", + attempt: 1, + correlationId: envelope.payload.correlationId, + detail: { route, basedOnSeverity: envelope.payload.severity }, + } + ); + + await writer.write({ + type: "decision", + step: "chooseRoute", + message: `Routed to ${route}`, + detail: { route, basedOnSeverity: envelope.payload.severity }, + }); + + return result; + } finally { + writer.releaseLock(); + } +} + +async function dispatchTicket( + envelope: TicketEnvelope, + failAtStep: string | null +): Promise { + "use step"; + const writer = getWritable().getWriter(); + + try { + const target = + DISPATCH_TARGETS[envelope.payload.route ?? 
"self-service"]; + + await writer.write({ + type: "step_started", + step: "dispatchTicket", + message: `Dispatching to ${target}`, + }); + + await delay(DISPATCH_DELAY_MS); + + if (failAtStep === "dispatchTicket") { + const apiError: ApiError = { + code: "DISPATCH_FAILED", + message: `Failed to dispatch to ${target}`, + }; + appendHistory(envelope, { + step: "dispatchTicket", + action: "failed", + attempt: 1, + correlationId: envelope.payload.correlationId, + error: apiError, + }); + await writer.write({ + type: "step_failed", + step: "dispatchTicket", + message: `Failed to dispatch to ${target}`, + error: apiError, + }); + throw new Error(apiError.message); + } + + const result = appendHistory( + { ...envelope, payload: { ...envelope.payload, dispatchedTo: target } }, + { + step: "dispatchTicket", + action: "succeeded", + attempt: 1, + correlationId: envelope.payload.correlationId, + detail: { target, route: envelope.payload.route }, + } + ); + + await writer.write({ + type: "step_succeeded", + step: "dispatchTicket", + message: `Dispatched to ${target}`, + }); + + return result; + } finally { + writer.releaseLock(); + } +} + +async function finalizeSuccess( + envelope: TicketEnvelope +): Promise { + "use step"; + const writer = getWritable().getWriter(); + + try { + await delay(FINALIZE_DELAY_MS); + + const result = appendHistory( + { ...envelope, status: "completed" as const }, + { + step: "finalizeSuccess", + action: "succeeded", + attempt: 1, + correlationId: envelope.payload.correlationId, + detail: { totalSteps: envelope.history.length + 1 }, + } + ); + + await writer.write({ + type: "done", + envelope: result, + }); + + return result; + } finally { + writer.releaseLock(); + } +} + +async function finalizeFailure( + envelope: TicketEnvelope, + apiError: ApiError +): Promise { + "use step"; + const writer = getWritable().getWriter(); + + try { + await delay(FINALIZE_DELAY_MS); + + const result = appendHistory( + { ...envelope, status: "failed" as const }, + { 
+ step: "finalizeFailure", + action: "failed", + attempt: 1, + correlationId: envelope.payload.correlationId, + error: apiError, + } + ); + + await writer.write({ + type: "done", + envelope: result, + }); + + return result; + } finally { + writer.releaseLock(); + } +} +``` + +## Key APIs + +- [`"use workflow"`](/docs/api-reference/directives/use-workflow) — marks the orchestrator function +- [`"use step"`](/docs/api-reference/directives/use-step) — marks each processing stage as a durable step +- [`getWritable()`](/docs/api-reference/step/get-writable) — streams step-level history events to the client diff --git a/docs/content/docs/cookbook/observability/meta.json b/docs/content/docs/cookbook/observability/meta.json new file mode 100644 index 0000000000..df52d4b52b --- /dev/null +++ b/docs/content/docs/cookbook/observability/meta.json @@ -0,0 +1,10 @@ +{ + "title": "Observability", + "pages": [ + "wire-tap", + "message-history", + "correlation-identifier", + "event-sourcing", + "namespaced-streams" + ] +} diff --git a/docs/content/docs/cookbook/observability/namespaced-streams.mdx b/docs/content/docs/cookbook/observability/namespaced-streams.mdx new file mode 100644 index 0000000000..deefc0f5fd --- /dev/null +++ b/docs/content/docs/cookbook/observability/namespaced-streams.mdx @@ -0,0 +1,125 @@ +--- +title: Namespaced Streams +description: Separate streams per tenant or topic so clients only see relevant events. +type: guide +summary: Emit workflow events to separate UI and ops-telemetry streams simultaneously. +--- + +When different consumers need different views of workflow progress -- a UI showing content drafts while an ops dashboard tracks token usage and latency -- use namespaced streams. Each namespace is an independent writable stream that clients can subscribe to separately. + +## Pattern + +The workflow opens multiple `getWritable()` streams with different `namespace` values. Each step writes to the appropriate stream based on the event type. 
Clients subscribe only to the namespaces they care about.
+
+### Simplified
+
+```typescript lineNumbers
+import { getWritable, getWorkflowMetadata } from "workflow";
+
+type DraftEvent = { type: "chunk"; text: string; idx: number };
+type TelemetryEvent = { type: "start" | "tokens" | "done"; [key: string]: unknown };
+
+declare function buildOutline(topic: string): Promise<string>; // @setup
+declare function writeSections(topic: string, outline: string): Promise<string[]>; // @setup
+
+export async function generatePost(topic: string) {
+  "use workflow";
+
+  const { workflowRunId } = getWorkflowMetadata();
+  const draft = getWritable({ namespace: "draft" }).getWriter();
+  const telemetry = getWritable({ namespace: "telemetry" }).getWriter();
+
+  try {
+    await telemetry.write({ type: "start", runId: workflowRunId });
+
+    const outline = await buildOutline(topic);
+    await draft.write({ type: "chunk", idx: 0, text: outline });
+
+    const sections = await writeSections(topic, outline);
+    for (let i = 0; i < sections.length; i++) {
+      await draft.write({ type: "chunk", idx: i + 1, text: sections[i] });
+    }
+
+    await telemetry.write({ type: "done", totalTokens: 945 });
+    return { status: "completed", workflowRunId, sectionCount: sections.length + 1 };
+  } finally {
+    draft.releaseLock();
+    telemetry.releaseLock();
+  }
+}
+```
+
+### Full Implementation
+
+```typescript lineNumbers
+import { getWritable, getWorkflowMetadata } from "workflow";
+
+export type DraftEvent = { type: "chunk"; text: string; idx: number };
+
+export type TelemetryEvent =
+  | { type: "start"; runId: string; name: string }
+  | { type: "tokens"; input: number; output: number }
+  | { type: "done"; totalMs: number; totalTokens: number };
+
+export type GenerateResult = {
+  status: "completed";
+  workflowRunId: string;
+  sectionCount: number;
+};
+
+export async function generatePost(topic: string): Promise<GenerateResult> {
+  "use workflow";
+
+  const { workflowRunId } = getWorkflowMetadata();
+
+  const draft = getWritable({ 
namespace: "draft" }).getWriter(); + const telemetry = getWritable({ namespace: "telemetry" }).getWriter(); + + const startedAt = Date.now(); + + try { + await telemetry.write({ type: "start", runId: workflowRunId, name: "generatePost" }); + + const outline = await buildOutline(topic); + await draft.write({ type: "chunk", idx: 0, text: outline }); + await telemetry.write({ type: "tokens", input: 45, output: 120 }); + + const sections = await writeSections(topic, outline); + for (let i = 0; i < sections.length; i++) { + await draft.write({ type: "chunk", idx: i + 1, text: sections[i] }); + await telemetry.write({ type: "tokens", input: 80 + i * 10, output: 150 + i * 30 }); + } + + const totalMs = Date.now() - startedAt; + await telemetry.write({ type: "done", totalMs, totalTokens: 945 }); + + return { status: "completed", workflowRunId, sectionCount: sections.length + 1 }; + } finally { + draft.releaseLock(); + telemetry.releaseLock(); + } +} + +async function buildOutline(topic: string): Promise { + "use step"; + return `# ${topic}\n\n## Outline\n1. Introduction\n2. Key Concepts\n3. Implementation\n4. 
Best Practices`; +} + +async function writeSections(topic: string, outline: string): Promise { + "use step"; + void outline; + return [ + `## Introduction\nAn overview of ${topic} and why it matters for modern applications...`, + `## Key Concepts\nThe fundamental building blocks: durable execution, deterministic replay, and step boundaries...`, + `## Implementation\nHere's how to build it step by step with proper error handling and idempotency...`, + `## Best Practices\nTesting strategies, monitoring, and production deployment patterns...`, + ]; +} +``` + +## Key APIs + +- [`"use workflow"`](/docs/api-reference/directives/use-workflow) — marks the orchestrator function +- [`"use step"`](/docs/api-reference/directives/use-step) — marks content generation as durable steps +- [`getWritable()`](/docs/api-reference/step/get-writable) — creates namespaced streams with `{ namespace: "..." }` +- [`getWorkflowMetadata()`](/docs/api-reference/workflow/get-workflow-metadata) — retrieves the workflow run ID for telemetry correlation diff --git a/docs/content/docs/cookbook/observability/wire-tap.mdx b/docs/content/docs/cookbook/observability/wire-tap.mdx new file mode 100644 index 0000000000..6ec8081874 --- /dev/null +++ b/docs/content/docs/cookbook/observability/wire-tap.mdx @@ -0,0 +1,259 @@ +--- +title: Wire Tap +description: Observe or copy messages in flight for logging/debugging without changing the main path. +type: guide +summary: Mirror production order events to a debug logger without touching the main processing path. +--- + +When you need to inspect messages flowing through a pipeline for debugging or auditing without altering the main processing path, use a wire tap. Each step captures a snapshot of the message state and appends it to an audit trail alongside normal processing. + +## Pattern + +Each step function processes the message normally, then copies a snapshot to an audit trail array. 
The tap is non-invasive -- it never mutates the message it captures, only reads it. The audit trail accumulates across all steps and is returned with the final result.
+
+### Simplified
+
+```typescript lineNumbers
+type OrderPayload = { orderId: string; item: string; quantity: number; [key: string]: unknown };
+
+declare function validateOrder(message: OrderPayload, audit: unknown[]): Promise<OrderPayload>; // @setup
+declare function enrichOrder(message: OrderPayload, audit: unknown[]): Promise<OrderPayload>; // @setup
+declare function transformOrder(message: OrderPayload, audit: unknown[]): Promise<OrderPayload>; // @setup
+declare function deliverOrder(message: OrderPayload, audit: unknown[]): Promise<OrderPayload>; // @setup
+
+export async function wireTap(orderId: string, item: string, quantity: number) {
+  "use workflow";
+
+  const auditTrail: unknown[] = [];
+  let message: OrderPayload = { orderId, item, quantity };
+
+  // Each step processes AND taps a snapshot to the audit trail
+  message = await validateOrder(message, auditTrail);
+  message = await enrichOrder(message, auditTrail);
+  message = await transformOrder(message, auditTrail);
+  message = await deliverOrder(message, auditTrail);
+
+  return { orderId, status: "completed", auditTrail };
+}
+```
+
+### Full Implementation
+
+```typescript lineNumbers
+// getWritable is used here to stream demo UI events.
+// A production workflow wouldn't need this unless it has its own streaming UI.
+import { getWritable } from "workflow";
+
+// Local FatalError — prevents the SDK's automatic retry for permanent failures.
+// The workflow package does not export this class, so we define it here.
+class FatalError extends Error {
+  constructor(message: string) {
+    super(message);
+    this.name = "FatalError";
+  }
+}
+
+export type StageName = "validate" | "enrich" | "transform" | "deliver";
+
+export type WireTapEvent =
+  | { type: "stage_start"; stage: string }
+  | { type: "tap_captured"; stage: string; snapshot: Record<string, unknown> }
+  | { type: "stage_done"; stage: string; durationMs: number }
+  | { type: "done"; auditCount: number; totalMs: number };
+
+type OrderPayload = {
+  orderId: string;
+  item: string;
+  quantity: number;
+  validated?: boolean;
+  price?: number;
+  total?: number;
+  format?: string;
+  deliveredTo?: string;
+};
+
+type ProcessingResult = {
+  orderId: string;
+  status: "completed";
+  auditTrail: Array<{ stage: string; snapshot: Record<string, unknown> }>;
+  totalMs: number;
+};
+
+// Demo: simulate network latency so the UI can show each stage.
+const STAGE_DELAY_MS: Record<StageName, number> = {
+  validate: 600,
+  enrich: 800,
+  transform: 500,
+  deliver: 700,
+};
+
+function delay(ms: number): Promise<void> {
+  return new Promise((resolve) => setTimeout(resolve, ms));
+}
+
+// Wire Tap pattern: each processing stage is tapped — a copy of the message
+// is sent to an audit channel without altering the main flow. Think of it
+// as a network packet sniffer for your workflow messages.
+export async function wireTap( + orderId: string, + item: string, + quantity: number +): Promise { + "use workflow"; + + const startMs = Date.now(); + const auditTrail: Array<{ stage: string; snapshot: Record }> = []; + + let message: OrderPayload = { orderId, item, quantity }; + + message = await validateOrder(message, auditTrail); + message = await enrichOrder(message, auditTrail); + message = await transformOrder(message, auditTrail); + message = await deliverOrder(message, auditTrail); + + await emitDone(auditTrail.length, startMs); + + return { + orderId, + status: "completed", + auditTrail, + totalMs: Date.now() - startMs, + }; +} + +// Each step processes the message AND taps a snapshot to the audit trail. +// The tap is non-invasive — it copies, never mutates the message it captures. + +async function validateOrder( + message: OrderPayload, + auditTrail: Array<{ stage: string; snapshot: Record }> +): Promise { + "use step"; + + const writer = getWritable().getWriter(); + try { + await writer.write({ type: "stage_start", stage: "validate" }); + await delay(STAGE_DELAY_MS.validate); + + if (message.quantity <= 0) { + throw new FatalError("Invalid quantity: must be greater than 0"); + } + + const result: OrderPayload = { ...message, validated: true }; + + // Wire tap: capture a snapshot without altering the flow + const snapshot = { ...result } as unknown as Record; + auditTrail.push({ stage: "validate", snapshot }); + await writer.write({ type: "tap_captured", stage: "validate", snapshot }); + await writer.write({ type: "stage_done", stage: "validate", durationMs: STAGE_DELAY_MS.validate }); + + return result; + } finally { + writer.releaseLock(); + } +} + +async function enrichOrder( + message: OrderPayload, + auditTrail: Array<{ stage: string; snapshot: Record }> +): Promise { + "use step"; + + const writer = getWritable().getWriter(); + try { + await writer.write({ type: "stage_start", stage: "enrich" }); + await delay(STAGE_DELAY_MS.enrich); + + // 
Simulate price lookup + const unitPrice = 29.99; + const result: OrderPayload = { + ...message, + price: unitPrice, + total: unitPrice * message.quantity, + }; + + // Wire tap: capture enriched state + const snapshot = { ...result } as unknown as Record; + auditTrail.push({ stage: "enrich", snapshot }); + await writer.write({ type: "tap_captured", stage: "enrich", snapshot }); + await writer.write({ type: "stage_done", stage: "enrich", durationMs: STAGE_DELAY_MS.enrich }); + + return result; + } finally { + writer.releaseLock(); + } +} + +async function transformOrder( + message: OrderPayload, + auditTrail: Array<{ stage: string; snapshot: Record }> +): Promise { + "use step"; + + const writer = getWritable().getWriter(); + try { + await writer.write({ type: "stage_start", stage: "transform" }); + await delay(STAGE_DELAY_MS.transform); + + // Simulate format transformation + const result: OrderPayload = { ...message, format: "canonical-v2" }; + + // Wire tap: capture transformed state + const snapshot = { ...result } as unknown as Record; + auditTrail.push({ stage: "transform", snapshot }); + await writer.write({ type: "tap_captured", stage: "transform", snapshot }); + await writer.write({ type: "stage_done", stage: "transform", durationMs: STAGE_DELAY_MS.transform }); + + return result; + } finally { + writer.releaseLock(); + } +} + +async function deliverOrder( + message: OrderPayload, + auditTrail: Array<{ stage: string; snapshot: Record }> +): Promise { + "use step"; + + const writer = getWritable().getWriter(); + try { + await writer.write({ type: "stage_start", stage: "deliver" }); + await delay(STAGE_DELAY_MS.deliver); + + // Simulate delivery + const result: OrderPayload = { ...message, deliveredTo: "warehouse-us-east-1" }; + + // Wire tap: capture final delivery state + const snapshot = { ...result } as unknown as Record; + auditTrail.push({ stage: "deliver", snapshot }); + await writer.write({ type: "tap_captured", stage: "deliver", snapshot }); + await 
writer.write({ type: "stage_done", stage: "deliver", durationMs: STAGE_DELAY_MS.deliver }); + + return result; + } finally { + writer.releaseLock(); + } +} + +async function emitDone( + auditCount: number, + startMs: number +): Promise { + "use step"; + + const writer = getWritable().getWriter(); + try { + await writer.write({ type: "done", auditCount, totalMs: Date.now() - startMs }); + } finally { + writer.releaseLock(); + } +} +``` + +## Key APIs + +- [`"use workflow"`](/docs/api-reference/directives/use-workflow) — marks the orchestrator function +- [`"use step"`](/docs/api-reference/directives/use-step) — marks each processing stage as a durable step +- [`FatalError`](/docs/api-reference/workflow/fatal-error) — stops retries on permanent validation failures +- [`getWritable()`](/docs/api-reference/step/get-writable) — streams tap snapshots to the client diff --git a/docs/content/docs/cookbook/payments/choreography.mdx b/docs/content/docs/cookbook/payments/choreography.mdx new file mode 100644 index 0000000000..8eb5c234d1 --- /dev/null +++ b/docs/content/docs/cookbook/payments/choreography.mdx @@ -0,0 +1,632 @@ +--- +title: Choreography +description: Peers react to events independently — no central orchestrator. +type: guide +summary: Order flow where inventory, payment, and shipping react to events with automatic compensation on failure. +--- + +Use choreography when multiple services should react to events independently. Each participant handles its own logic and emits events for the next participant, with compensation if any step fails. + +## Pattern + +The workflow acts as a durable event bus. Each participant step reacts to the previous event, performs its work, and emits a new event. If a participant fails, upstream participants run compensation steps in reverse to restore consistency. Durable `sleep()` simulates real-world async handoff latency between services. 
+
+### Simplified
+
+```typescript lineNumbers
+import { FatalError, sleep } from "workflow";
+
+declare function placeOrder(orderId: string, items: string[]): Promise<void>; // @setup
+declare function reserveInventory(items: string[]): Promise<boolean>; // @setup
+declare function chargePayment(orderId: string): Promise<boolean>; // @setup
+declare function shipOrder(orderId: string, items: string[]): Promise<boolean>; // @setup
+declare function compensateInventory(items: string[]): Promise<void>; // @setup
+declare function compensatePayment(orderId: string): Promise<void>; // @setup
+declare function compensateOrder(orderId: string): Promise<void>; // @setup
+
+export async function choreography(orderId: string, items: string[]) {
+  "use workflow";
+
+  // Participant 1: Order Service places the order
+  await placeOrder(orderId, items);
+
+  // Participant 2: Inventory Service reacts to order_placed
+  const inventoryOk = await reserveInventory(items);
+  if (!inventoryOk) {
+    await compensateOrder(orderId);
+    return { outcome: "compensated", failedService: "inventory" };
+  }
+
+  // Durable sleep: simulate async handoff between participants
+  await sleep("3s");
+
+  // Participant 3: Payment Service reacts to inventory_reserved
+  const paymentOk = await chargePayment(orderId);
+  if (!paymentOk) {
+    await compensateInventory(items);
+    await compensateOrder(orderId);
+    return { outcome: "compensated", failedService: "payment" };
+  }
+
+  // Participant 4: Shipping Service reacts to payment_processed
+  const shippingOk = await shipOrder(orderId, items);
+  if (!shippingOk) {
+    await compensatePayment(orderId);
+    await compensateInventory(items);
+    await compensateOrder(orderId);
+    return { outcome: "compensated", failedService: "shipping" };
+  }
+
+  return { outcome: "fulfilled" };
+}
+```
+
+### Full Implementation
+
+```typescript lineNumbers
+// getWritable + getStepMetadata are used here to stream demo UI events.
+// A production workflow wouldn't need these unless it has its own streaming UI.
+import { getStepMetadata, getWritable, sleep } from "workflow"; +import { FatalError } from "workflow"; + +export type ParticipantId = + | "order-service" + | "inventory-service" + | "payment-service" + | "shipping-service"; + +export type ChoreographyEvent = + | { type: "event_emitted"; participant: ParticipantId; event: string; correlationId: string; message: string } + | { type: "event_received"; participant: ParticipantId; event: string; correlationId: string; message: string } + | { type: "step_started"; participant: ParticipantId; message: string } + | { type: "step_completed"; participant: ParticipantId; message: string } + | { type: "step_retrying"; participant: ParticipantId; attempt: number } + | { type: "compensation_started"; participant: ParticipantId; reason: string; correlationId: string } + | { type: "compensation_completed"; participant: ParticipantId; message: string; correlationId: string } + | { type: "sleeping"; participant: ParticipantId; duration: string; reason: string } + | { type: "done"; correlationId: string; outcome: "fulfilled" | "compensated"; summary: ChoreographySummary }; + +type ChoreographySummary = { + correlationId: string; + outcome: "fulfilled" | "compensated"; + participantsInvolved: ParticipantId[]; + eventsEmitted: number; + trackingId: string | null; + failedService: ParticipantId | null; +}; + +type OrderItem = { name: string; qty: number }; + +// Demo: simulate real-world latency so the UI can show progress. 
+const STEP_DELAY_MS: Record = { + orderService: 400, + inventoryService: 700, + paymentService: 800, + shippingService: 600, + compensation: 500, +}; + +function delay(ms: number): Promise { + return new Promise((resolve) => setTimeout(resolve, ms)); +} + +export async function choreography( + orderId: string, + items: OrderItem[], + failService: string | null +): Promise { + "use workflow"; + + const correlationId = `COR-${orderId}`; + let eventsEmitted = 0; + const participantsInvolved: ParticipantId[] = []; + + // Participant 1: Order Service places the order and emits "order_placed" + const orderPlaced = await orderServicePlaceOrder(correlationId, orderId, items); + eventsEmitted += orderPlaced.events; + participantsInvolved.push("order-service"); + + // Participant 2: Inventory Service reacts to "order_placed", reserves stock + const inventoryResult = await inventoryServiceReserve( + correlationId, + items, + failService === "inventory" + ); + eventsEmitted += inventoryResult.events; + participantsInvolved.push("inventory-service"); + + if (!inventoryResult.success) { + // Compensation: Order Service rolls back + const comp = await orderServiceCompensate(correlationId, orderId, "inventory_failed"); + eventsEmitted += comp.events; + + return finalizeOutcome(correlationId, "compensated", participantsInvolved, eventsEmitted, null, "inventory-service"); + } + + // Durable sleep: simulate async handoff latency between participants + await emitSleeping("inventory-service", "3s", "Awaiting payment-service to react to inventory_reserved event"); + await sleep("3s"); + + // Participant 3: Payment Service reacts to "inventory_reserved", charges customer + const paymentResult = await paymentServiceCharge( + correlationId, + orderId, + failService === "payment" + ); + eventsEmitted += paymentResult.events; + participantsInvolved.push("payment-service"); + + if (!paymentResult.success) { + // Compensation: Inventory Service releases stock, then Order Service rolls back + 
const invComp = await inventoryServiceCompensate(correlationId, items, "payment_failed"); + eventsEmitted += invComp.events; + const orderComp = await orderServiceCompensate(correlationId, orderId, "payment_failed"); + eventsEmitted += orderComp.events; + + return finalizeOutcome(correlationId, "compensated", participantsInvolved, eventsEmitted, null, "payment-service"); + } + + // Participant 4: Shipping Service reacts to "payment_processed", ships order + const shippingResult = await shippingServiceShip( + correlationId, + orderId, + items, + failService === "shipping" + ); + eventsEmitted += shippingResult.events; + participantsInvolved.push("shipping-service"); + + if (!shippingResult.success) { + // Compensation: Payment refund, Inventory release, Order rollback + const payComp = await paymentServiceCompensate(correlationId, orderId, "shipping_failed"); + eventsEmitted += payComp.events; + const invComp = await inventoryServiceCompensate(correlationId, items, "shipping_failed"); + eventsEmitted += invComp.events; + const orderComp = await orderServiceCompensate(correlationId, orderId, "shipping_failed"); + eventsEmitted += orderComp.events; + + return finalizeOutcome(correlationId, "compensated", participantsInvolved, eventsEmitted, null, "shipping-service"); + } + + // All participants succeeded — order fulfilled + return finalizeOutcome( + correlationId, + "fulfilled", + participantsInvolved, + eventsEmitted, + `TRK-${orderId}-${Date.now().toString(36)}`, + null + ); +} + +// --------------------------------------------------------------------------- +// Participant: Order Service +// --------------------------------------------------------------------------- + +async function orderServicePlaceOrder( + correlationId: string, + orderId: string, + items: OrderItem[] +): Promise<{ events: number }> { + "use step"; + + const writer = getWritable().getWriter(); + const { attempt } = getStepMetadata(); + + try { + if (attempt > 1) { + await writer.write({ type: 
"step_retrying", participant: "order-service", attempt }); + } + + await writer.write({ + type: "step_started", + participant: "order-service", + message: `Placing order ${orderId} with ${items.length} item(s)`, + }); + + await delay(STEP_DELAY_MS.orderService); + + await writer.write({ + type: "event_emitted", + participant: "order-service", + event: "order_placed", + correlationId, + message: `Order ${orderId} placed — notifying downstream participants`, + }); + + await writer.write({ + type: "step_completed", + participant: "order-service", + message: `Order ${orderId} accepted and order_placed event emitted`, + }); + + return { events: 1 }; + } finally { + writer.releaseLock(); + } +} + +// --------------------------------------------------------------------------- +// Participant: Inventory Service +// --------------------------------------------------------------------------- + +async function inventoryServiceReserve( + correlationId: string, + items: OrderItem[], + simulateFail: boolean +): Promise<{ success: boolean; events: number }> { + "use step"; + + const writer = getWritable().getWriter(); + const { attempt } = getStepMetadata(); + + try { + if (attempt > 1) { + await writer.write({ type: "step_retrying", participant: "inventory-service", attempt }); + } + + await writer.write({ + type: "event_received", + participant: "inventory-service", + event: "order_placed", + correlationId, + message: "Received order_placed — checking stock levels", + }); + + await writer.write({ + type: "step_started", + participant: "inventory-service", + message: `Reserving ${items.length} item(s) in warehouse`, + }); + + await delay(STEP_DELAY_MS.inventoryService); + + if (simulateFail) { + await writer.write({ + type: "event_emitted", + participant: "inventory-service", + event: "inventory_failed", + correlationId, + message: "Insufficient stock — emitting inventory_failed event", + }); + + throw new FatalError("Inventory reservation failed: insufficient stock"); + } + + 
await writer.write({ + type: "event_emitted", + participant: "inventory-service", + event: "inventory_reserved", + correlationId, + message: "Stock reserved — emitting inventory_reserved event", + }); + + await writer.write({ + type: "step_completed", + participant: "inventory-service", + message: `${items.length} item(s) reserved successfully`, + }); + + return { success: true, events: 2 }; + } catch (err) { + if (err instanceof FatalError) { + return { success: false, events: 2 }; + } + throw err; + } finally { + writer.releaseLock(); + } +} + +// --------------------------------------------------------------------------- +// Participant: Payment Service +// --------------------------------------------------------------------------- + +async function paymentServiceCharge( + correlationId: string, + orderId: string, + simulateFail: boolean +): Promise<{ success: boolean; events: number }> { + "use step"; + + const writer = getWritable().getWriter(); + const { attempt } = getStepMetadata(); + + try { + if (attempt > 1) { + await writer.write({ type: "step_retrying", participant: "payment-service", attempt }); + } + + await writer.write({ + type: "event_received", + participant: "payment-service", + event: "inventory_reserved", + correlationId, + message: "Received inventory_reserved — processing payment", + }); + + await writer.write({ + type: "step_started", + participant: "payment-service", + message: `Charging payment for order ${orderId}`, + }); + + await delay(STEP_DELAY_MS.paymentService); + + if (simulateFail) { + await writer.write({ + type: "event_emitted", + participant: "payment-service", + event: "payment_failed", + correlationId, + message: "Payment declined — emitting payment_failed event", + }); + + throw new FatalError("Payment processing failed: card declined"); + } + + await writer.write({ + type: "event_emitted", + participant: "payment-service", + event: "payment_processed", + correlationId, + message: "Payment charged — emitting 
payment_processed event", + }); + + await writer.write({ + type: "step_completed", + participant: "payment-service", + message: `Payment for order ${orderId} processed successfully`, + }); + + return { success: true, events: 2 }; + } catch (err) { + if (err instanceof FatalError) { + return { success: false, events: 2 }; + } + throw err; + } finally { + writer.releaseLock(); + } +} + +// --------------------------------------------------------------------------- +// Participant: Shipping Service +// --------------------------------------------------------------------------- + +async function shippingServiceShip( + correlationId: string, + orderId: string, + items: OrderItem[], + simulateFail: boolean +): Promise<{ success: boolean; events: number }> { + "use step"; + + const writer = getWritable().getWriter(); + const { attempt } = getStepMetadata(); + + try { + if (attempt > 1) { + await writer.write({ type: "step_retrying", participant: "shipping-service", attempt }); + } + + await writer.write({ + type: "event_received", + participant: "shipping-service", + event: "payment_processed", + correlationId, + message: "Received payment_processed — preparing shipment", + }); + + await writer.write({ + type: "step_started", + participant: "shipping-service", + message: `Shipping ${items.length} item(s) for order ${orderId}`, + }); + + await delay(STEP_DELAY_MS.shippingService); + + if (simulateFail) { + await writer.write({ + type: "event_emitted", + participant: "shipping-service", + event: "shipping_failed", + correlationId, + message: "Shipment failed — emitting shipping_failed event", + }); + + throw new FatalError("Shipping failed: carrier unavailable"); + } + + await writer.write({ + type: "event_emitted", + participant: "shipping-service", + event: "order_shipped", + correlationId, + message: `Order ${orderId} shipped — emitting order_shipped event`, + }); + + await writer.write({ + type: "step_completed", + participant: "shipping-service", + message: `Order 
${orderId} shipped with tracking`, + }); + + return { success: true, events: 2 }; + } catch (err) { + if (err instanceof FatalError) { + return { success: false, events: 2 }; + } + throw err; + } finally { + writer.releaseLock(); + } +} + +// --------------------------------------------------------------------------- +// Compensation steps +// --------------------------------------------------------------------------- + +async function orderServiceCompensate( + correlationId: string, + orderId: string, + reason: string +): Promise<{ events: number }> { + "use step"; + + const writer = getWritable().getWriter(); + + try { + await writer.write({ + type: "compensation_started", + participant: "order-service", + reason, + correlationId, + }); + + await delay(STEP_DELAY_MS.compensation); + + await writer.write({ + type: "compensation_completed", + participant: "order-service", + message: `Order ${orderId} rolled back`, + correlationId, + }); + + return { events: 1 }; + } finally { + writer.releaseLock(); + } +} + +async function inventoryServiceCompensate( + correlationId: string, + items: OrderItem[], + reason: string +): Promise<{ events: number }> { + "use step"; + + const writer = getWritable().getWriter(); + + try { + await writer.write({ + type: "compensation_started", + participant: "inventory-service", + reason, + correlationId, + }); + + await delay(STEP_DELAY_MS.compensation); + + await writer.write({ + type: "compensation_completed", + participant: "inventory-service", + message: `${items.length} item(s) released back to stock`, + correlationId, + }); + + return { events: 1 }; + } finally { + writer.releaseLock(); + } +} + +async function paymentServiceCompensate( + correlationId: string, + orderId: string, + reason: string +): Promise<{ events: number }> { + "use step"; + + const writer = getWritable().getWriter(); + + try { + await writer.write({ + type: "compensation_started", + participant: "payment-service", + reason, + correlationId, + }); + + await 
delay(STEP_DELAY_MS.compensation); + + await writer.write({ + type: "compensation_completed", + participant: "payment-service", + message: `Payment for order ${orderId} refunded`, + correlationId, + }); + + return { events: 1 }; + } finally { + writer.releaseLock(); + } +} + +// --------------------------------------------------------------------------- +// Finalize +// --------------------------------------------------------------------------- + +async function finalizeOutcome( + correlationId: string, + outcome: "fulfilled" | "compensated", + participantsInvolved: ParticipantId[], + eventsEmitted: number, + trackingId: string | null, + failedService: ParticipantId | null +): Promise { + "use step"; + + const writer = getWritable().getWriter(); + + try { + const summary: ChoreographySummary = { + correlationId, + outcome, + participantsInvolved, + eventsEmitted, + trackingId, + failedService, + }; + + await writer.write({ + type: "done", + correlationId, + outcome, + summary, + }); + + return summary; + } finally { + writer.releaseLock(); + } +} + +// --------------------------------------------------------------------------- +// Helper: emit sleeping event +// --------------------------------------------------------------------------- + +async function emitSleeping( + participant: ParticipantId, + duration: string, + reason: string +): Promise { + "use step"; + const writer = getWritable().getWriter(); + try { + await writer.write({ type: "sleeping", participant, duration, reason }); + } finally { + writer.releaseLock(); + } +} + +emitSleeping.maxRetries = 0; +``` + +## Key APIs + +- [`"use workflow"`](/docs/api-reference/workflow/use-workflow) — declares the orchestrator function +- [`"use step"`](/docs/api-reference/workflow/use-step) — declares step functions with full Node.js access +- [`sleep()`](/docs/api-reference/workflow/sleep) — durable timer that survives restarts +- [`FatalError`](/docs/api-reference/workflow/fatal-error) — non-retryable error that 
triggers compensation +- [`getWritable()`](/docs/api-reference/workflow/get-writable) — streams events to the client diff --git a/docs/content/docs/cookbook/payments/guaranteed-delivery.mdx b/docs/content/docs/cookbook/payments/guaranteed-delivery.mdx new file mode 100644 index 0000000000..9e8345a681 --- /dev/null +++ b/docs/content/docs/cookbook/payments/guaranteed-delivery.mdx @@ -0,0 +1,185 @@ +--- +title: Guaranteed Delivery +description: Persist-and-retry semantics so work isn't lost across crashes or restarts. +type: guide +summary: Ensure a payment confirmation is delivered even if the server restarts mid-send. +--- + +Use guaranteed delivery when messages must reach their destination even if the process crashes mid-send. The workflow persists each message before attempting delivery and automatically retries on failure. + +## Pattern + +Each message is persisted as a durable step, then delivery is attempted with built-in retry. The workflow runtime automatically retries failed steps (up to `maxRetries`), so if the server restarts mid-delivery the message is replayed from the event log and retried until it succeeds or exhausts attempts. + +### Simplified + +```typescript lineNumbers +declare function deliverMessage(messageId: string): Promise<{ status: "delivered" | "failed"; attempts: number }>; // @setup + +export async function guaranteedDelivery(messages: string[]) { + "use workflow"; + + const results = []; + + for (const messageId of messages) { + // Each step is persisted — delivery resumes after any crash + const result = await deliverMessage(messageId); + results.push(result); + } + + const delivered = results.filter((r) => r.status === "delivered").length; + return { delivered, failed: results.length - delivered }; +} +``` + +### Full Implementation + +```typescript lineNumbers +// getWritable + getStepMetadata are used here to stream demo UI events. +// A production workflow wouldn't need these unless it has its own streaming UI. 
+import { getStepMetadata, getWritable } from "workflow"; + +export type MessageId = string; + +export type GDEvent = + | { type: "persist"; messageId: string } + | { type: "send"; messageId: string; attempt: number } + | { type: "ack"; messageId: string; attempt: number } + | { type: "retry"; messageId: string; attempt: number; error: string } + | { type: "confirm"; messageId: string; attempt: number } + | { type: "fail"; messageId: string; error: string; attempts: number } + | { type: "done"; summary: { delivered: number; failed: number } }; + +type MessageResult = { + messageId: string; + status: "delivered" | "failed"; + attempts: number; + error?: string; +}; + +type DeliveryReport = { + status: "done"; + results: MessageResult[]; + summary: { + delivered: number; + failed: number; + }; +}; + +// Demo: per-step latency so the UI can show progress +const PERSIST_DELAY_MS = 400; +const SEND_DELAY_MS = 600; +const CONFIRM_DELAY_MS = 300; +const MAX_ATTEMPTS = 3; + +function delay(ms: number): Promise<void> { + return new Promise((resolve) => setTimeout(resolve, ms)); +} + +export async function guaranteedDelivery( + messages: string[], + failMessages: string[] = [] +): Promise<DeliveryReport> { + "use workflow"; + + const results: MessageResult[] = []; + + for (const messageId of messages) { + const shouldFail = failMessages.includes(messageId); + const result = await deliverMessage(messageId, shouldFail); + results.push(result); + } + + return finalizeDelivery(results); +} + +async function deliverMessage( + messageId: string, + shouldFail: boolean +): Promise<MessageResult> { + "use step"; + + const writer = getWritable().getWriter(); + const { attempt } = getStepMetadata(); + + try { + await writer.write({ type: "persist", messageId }); + await delay(PERSIST_DELAY_MS); + + await writer.write({ type: "send", messageId, attempt }); + await delay(SEND_DELAY_MS); + + if (shouldFail) { + throw new Error(`Delivery failed: recipient unreachable for ${messageId}`); + } + + await writer.write({ type:
"ack", messageId, attempt }); + await delay(CONFIRM_DELAY_MS); + + await writer.write({ type: "confirm", messageId, attempt }); + return { messageId, status: "delivered", attempts: attempt }; + } catch (error: unknown) { + const message = + error instanceof Error ? error.message : "Unknown delivery error"; + + if (attempt >= MAX_ATTEMPTS) { + await writer.write({ + type: "fail", + messageId, + error: message, + attempts: attempt, + }); + return { + messageId, + status: "failed", + attempts: attempt, + error: message, + }; + } + + await writer.write({ + type: "retry", + messageId, + attempt, + error: message, + }); + + throw error instanceof Error ? error : new Error(message); + } finally { + writer.releaseLock(); + } +} + +async function finalizeDelivery( + results: MessageResult[] +): Promise { + "use step"; + + const writer = getWritable().getWriter(); + + try { + await delay(CONFIRM_DELAY_MS); + + const delivered = results.filter((r) => r.status === "delivered").length; + const failed = results.length - delivered; + + const report: DeliveryReport = { + status: "done", + results, + summary: { delivered, failed }, + }; + + await writer.write({ type: "done", summary: report.summary }); + return report; + } finally { + writer.releaseLock(); + } +} +``` + +## Key APIs + +- [`"use workflow"`](/docs/api-reference/workflow/use-workflow) — declares the orchestrator function +- [`"use step"`](/docs/api-reference/workflow/use-step) — declares step functions with automatic retry +- [`getStepMetadata()`](/docs/api-reference/workflow/get-step-metadata) — access the current attempt number for retry logic +- [`getWritable()`](/docs/api-reference/workflow/get-writable) — streams delivery progress to the client diff --git a/docs/content/docs/cookbook/payments/idempotent-receiver.mdx b/docs/content/docs/cookbook/payments/idempotent-receiver.mdx new file mode 100644 index 0000000000..095dc620d4 --- /dev/null +++ b/docs/content/docs/cookbook/payments/idempotent-receiver.mdx @@ -0,0 
+1,191 @@ +--- +title: Idempotent Receiver +description: Handle duplicate deliveries safely (same logical operation, same outcome). +type: guide +summary: Detect duplicate payment webhooks with an idempotency key and return the cached result. +--- + +Use the idempotent receiver pattern when your workflow may receive the same request more than once (e.g., webhook retries). The workflow checks an idempotency key before processing; if the key was already seen, it returns the cached result instead of re-executing. + +## Pattern + +The workflow first checks whether the idempotency key has been processed before. If a cached result exists, it short-circuits and returns it. Otherwise, it processes the payment and stores the result keyed by the idempotency key. In production, the workflow's own `runId` can serve as the idempotency key since each run is unique and deterministically replayed. + +### Simplified + +```typescript lineNumbers +declare function checkIdempotencyKey(key: string): Promise<{ transactionId: string; amount: number } | null>; // @setup +declare function processPayment(key: string, amount: number, currency: string): Promise<{ transactionId: string; amount: number }>; // @setup + +export async function idempotentReceiver( + idempotencyKey: string, + amount: number, + currency: string +) { + "use workflow"; + + // Check if this key was already processed + const cached = await checkIdempotencyKey(idempotencyKey); + + if (cached) { + return { idempotencyKey, deduplicated: true, result: cached }; + } + + // First time — process the payment + const result = await processPayment(idempotencyKey, amount, currency); + + return { idempotencyKey, deduplicated: false, result }; +} +``` + +### Full Implementation + +```typescript lineNumbers +// getWritable is used here to stream demo UI events. +// A production workflow wouldn't need this unless it has its own streaming UI. 
+import { getWritable } from "workflow"; + +export type IdempotentEvent = + | { type: "checking_key"; idempotencyKey: string } + | { type: "duplicate_detected"; idempotencyKey: string; cachedResult: PaymentResult } + | { type: "processing_payment"; idempotencyKey: string; amount: number } + | { type: "payment_processed"; idempotencyKey: string; result: PaymentResult } + | { type: "done"; status: "completed" | "deduplicated"; idempotencyKey: string }; + +export type PaymentResult = { + transactionId: string; + amount: number; + currency: string; + status: "succeeded"; + processedAt: string; +}; + +type PaymentInput = { + idempotencyKey: string; + amount: number; + currency: string; + description: string; +}; + +type IdempotentReport = { + idempotencyKey: string; + deduplicated: boolean; + result: PaymentResult; +}; + +// Demo: in-memory store simulates durable state for idempotency checks. +// In production, this would be the workflow's built-in durable storage +// (the workflow itself is keyed by idempotencyKey via the runId). 
+const processedKeys = new Map<string, PaymentResult>(); + +const CHECK_DELAY_MS = 400; +const PROCESS_DELAY_MS = 800; + +function delay(ms: number): Promise<void> { + return new Promise((resolve) => setTimeout(resolve, ms)); +} + +export async function idempotentReceiver( + idempotencyKey: string, + amount: number, + currency: string, + description: string +): Promise<IdempotentReport> { + "use workflow"; + + const cached = await checkIdempotencyKey(idempotencyKey); + + if (cached) { + await emitDuplicateDetected(idempotencyKey, cached); + return { idempotencyKey, deduplicated: true, result: cached }; + } + + const result = await processPayment(idempotencyKey, amount, currency, description); + + await emitCompletion(idempotencyKey); + + return { idempotencyKey, deduplicated: false, result }; +} + +async function checkIdempotencyKey( + idempotencyKey: string +): Promise<PaymentResult | null> { + "use step"; + const writer = getWritable().getWriter(); + + try { + await writer.write({ type: "checking_key", idempotencyKey }); + await delay(CHECK_DELAY_MS); // Demo: simulate lookup latency + + const cached = processedKeys.get(idempotencyKey) ??
null; + return cached; + } finally { + writer.releaseLock(); + } +} + +async function emitDuplicateDetected( + idempotencyKey: string, + cachedResult: PaymentResult +): Promise<void> { + "use step"; + const writer = getWritable().getWriter(); + + try { + await writer.write({ type: "duplicate_detected", idempotencyKey, cachedResult }); + await delay(200); // Demo: brief pause for UI visibility + await writer.write({ type: "done", status: "deduplicated", idempotencyKey }); + } finally { + writer.releaseLock(); + } +} + +async function processPayment( + idempotencyKey: string, + amount: number, + currency: string, + description: string +): Promise<PaymentResult> { + "use step"; + const writer = getWritable().getWriter(); + + try { + await writer.write({ type: "processing_payment", idempotencyKey, amount }); + await delay(PROCESS_DELAY_MS); // Demo: simulate payment processing latency + + const result: PaymentResult = { + transactionId: `txn_${idempotencyKey}_${Date.now()}`, + amount, + currency, + status: "succeeded", + processedAt: new Date().toISOString(), + }; + + // Store result for future deduplication + processedKeys.set(idempotencyKey, result); + + await writer.write({ type: "payment_processed", idempotencyKey, result }); + + return result; + } finally { + writer.releaseLock(); + } +} + +async function emitCompletion(idempotencyKey: string): Promise<void> { + "use step"; + const writer = getWritable().getWriter(); + + try { + await writer.write({ type: "done", status: "completed", idempotencyKey }); + } finally { + writer.releaseLock(); + } +} +``` + +## Key APIs + +- [`"use workflow"`](/docs/api-reference/workflow/use-workflow) — declares the orchestrator function +- [`"use step"`](/docs/api-reference/workflow/use-step) — declares step functions with full Node.js access +- [`getWritable()`](/docs/api-reference/workflow/get-writable) — streams deduplication events to the client diff --git a/docs/content/docs/cookbook/payments/meta.json b/docs/content/docs/cookbook/payments/meta.json new
file mode 100644 index 0000000000..68d84f12af --- /dev/null +++ b/docs/content/docs/cookbook/payments/meta.json @@ -0,0 +1,11 @@ +{ + "title": "Payments & Orders", + "pages": [ + "saga", + "choreography", + "process-manager", + "guaranteed-delivery", + "transactional-outbox", + "idempotent-receiver" + ] +} diff --git a/docs/content/docs/cookbook/payments/process-manager.mdx b/docs/content/docs/cookbook/payments/process-manager.mdx new file mode 100644 index 0000000000..3fe90e337f --- /dev/null +++ b/docs/content/docs/cookbook/payments/process-manager.mdx @@ -0,0 +1,624 @@ +--- +title: Process Manager +description: Track a multi-step business process and react to events until it completes. +type: guide +summary: Orchestrate payment, inventory, backorder, shipping, and delivery with branching logic. +--- + +Use the process manager pattern when a business process has multiple steps with branching logic based on intermediate results. The workflow maintains a state machine, transitioning between states as each step completes. + +## Pattern + +The workflow tracks `currentState` and advances it through a series of steps. Each step can branch the state machine (e.g., payment failure cancels the order, backordered items trigger a durable sleep and recheck). The durable runtime guarantees the process resumes from the correct state after any interruption. 
+ +### Simplified + +```typescript lineNumbers +import { sleep } from "workflow"; + +declare function initializeOrder(orderId: string): Promise<void>; // @setup +declare function validatePayment(orderId: string): Promise<"validated" | "failed">; // @setup +declare function checkInventory(items: string[]): Promise<"in_stock" | "backordered">; // @setup +declare function reserveInventory(items: string[]): Promise<void>; // @setup +declare function shipOrder(orderId: string): Promise<void>; // @setup +declare function confirmDelivery(orderId: string): Promise<void>; // @setup +declare function cancelOrder(orderId: string, reason: string): Promise<void>; // @setup + +export async function processManager(orderId: string, items: string[]) { + "use workflow"; + + let state = "received"; + + await initializeOrder(orderId); + state = "initialized"; + + const paymentResult = await validatePayment(orderId); + if (paymentResult === "failed") { + await cancelOrder(orderId, "payment_failed"); + return { orderId, finalState: "cancelled" }; + } + state = "payment_validated"; + + const inventoryResult = await checkInventory(items); + if (inventoryResult === "backordered") { + await sleep("5s"); // Wait for restock + await checkInventory(items); // Recheck + } + state = "inventory_checked"; + + await reserveInventory(items); + await shipOrder(orderId); + await confirmDelivery(orderId); + + return { orderId, finalState: "completed" }; +} +``` + +### Full Implementation + +```typescript lineNumbers +// getWritable + getStepMetadata are used here to stream demo UI events. +// A production workflow wouldn't need these unless it has its own streaming UI.
+import { getStepMetadata, getWritable, sleep } from "workflow"; + +export type OrderState = + | "received" + | "payment_validated" + | "payment_failed" + | "inventory_checked" + | "inventory_reserved" + | "backordered" + | "shipped" + | "delivery_confirmed" + | "completed" + | "cancelled"; + +export type ProcessManagerEvent = + | { type: "state_transition"; from: OrderState; to: OrderState; step: string } + | { type: "step_started"; step: string; message: string } + | { type: "step_completed"; step: string; message: string } + | { type: "step_retrying"; step: string; attempt: number } + | { type: "branch_taken"; step: string; branch: string; reason: string } + | { type: "sleeping"; step: string; duration: string; reason: string } + | { type: "done"; orderId: string; finalState: OrderState; summary: OrderSummary }; + +type OrderSummary = { + orderId: string; + finalState: OrderState; + stateTransitions: number; + paymentMethod: string; + itemCount: number; + trackingId: string | null; +}; + +type OrderPayload = { + orderId: string; + items: string[]; + paymentMethod: string; + simulatePaymentFail?: boolean; + simulateBackorder?: boolean; +}; + +// Demo: simulate real-world latency so the UI can show progress. 
+const STEP_DELAY_MS: Record = { + initializeOrder: 400, + validatePayment: 800, + checkInventory: 600, + reserveInventory: 500, + shipOrder: 900, + confirmDelivery: 700, + completeOrder: 400, +}; + +function delay(ms: number): Promise { + return new Promise((resolve) => setTimeout(resolve, ms)); +} + +export async function processManager( + orderId: string, + items: string[], + paymentMethod: string, + simulatePaymentFail = false, + simulateBackorder = false +): Promise { + "use workflow"; + + const order: OrderPayload = { + orderId, + items, + paymentMethod, + simulatePaymentFail, + simulateBackorder, + }; + + let currentState: OrderState = "received"; + let stateTransitions = 0; + + // Step 1: Initialize order + currentState = await initializeOrder(order, currentState); + stateTransitions++; + + // Step 2: Validate payment — branches on success/failure + const paymentResult = await validatePayment(order, currentState); + stateTransitions++; + + if (paymentResult === "payment_failed") { + // Branch: payment failed → cancel order + const summary = await cancelOrder(order, paymentResult, stateTransitions); + return summary; + } + currentState = paymentResult; + + // Step 3: Check inventory — branches on available/backorder + const inventoryResult = await checkInventory(order, currentState); + stateTransitions++; + + if (inventoryResult === "backordered") { + // Branch: backordered → sleep and recheck + await emitSleeping( + "checkInventory", + "5s", + "Waiting for backorder restock" + ); + await sleep("5s"); + + // After sleep, recheck — inventory now available + const recheckResult = await recheckInventory(order, "backordered"); + stateTransitions++; + currentState = recheckResult; + } else { + currentState = inventoryResult; + } + + // Step 4: Reserve inventory + currentState = await reserveInventory(order, currentState); + stateTransitions++; + + // Step 5: Ship order + currentState = await shipOrder(order, currentState); + stateTransitions++; + + // Step 6: 
Confirm delivery + currentState = await confirmDelivery(order, currentState); + stateTransitions++; + + // Step 7: Complete order + return completeOrder(order, currentState, stateTransitions); +} + +async function initializeOrder( + order: OrderPayload, + currentState: OrderState +): Promise { + "use step"; + + const writer = getWritable().getWriter(); + const { attempt } = getStepMetadata(); + + try { + if (attempt > 1) { + await writer.write({ type: "step_retrying", step: "initializeOrder", attempt }); + } + + await writer.write({ + type: "step_started", + step: "initializeOrder", + message: `Initializing order ${order.orderId} with ${order.items.length} item(s)`, + }); + + await delay(STEP_DELAY_MS.initializeOrder); + + const nextState: OrderState = "received"; + await writer.write({ + type: "state_transition", + from: currentState, + to: nextState, + step: "initializeOrder", + }); + + await writer.write({ + type: "step_completed", + step: "initializeOrder", + message: `Order ${order.orderId} initialized`, + }); + + return nextState; + } finally { + writer.releaseLock(); + } +} + +async function validatePayment( + order: OrderPayload, + currentState: OrderState +): Promise { + "use step"; + + const writer = getWritable().getWriter(); + const { attempt } = getStepMetadata(); + + try { + if (attempt > 1) { + await writer.write({ type: "step_retrying", step: "validatePayment", attempt }); + } + + await writer.write({ + type: "step_started", + step: "validatePayment", + message: `Validating ${order.paymentMethod} payment for order ${order.orderId}`, + }); + + await delay(STEP_DELAY_MS.validatePayment); + + if (order.simulatePaymentFail) { + const nextState: OrderState = "payment_failed"; + await writer.write({ + type: "branch_taken", + step: "validatePayment", + branch: "payment_failed", + reason: `Payment declined for ${order.paymentMethod}`, + }); + await writer.write({ + type: "state_transition", + from: currentState, + to: nextState, + step: "validatePayment", + 
}); + return nextState; + } + + const nextState: OrderState = "payment_validated"; + await writer.write({ + type: "state_transition", + from: currentState, + to: nextState, + step: "validatePayment", + }); + await writer.write({ + type: "step_completed", + step: "validatePayment", + message: `Payment validated via ${order.paymentMethod}`, + }); + + return nextState; + } finally { + writer.releaseLock(); + } +} + +async function checkInventory( + order: OrderPayload, + currentState: OrderState +): Promise { + "use step"; + + const writer = getWritable().getWriter(); + const { attempt } = getStepMetadata(); + + try { + if (attempt > 1) { + await writer.write({ type: "step_retrying", step: "checkInventory", attempt }); + } + + await writer.write({ + type: "step_started", + step: "checkInventory", + message: `Checking inventory for ${order.items.length} item(s)`, + }); + + await delay(STEP_DELAY_MS.checkInventory); + + if (order.simulateBackorder) { + const nextState: OrderState = "backordered"; + await writer.write({ + type: "branch_taken", + step: "checkInventory", + branch: "backordered", + reason: "Items temporarily out of stock, initiating backorder wait", + }); + await writer.write({ + type: "state_transition", + from: currentState, + to: nextState, + step: "checkInventory", + }); + return nextState; + } + + const nextState: OrderState = "inventory_checked"; + await writer.write({ + type: "state_transition", + from: currentState, + to: nextState, + step: "checkInventory", + }); + await writer.write({ + type: "step_completed", + step: "checkInventory", + message: "All items in stock", + }); + + return nextState; + } finally { + writer.releaseLock(); + } +} + +async function recheckInventory( + order: OrderPayload, + currentState: OrderState +): Promise { + "use step"; + + const writer = getWritable().getWriter(); + + try { + await writer.write({ + type: "step_started", + step: "recheckInventory", + message: "Rechecking inventory after backorder wait", + }); + + 
await delay(STEP_DELAY_MS.checkInventory); + + const nextState: OrderState = "inventory_checked"; + await writer.write({ + type: "state_transition", + from: currentState, + to: nextState, + step: "recheckInventory", + }); + await writer.write({ + type: "step_completed", + step: "recheckInventory", + message: "Backorder resolved — items now available", + }); + + return nextState; + } finally { + writer.releaseLock(); + } +} + +async function reserveInventory( + order: OrderPayload, + currentState: OrderState +): Promise { + "use step"; + + const writer = getWritable().getWriter(); + const { attempt } = getStepMetadata(); + + try { + if (attempt > 1) { + await writer.write({ type: "step_retrying", step: "reserveInventory", attempt }); + } + + await writer.write({ + type: "step_started", + step: "reserveInventory", + message: `Reserving ${order.items.length} item(s) in warehouse`, + }); + + await delay(STEP_DELAY_MS.reserveInventory); + + const nextState: OrderState = "inventory_reserved"; + await writer.write({ + type: "state_transition", + from: currentState, + to: nextState, + step: "reserveInventory", + }); + await writer.write({ + type: "step_completed", + step: "reserveInventory", + message: "Inventory reserved successfully", + }); + + return nextState; + } finally { + writer.releaseLock(); + } +} + +async function shipOrder( + order: OrderPayload, + currentState: OrderState +): Promise { + "use step"; + + const writer = getWritable().getWriter(); + const { attempt } = getStepMetadata(); + + try { + if (attempt > 1) { + await writer.write({ type: "step_retrying", step: "shipOrder", attempt }); + } + + await writer.write({ + type: "step_started", + step: "shipOrder", + message: `Shipping order ${order.orderId}`, + }); + + await delay(STEP_DELAY_MS.shipOrder); + + const nextState: OrderState = "shipped"; + await writer.write({ + type: "state_transition", + from: currentState, + to: nextState, + step: "shipOrder", + }); + await writer.write({ + type: 
"step_completed", + step: "shipOrder", + message: `Order shipped — tracking: TRK-${order.orderId}-${Date.now().toString(36)}`, + }); + + return nextState; + } finally { + writer.releaseLock(); + } +} + +async function confirmDelivery( + order: OrderPayload, + currentState: OrderState +): Promise { + "use step"; + + const writer = getWritable().getWriter(); + const { attempt } = getStepMetadata(); + + try { + if (attempt > 1) { + await writer.write({ type: "step_retrying", step: "confirmDelivery", attempt }); + } + + await writer.write({ + type: "step_started", + step: "confirmDelivery", + message: "Awaiting delivery confirmation", + }); + + await delay(STEP_DELAY_MS.confirmDelivery); + + const nextState: OrderState = "delivery_confirmed"; + await writer.write({ + type: "state_transition", + from: currentState, + to: nextState, + step: "confirmDelivery", + }); + await writer.write({ + type: "step_completed", + step: "confirmDelivery", + message: "Delivery confirmed by recipient", + }); + + return nextState; + } finally { + writer.releaseLock(); + } +} + +async function completeOrder( + order: OrderPayload, + currentState: OrderState, + stateTransitions: number +): Promise { + "use step"; + + const writer = getWritable().getWriter(); + + try { + await writer.write({ + type: "step_started", + step: "completeOrder", + message: `Finalizing order ${order.orderId}`, + }); + + await delay(STEP_DELAY_MS.completeOrder); + + const nextState: OrderState = "completed"; + await writer.write({ + type: "state_transition", + from: currentState, + to: nextState, + step: "completeOrder", + }); + + const summary: OrderSummary = { + orderId: order.orderId, + finalState: nextState, + stateTransitions: stateTransitions + 1, + paymentMethod: order.paymentMethod, + itemCount: order.items.length, + trackingId: `TRK-${order.orderId}-${Date.now().toString(36)}`, + }; + + await writer.write({ + type: "done", + orderId: order.orderId, + finalState: nextState, + summary, + }); + + return 
summary; + } finally { + writer.releaseLock(); + } +} + +async function cancelOrder( + order: OrderPayload, + currentState: OrderState, + stateTransitions: number +): Promise { + "use step"; + + const writer = getWritable().getWriter(); + + try { + await writer.write({ + type: "step_started", + step: "cancelOrder", + message: `Cancelling order ${order.orderId} due to payment failure`, + }); + + await delay(STEP_DELAY_MS.completeOrder); + + const nextState: OrderState = "cancelled"; + await writer.write({ + type: "state_transition", + from: currentState, + to: nextState, + step: "cancelOrder", + }); + + const summary: OrderSummary = { + orderId: order.orderId, + finalState: nextState, + stateTransitions: stateTransitions + 1, + paymentMethod: order.paymentMethod, + itemCount: order.items.length, + trackingId: null, + }; + + await writer.write({ + type: "done", + orderId: order.orderId, + finalState: nextState, + summary, + }); + + return summary; + } finally { + writer.releaseLock(); + } +} + +async function emitSleeping( + step: string, + duration: string, + reason: string +): Promise { + "use step"; + const writer = getWritable().getWriter(); + try { + await writer.write({ type: "sleeping", step, duration, reason }); + } finally { + writer.releaseLock(); + } +} + +emitSleeping.maxRetries = 0; +``` + +## Key APIs + +- [`"use workflow"`](/docs/api-reference/workflow/use-workflow) — declares the orchestrator function +- [`"use step"`](/docs/api-reference/workflow/use-step) — declares step functions with full Node.js access +- [`sleep()`](/docs/api-reference/workflow/sleep) — durable timer for backorder wait +- [`getWritable()`](/docs/api-reference/workflow/get-writable) — streams state transitions to the client diff --git a/docs/content/docs/cookbook/payments/saga.mdx b/docs/content/docs/cookbook/payments/saga.mdx new file mode 100644 index 0000000000..02bfc65364 --- /dev/null +++ b/docs/content/docs/cookbook/payments/saga.mdx @@ -0,0 +1,373 @@ +--- +title: Saga 
+description: Long-lived transaction across services using forward steps and compensations. +type: guide +summary: Upgrade a subscription (reserve seats, capture invoice, provision) with auto-rollback on failure. +--- + +Use the saga pattern when a business transaction spans multiple services and you need automatic rollback if any step fails. Each forward step registers a compensation, and on failure the workflow unwinds them in reverse order. + +## Pattern + +Each step in the saga returns a result and pushes a compensation handler onto a stack. If a later step throws a `FatalError`, the workflow catches it and executes compensations in LIFO order to restore consistency. + +### Simplified + +```typescript lineNumbers +import { FatalError } from "workflow"; + +declare function reserveSeats(accountId: string, seats: number): Promise<string>; // @setup +declare function releaseSeats(accountId: string, reservationId: string): Promise<void>; // @setup +declare function captureInvoice(accountId: string, seats: number): Promise<string>; // @setup +declare function refundInvoice(accountId: string, invoiceId: string): Promise<void>; // @setup +declare function provisionSeats(accountId: string, seats: number): Promise<string>; // @setup +declare function deprovisionSeats(accountId: string, entitlementId: string): Promise<void>; // @setup +declare function sendConfirmation(accountId: string, invoiceId: string, entitlementId: string): Promise<void>; // @setup + +export async function subscriptionUpgradeSaga(accountId: string, seats: number) { + "use workflow"; + + const compensations: Array<() => Promise<void>> = []; + + try { + const reservationId = await reserveSeats(accountId, seats); + compensations.push(() => releaseSeats(accountId, reservationId)); + + const invoiceId = await captureInvoice(accountId, seats); + compensations.push(() => refundInvoice(accountId, invoiceId)); + + const entitlementId = await provisionSeats(accountId, seats); + compensations.push(() => deprovisionSeats(accountId, entitlementId)); + + await
sendConfirmation(accountId, invoiceId, entitlementId); + return { status: "completed" }; + } catch (error) { + if (!(error instanceof FatalError)) throw error; + + // Unwind compensations in reverse order + while (compensations.length > 0) { + await compensations.pop()!(); + } + + return { status: "rolled_back" }; + } +} +``` + +### Full Implementation + +```typescript lineNumbers +import { FatalError, getWritable } from "workflow"; + +type FailAtStep = 1 | 2 | 3 | null; +type CompensationAction = + | "releaseSeats" + | "refundInvoice" + | "deprovisionSeats"; +type CompensationHandler = { + action: CompensationAction; + undo: () => Promise<void>; +}; + +export type SagaEvent = + | { type: "step_running"; step: string; label: string } + | { type: "step_succeeded"; step: string; label: string } + | { type: "step_failed"; step: string; label: string; error: string } + | { type: "step_skipped"; step: string; label: string } + | { type: "compensation_pushed"; action: CompensationAction; forStep: string } + | { type: "rolling_back"; failedStep: number } + | { type: "compensating"; action: CompensationAction } + | { type: "compensated"; action: CompensationAction } + | { type: "done"; status: "completed" | "rolled_back" }; + +export interface SubscriptionUpgradeResult { + accountId: string; + seats: number; + status: "completed" | "rolled_back"; + failedStep: FailAtStep; + compensationOrder: CompensationAction[]; +} + +function delay(ms: number): Promise<void> { + return new Promise((resolve) => setTimeout(resolve, ms)); +} + +const STEP_DELAY_MS: Record<string, number> = { + reserveSeats: 600, + captureInvoice: 700, + provisionSeats: 800, + sendConfirmation: 500, + releaseSeats: 500, + refundInvoice: 600, + deprovisionSeats: 500, +}; + +export async function subscriptionUpgradeSaga( + accountId: string, + seats: number, + failAtStep: FailAtStep = null +): Promise<SubscriptionUpgradeResult> { + "use workflow"; + + const compensations: CompensationHandler[] = []; + + let reservationId: string | null = null; + let invoiceId:
string | null = null; + let entitlementId: string | null = null; + + try { + reservationId = await reserveSeats(accountId, seats, failAtStep === 1); + { + const reservationToRelease = reservationId; + compensations.push({ + action: "releaseSeats", + undo: () => releaseSeats(accountId, reservationToRelease), + }); + } + + invoiceId = await captureInvoice(accountId, seats, failAtStep === 2); + { + const invoiceToRefund = invoiceId; + compensations.push({ + action: "refundInvoice", + undo: () => refundInvoice(accountId, invoiceToRefund), + }); + } + + entitlementId = await provisionSeats(accountId, seats, failAtStep === 3); + { + const entitlementToDeprovision = entitlementId; + compensations.push({ + action: "deprovisionSeats", + undo: () => deprovisionSeats(accountId, entitlementToDeprovision), + }); + } + + await sendConfirmation(accountId, seats, invoiceId, entitlementId); + await emitDone("completed"); + + return { + accountId, + seats, + status: "completed", + failedStep: null, + compensationOrder: compensations.map((compensation) => compensation.action), + }; + } catch (error) { + if (!(error instanceof FatalError)) { + throw error; + } + + const executedCompensations: CompensationAction[] = []; + + while (compensations.length > 0) { + const compensation = compensations.pop()!; + executedCompensations.push(compensation.action); + await compensation.undo(); + } + + await emitDone("rolled_back"); + + return { + accountId, + seats, + status: "rolled_back", + failedStep: failAtStep, + compensationOrder: executedCompensations, + }; + } +} + +async function reserveSeats( + accountId: string, + seats: number, + shouldFail: boolean +): Promise { + "use step"; + + const writer = getWritable().getWriter(); + try { + await writer.write({ type: "step_running", step: "reserveSeats", label: "Reserve seats" }); + await delay(STEP_DELAY_MS.reserveSeats); + + if (shouldFail) { + await writer.write({ type: "step_failed", step: "reserveSeats", label: "Reserve seats", error: 
`reserveSeats failed for account ${accountId}` }); + await writer.write({ type: "step_skipped", step: "captureInvoice", label: "Capture invoice" }); + await writer.write({ type: "step_skipped", step: "provisionSeats", label: "Provision seats" }); + await writer.write({ type: "step_skipped", step: "sendConfirmation", label: "Send confirmation" }); + throw new FatalError( + `reserveSeats failed for account ${accountId} with ${seats} seats` + ); + } + + await writer.write({ type: "step_succeeded", step: "reserveSeats", label: "Reserve seats" }); + await writer.write({ type: "compensation_pushed", action: "releaseSeats", forStep: "reserveSeats" }); + return `seat_reservation:${accountId}:${seats}`; + } finally { + writer.releaseLock(); + } +} + +async function captureInvoice( + accountId: string, + seats: number, + shouldFail: boolean +): Promise { + "use step"; + + const writer = getWritable().getWriter(); + try { + await writer.write({ type: "step_running", step: "captureInvoice", label: "Capture invoice" }); + await delay(STEP_DELAY_MS.captureInvoice); + + if (shouldFail) { + await writer.write({ type: "step_failed", step: "captureInvoice", label: "Capture invoice", error: `captureInvoice failed for account ${accountId}` }); + await writer.write({ type: "step_skipped", step: "provisionSeats", label: "Provision seats" }); + await writer.write({ type: "step_skipped", step: "sendConfirmation", label: "Send confirmation" }); + throw new FatalError( + `captureInvoice failed for account ${accountId} with ${seats} seats` + ); + } + + await writer.write({ type: "step_succeeded", step: "captureInvoice", label: "Capture invoice" }); + await writer.write({ type: "compensation_pushed", action: "refundInvoice", forStep: "captureInvoice" }); + return `invoice:${accountId}:${seats}`; + } finally { + writer.releaseLock(); + } +} + +async function provisionSeats( + accountId: string, + seats: number, + shouldFail: boolean +): Promise { + "use step"; + + const writer = 
getWritable().getWriter(); + try { + await writer.write({ type: "step_running", step: "provisionSeats", label: "Provision seats" }); + await delay(STEP_DELAY_MS.provisionSeats); + + if (shouldFail) { + await writer.write({ type: "step_failed", step: "provisionSeats", label: "Provision seats", error: `provisionSeats failed for account ${accountId}` }); + await writer.write({ type: "step_skipped", step: "sendConfirmation", label: "Send confirmation" }); + throw new FatalError( + `provisionSeats failed for account ${accountId} with ${seats} seats` + ); + } + + await writer.write({ type: "step_succeeded", step: "provisionSeats", label: "Provision seats" }); + await writer.write({ type: "compensation_pushed", action: "deprovisionSeats", forStep: "provisionSeats" }); + return `entitlement:${accountId}:${seats}`; + } finally { + writer.releaseLock(); + } +} + +async function sendConfirmation( + accountId: string, + seats: number, + invoiceId: string, + entitlementId: string +): Promise { + "use step"; + + const writer = getWritable().getWriter(); + try { + await writer.write({ type: "step_running", step: "sendConfirmation", label: "Send confirmation" }); + await delay(STEP_DELAY_MS.sendConfirmation); + await writer.write({ type: "step_succeeded", step: "sendConfirmation", label: "Send confirmation" }); + } finally { + writer.releaseLock(); + } + + console.info("[subscription-upgrade-saga] confirmation_sent", { + accountId, + seats, + invoiceId, + entitlementId, + }); +} + +async function releaseSeats( + accountId: string, + reservationId: string | null +): Promise { + "use step"; + + const writer = getWritable().getWriter(); + try { + await writer.write({ type: "compensating", action: "releaseSeats" }); + await delay(STEP_DELAY_MS.releaseSeats); + await writer.write({ type: "compensated", action: "releaseSeats" }); + } finally { + writer.releaseLock(); + } + + console.info("[subscription-upgrade-saga] release_seats", { + accountId, + reservationId, + }); +} + +async 
function refundInvoice( + accountId: string, + invoiceId: string | null +): Promise { + "use step"; + + const writer = getWritable().getWriter(); + try { + await writer.write({ type: "compensating", action: "refundInvoice" }); + await delay(STEP_DELAY_MS.refundInvoice); + await writer.write({ type: "compensated", action: "refundInvoice" }); + } finally { + writer.releaseLock(); + } + + console.info("[subscription-upgrade-saga] refund_invoice", { + accountId, + invoiceId, + }); +} + +async function deprovisionSeats( + accountId: string, + entitlementId: string | null +): Promise { + "use step"; + + const writer = getWritable().getWriter(); + try { + await writer.write({ type: "compensating", action: "deprovisionSeats" }); + await delay(STEP_DELAY_MS.deprovisionSeats); + await writer.write({ type: "compensated", action: "deprovisionSeats" }); + } finally { + writer.releaseLock(); + } + + console.info("[subscription-upgrade-saga] deprovision_seats", { + accountId, + entitlementId, + }); +} + +async function emitDone(status: "completed" | "rolled_back"): Promise { + "use step"; + + const writer = getWritable().getWriter(); + try { + await writer.write({ type: "done", status }); + } finally { + writer.releaseLock(); + } +} +``` + +## Key APIs + +- [`"use workflow"`](/docs/api-reference/workflow/use-workflow) — declares the orchestrator function +- [`"use step"`](/docs/api-reference/workflow/use-step) — declares step functions with full Node.js access +- [`FatalError`](/docs/api-reference/workflow/fatal-error) — non-retryable error that triggers compensation +- [`getWritable()`](/docs/api-reference/workflow/get-writable) — streams events to the client diff --git a/docs/content/docs/cookbook/payments/transactional-outbox.mdx b/docs/content/docs/cookbook/payments/transactional-outbox.mdx new file mode 100644 index 0000000000..d1550bec8a --- /dev/null +++ b/docs/content/docs/cookbook/payments/transactional-outbox.mdx @@ -0,0 +1,163 @@ +--- +title: Transactional Outbox 
+description: Write business data and an outbox event in one transaction, then publish reliably. +type: guide +summary: Persist an order and relay it to a message broker in one transaction for at-least-once delivery. +--- + +Use the transactional outbox pattern when you need to write business data and publish an event atomically. The workflow persists the order and an outbox record together, then a relay step polls the outbox and publishes to the broker. + +## Pattern + +The workflow splits the operation into four durable steps: persist the order with an outbox entry, poll and relay the outbox entry, publish to the broker, and mark the outbox entry as sent. Because each step is persisted in the event log, the relay will resume after any crash, guaranteeing at-least-once delivery. + +### Simplified + +```typescript lineNumbers +declare function persistOrder(orderId: string, payload: string): Promise<{ outboxId: string }>; // @setup +declare function pollRelay(outboxId: string): Promise<{ brokerId: string }>; // @setup +declare function publishEvent(outboxId: string, brokerId: string): Promise<void>; // @setup +declare function markSent(orderId: string, outboxId: string, brokerId: string): Promise<{ status: string }>; // @setup + +export async function transactionalOutbox(orderId: string, payload: string) { + "use workflow"; + + // Step 1: Persist order + outbox entry in one transaction + const { outboxId } = await persistOrder(orderId, payload); + + // Step 2: Relay polls outbox for unsent entries + const { brokerId } = await pollRelay(outboxId); + + // Step 3: Publish to message broker + await publishEvent(outboxId, brokerId); + + // Step 4: Mark outbox entry as sent + return markSent(orderId, outboxId, brokerId); +} +``` + +### Full Implementation + +```typescript lineNumbers +// getWritable is used here to stream demo UI events. +// A production workflow wouldn't need these unless it has its own streaming UI.
+import { getWritable } from "workflow"; + +export type OutboxEvent = + | { type: "persisting"; orderId: string } + | { type: "persisted"; orderId: string; outboxId: string } + | { type: "relaying"; outboxId: string } + | { type: "published"; outboxId: string; brokerId: string } + | { type: "marking_sent"; outboxId: string } + | { type: "confirmed"; outboxId: string } + | { type: "done"; orderId: string; outboxId: string; brokerId: string }; + +type OutboxResult = { + orderId: string; + outboxId: string; + brokerId: string; + status: "confirmed"; +}; + +// Demo: simulate real-world processing latency so the UI can show progress. +const PERSIST_DELAY_MS = 600; +const RELAY_DELAY_MS = 800; +const PUBLISH_DELAY_MS = 700; +const MARK_SENT_DELAY_MS = 400; + +function delay(ms: number): Promise { + return new Promise((resolve) => setTimeout(resolve, ms)); +} + +export async function transactionalOutbox( + orderId: string, + payload: string +): Promise { + "use workflow"; + + const { outboxId } = await persistOrder(orderId, payload); + const { brokerId } = await pollRelay(outboxId); + await publishEvent(outboxId, brokerId); + return markSent(orderId, outboxId, brokerId); +} + +async function persistOrder( + orderId: string, + payload: string +): Promise<{ outboxId: string }> { + "use step"; + const writer = getWritable().getWriter(); + + try { + await writer.write({ type: "persisting", orderId }); + await delay(PERSIST_DELAY_MS); + + const outboxId = `obx_${orderId}_${payload.length}`; + await writer.write({ type: "persisted", orderId, outboxId }); + + return { outboxId }; + } finally { + writer.releaseLock(); + } +} + +async function pollRelay( + outboxId: string +): Promise<{ brokerId: string }> { + "use step"; + const writer = getWritable().getWriter(); + + try { + await writer.write({ type: "relaying", outboxId }); + await delay(RELAY_DELAY_MS); + + const brokerId = `brk_${outboxId}_${Date.now()}`; + await writer.write({ type: "published", outboxId, brokerId }); + + 
return { brokerId }; + } finally { + writer.releaseLock(); + } +} + +async function publishEvent( + outboxId: string, + brokerId: string +): Promise { + "use step"; + const writer = getWritable().getWriter(); + + try { + await writer.write({ type: "marking_sent", outboxId }); + await delay(PUBLISH_DELAY_MS); + + await writer.write({ type: "confirmed", outboxId }); + } finally { + writer.releaseLock(); + } +} + +async function markSent( + orderId: string, + outboxId: string, + brokerId: string +): Promise { + "use step"; + const writer = getWritable().getWriter(); + + try { + await delay(MARK_SENT_DELAY_MS); + await writer.write({ type: "done", orderId, outboxId, brokerId }); + + return { orderId, outboxId, brokerId, status: "confirmed" }; + } finally { + writer.releaseLock(); + } +} +``` + +## Key APIs + +- [`"use workflow"`](/docs/api-reference/workflow/use-workflow) — declares the orchestrator function +- [`"use step"`](/docs/api-reference/workflow/use-step) — declares step functions with automatic retry +- [`getWritable()`](/docs/api-reference/workflow/get-writable) — streams outbox progress to the client diff --git a/docs/content/docs/cookbook/resilience/bulkhead.mdx b/docs/content/docs/cookbook/resilience/bulkhead.mdx new file mode 100644 index 0000000000..74bf77b366 --- /dev/null +++ b/docs/content/docs/cookbook/resilience/bulkhead.mdx @@ -0,0 +1,222 @@ +--- +title: Bulkhead +description: Isolate capacity or failure domains so one overloaded path doesn't sink the whole system. +type: guide +summary: Partition order items into isolated groups so one bad SKU doesn't block the rest of the shipment. +--- + +Partition order items into isolated groups so one bad SKU doesn't block the rest of the shipment. + +## Pattern + +The workflow splits items into fixed-size compartments and processes each compartment with `Promise.allSettled()`. Failures in one compartment are isolated — they don't affect items in other compartments. 
A pacing `sleep()` between compartments prevents overload. + +### Simplified + +```typescript lineNumbers +import { sleep } from "workflow"; + +declare function processItem(item: string, compartment: number): Promise<{ item: string; ok: boolean }>; // @setup + +export async function bulkhead(items: string[], maxConcurrency: number) { + "use workflow"; + + const results = []; + let compartment = 0; + + for (let i = 0; i < items.length; i += maxConcurrency) { + compartment++; + const batch = items.slice(i, i + maxConcurrency); + + const outcomes = await Promise.allSettled( + batch.map((item) => processItem(item, compartment)) + ); + + for (let j = 0; j < outcomes.length; j++) { + const outcome = outcomes[j]; + results.push( + outcome.status === "fulfilled" + ? outcome.value + : { item: batch[j], ok: false } + ); + } + + if (i + maxConcurrency < items.length) { + await sleep("1s"); + } + } + + return results; +} +``` + +### Full Implementation + +```typescript lineNumbers +// getWritable is used here to stream demo UI events. +// A production workflow wouldn't need this unless it has its own streaming UI. 
+import { getWritable, sleep } from "workflow"; + +export type BulkheadEvent = + | { type: "compartment_start"; compartment: number; items: string[] } + | { type: "item_processing"; compartment: number; item: string } + | { type: "item_success"; compartment: number; item: string; durationMs: number } + | { type: "item_failure"; compartment: number; item: string; error: string } + | { type: "pacing"; compartment: number } + | { type: "summarizing" } + | { + type: "done"; + summary: { + total: number; + succeeded: number; + failed: number; + compartments: number; + }; + }; + +type ItemResult = { + item: string; + compartment: number; + ok: boolean; + durationMs?: number; + error?: string; +}; + +type BulkheadResult = { + status: "done"; + total: number; + succeeded: number; + failed: number; + compartments: number; + results: ItemResult[]; +}; + +// Demo: staggered delays per item position for visual progression +const ITEM_DELAY_MS = [600, 750, 900]; + +// Demo: compartment 2, item index 1 fails to show isolation +const FAIL_COMPARTMENT = 2; +const FAIL_INDEX = 1; + +function delay(ms: number): Promise { + return new Promise((resolve) => setTimeout(resolve, ms)); +} + +export async function bulkhead( + jobId: string, + items: string[], + maxConcurrency: number +): Promise { + "use workflow"; + + const results: ItemResult[] = []; + let compartmentIndex = 0; + + for (let i = 0; i < items.length; i += maxConcurrency) { + compartmentIndex++; + const batch = items.slice(i, i + maxConcurrency); + + // Run compartment in parallel — failures are isolated + const outcomes = await Promise.allSettled( + batch.map((item, idx) => + processItem(jobId, item, compartmentIndex, idx) + ) + ); + + for (let j = 0; j < outcomes.length; j++) { + const outcome = outcomes[j]; + if (outcome.status === "fulfilled") { + results.push(outcome.value); + } else { + results.push({ + item: batch[j], + compartment: compartmentIndex, + ok: false, + error: String(outcome.reason), + }); + } + } + + // 
Pacing delay between compartments + if (i + maxConcurrency < items.length) { + const writer = getWritable().getWriter(); + try { + await writer.write({ type: "pacing", compartment: compartmentIndex }); + } finally { + writer.releaseLock(); + } + await sleep("1s"); + } + } + + return summarizeResults(results, compartmentIndex); +} + +async function processItem( + jobId: string, + item: string, + compartment: number, + indexInBatch: number +): Promise { + "use step"; + + const writer = getWritable().getWriter(); + + try { + await writer.write({ type: "item_processing", compartment, item }); + + const delayMs = ITEM_DELAY_MS[indexInBatch % ITEM_DELAY_MS.length]; + await delay(delayMs); + + // Demo: deterministic failure in compartment 2, index 1 + if (compartment === FAIL_COMPARTMENT && indexInBatch === FAIL_INDEX) { + const error = `Service unavailable for ${item}`; + await writer.write({ type: "item_failure", compartment, item, error }); + throw new Error(error); + } + + await writer.write({ + type: "item_success", + compartment, + item, + durationMs: delayMs, + }); + + return { item, compartment, ok: true, durationMs: delayMs }; + } finally { + writer.releaseLock(); + } +} + +async function summarizeResults( + results: ItemResult[], + compartments: number +): Promise { + "use step"; + + const writer = getWritable().getWriter(); + + try { + await writer.write({ type: "summarizing" }); + await delay(500); + + const succeeded = results.filter((r) => r.ok).length; + const failed = results.length - succeeded; + const summary = { total: results.length, succeeded, failed, compartments }; + + await writer.write({ type: "done", summary }); + + return { status: "done", ...summary, results }; + } finally { + writer.releaseLock(); + } +} +``` + +## Key APIs + +- [`"use workflow"`](/docs/foundations/workflows-and-steps) — marks the orchestrator function +- [`"use step"`](/docs/foundations/workflows-and-steps) — marks functions that run with full Node.js access +- 
[`sleep()`](/docs/api-reference/workflow/sleep) — pacing delay between compartments + - [`Promise.allSettled()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise/allSettled) — runs items in parallel, isolating failures + - [`getWritable()`](/docs/api-reference/workflow/get-writable) — streams events to the caller diff --git a/docs/content/docs/cookbook/resilience/circuit-breaker.mdx b/docs/content/docs/cookbook/resilience/circuit-breaker.mdx new file mode 100644 index 0000000000..d3b6703483 --- /dev/null +++ b/docs/content/docs/cookbook/resilience/circuit-breaker.mdx @@ -0,0 +1,226 @@ +--- +title: Circuit Breaker +description: Stop calling a failing dependency for a cooldown, then probe for recovery. +type: guide +summary: Stop hammering a down payment gateway after 3 failures, wait 30s, then test with one probe request. +--- + +Stop hammering a down payment gateway after 3 failures, wait 30s, then test with one probe request. + +## Pattern + +The workflow tracks circuit state (`closed`, `open`, `half-open`) and a consecutive failure count. After the failure threshold is reached, the circuit opens and `sleep()` enforces a durable cooldown. The next request after cooldown is a probe — if it succeeds, the circuit closes again.
+ +### Simplified + +```typescript lineNumbers +import { sleep } from "workflow"; + +declare function callPaymentService(requestNum: number): Promise; // @setup + +export async function circuitBreaker(maxRequests: number = 10) { + "use workflow"; + + let state: "closed" | "open" | "half-open" = "closed"; + let consecutiveFailures = 0; + const FAILURE_THRESHOLD = 3; + const COOLDOWN = "30s"; + + for (let i = 1; i <= maxRequests; i++) { + if (state === "open") { + await sleep(COOLDOWN); + state = "half-open"; + } + + const success = await callPaymentService(i); + + if (success) { + consecutiveFailures = 0; + if (state === "half-open") state = "closed"; + } else { + consecutiveFailures++; + if (consecutiveFailures >= FAILURE_THRESHOLD) { + state = "open"; + consecutiveFailures = 0; + } + } + } + + return { status: state === "closed" ? "recovered" : "failed" }; +} +``` + +### Full Implementation + +```typescript lineNumbers +import { getWritable, sleep } from "workflow"; + +export type CircuitState = "closed" | "open" | "half-open"; + +export type CircuitEvent = + | { type: "request_attempt"; requestNum: number; circuitState: CircuitState } + | { type: "request_success"; requestNum: number; circuitState: CircuitState } + | { type: "request_fail"; requestNum: number; circuitState: CircuitState } + | { type: "circuit_open"; requestNum: number } + | { type: "cooldown_start"; requestNum: number; cooldownMs: number } + | { type: "cooldown_end"; requestNum: number } + | { type: "circuit_half_open"; requestNum: number } + | { type: "circuit_closed"; requestNum: number } + | { + type: "done"; + status: "recovered" | "failed"; + totalRequests: number; + totalFailures: number; + circuitOpened: number; + }; + +export interface CircuitBreakerResult { + serviceId: string; + status: "recovered" | "failed"; + totalRequests: number; + totalFailures: number; + circuitOpened: number; +} + +// Demo timing: simulate realistic request latency for the UI +const REQUEST_DELAY_MS = 500; 
+const COOLDOWN_MS = 3000; +const FAILURE_THRESHOLD = 3; + +function delay(ms: number): Promise { + return new Promise((resolve) => setTimeout(resolve, ms)); +} + +export async function circuitBreakerFlow( + serviceId: string, + maxRequests: number = 10, + failStart: number = 4, + failEnd: number = 6 +): Promise { + "use workflow"; + + let state: CircuitState = "closed"; + let consecutiveFailures = 0; + let totalRequests = 0; + let totalFailures = 0; + let circuitOpened = 0; + + for (let i = 1; i <= maxRequests; i++) { + if (state === "open") { + await emitEvent({ + type: "cooldown_start", + requestNum: i, + cooldownMs: COOLDOWN_MS, + }); + await sleep(`${COOLDOWN_MS}ms`); + state = "half-open"; + await emitEvent({ type: "cooldown_end", requestNum: i }); + await emitEvent({ type: "circuit_half_open", requestNum: i }); + } + + const success = await callPaymentService( + serviceId, + i, + state, + failStart, + failEnd + ); + totalRequests++; + + if (success) { + consecutiveFailures = 0; + if (state === "half-open") { + state = "closed"; + await emitEvent({ type: "circuit_closed", requestNum: i }); + } + } else { + totalFailures++; + consecutiveFailures++; + if (consecutiveFailures >= FAILURE_THRESHOLD) { + state = "open"; + circuitOpened++; + consecutiveFailures = 0; + await emitEvent({ type: "circuit_open", requestNum: i }); + } + } + } + + const result: CircuitBreakerResult = { + serviceId, + status: state === "closed" ? 
"recovered" : "failed", + totalRequests, + totalFailures, + circuitOpened, + }; + + await emitEvent({ + type: "done", + status: result.status, + totalRequests: result.totalRequests, + totalFailures: result.totalFailures, + circuitOpened: result.circuitOpened, + }); + + return result; +} + +async function emitEvent(event: CircuitEvent): Promise { + "use step"; + const writer = getWritable().getWriter(); + try { + await writer.write(event); + } finally { + writer.releaseLock(); + } +} + +async function callPaymentService( + serviceId: string, + requestNum: number, + circuitState: CircuitState, + failStart: number, + failEnd: number +): Promise { + "use step"; + + const writer = getWritable().getWriter(); + + try { + await writer.write({ + type: "request_attempt", + requestNum, + circuitState, + }); + + await delay(REQUEST_DELAY_MS); + + const shouldFail = requestNum >= failStart && requestNum <= failEnd; + + if (shouldFail) { + await writer.write({ + type: "request_fail", + requestNum, + circuitState, + }); + return false; + } + + await writer.write({ + type: "request_success", + requestNum, + circuitState, + }); + + return true; + } finally { + writer.releaseLock(); + } +} +``` + +## Key APIs + +- [`"use workflow"`](/docs/foundations/workflows-and-steps) — marks the orchestrator function +- [`"use step"`](/docs/foundations/workflows-and-steps) — marks functions that run with full Node.js access +- [`sleep()`](/docs/api-reference/workflow/sleep) — durable cooldown pause +- [`getWritable()`](/docs/api-reference/step/get-writable) — streams events to the caller diff --git a/docs/content/docs/cookbook/resilience/dead-letter-queue.mdx b/docs/content/docs/cookbook/resilience/dead-letter-queue.mdx new file mode 100644 index 0000000000..22c3525cbd --- /dev/null +++ b/docs/content/docs/cookbook/resilience/dead-letter-queue.mdx @@ -0,0 +1,197 @@ +--- +title: Dead Letter Queue +description: After repeated failure, move a message aside for inspection instead of infinite retry. 
+type: guide +summary: Route undeliverable messages to a dead-letter queue after 3 retries for ops review. +--- + +Route undeliverable messages to a dead-letter queue after 3 retries for ops review. + +## Pattern + +Each message is processed in a step that uses `getStepMetadata()` to track the attempt count. If the step fails and the attempt count reaches the maximum, the message is marked as dead-lettered instead of throwing again. This prevents infinite retry loops while preserving the message for inspection. + +### Simplified + +```typescript lineNumbers +import { getStepMetadata } from "workflow"; + +const MAX_ATTEMPTS = 3; + +export async function deadLetterQueue( + messages: string[], + poisonMessages: string[] = [] +) { + "use workflow"; + + const results = []; + + for (const messageId of messages) { + const isPoison = poisonMessages.includes(messageId); + const result = await processMessage(messageId, isPoison); + results.push(result); + } + + return results; +} + +async function processMessage(messageId: string, isPoison: boolean) { + "use step"; + + const { attempt } = getStepMetadata(); + + if (isPoison) { + if (attempt >= MAX_ATTEMPTS) { + return { messageId, status: "dead_lettered", attempts: attempt }; + } + throw new Error(`Cannot parse message ${messageId}`); + } + + return { messageId, status: "delivered", attempts: attempt }; +} +``` + +### Full Implementation + +```typescript lineNumbers +// getWritable + getStepMetadata are used here to stream demo UI events. +// A production workflow wouldn't need these unless it has its own streaming UI. 
+import { getStepMetadata, getWritable } from "workflow"; + +export type MessageId = string; + +export type DLQEvent = + | { type: "processing"; messageId: string } + | { type: "attempt"; messageId: string; attempt: number } + | { type: "success"; messageId: string; attempt: number } + | { type: "retry"; messageId: string; attempt: number; error: string } + | { type: "dlq"; messageId: string; error: string; attempts: number } + | { type: "done"; summary: { delivered: number; deadLettered: number } }; + +type MessageResult = { + messageId: string; + status: "delivered" | "dead_lettered"; + attempts: number; + error?: string; +}; + +type BatchReport = { + status: "done"; + results: MessageResult[]; + summary: { + delivered: number; + deadLettered: number; + }; +}; + +// Demo: per-message processing latency so the UI can show progress +const PROCESS_DELAY_MS = 600; +const DLQ_DELAY_MS = 500; +const MAX_ATTEMPTS = 3; + +function delay(ms: number): Promise { + return new Promise((resolve) => setTimeout(resolve, ms)); +} + +export async function deadLetterQueue( + messages: string[], + poisonMessages: string[] = [] +): Promise { + "use workflow"; + + const results: MessageResult[] = []; + + for (const messageId of messages) { + const isPoison = poisonMessages.includes(messageId); + const result = await processMessage(messageId, isPoison); + results.push(result); + } + + return recordResults(results); +} + +async function processMessage( + messageId: string, + isPoison: boolean +): Promise { + "use step"; + + const writer = getWritable().getWriter(); + const { attempt } = getStepMetadata(); + + try { + await writer.write({ type: "processing", messageId }); + await writer.write({ type: "attempt", messageId, attempt }); + await delay(PROCESS_DELAY_MS); + + if (isPoison) { + throw new Error(`Malformed payload: cannot parse message ${messageId}`); + } + + await writer.write({ type: "success", messageId, attempt }); + return { messageId, status: "delivered", attempts: attempt 
}; + } catch (error: unknown) { + const message = + error instanceof Error ? error.message : "Unknown processing error"; + + if (attempt >= MAX_ATTEMPTS) { + await writer.write({ + type: "dlq", + messageId, + error: message, + attempts: attempt, + }); + return { + messageId, + status: "dead_lettered", + attempts: attempt, + error: message, + }; + } + + await writer.write({ + type: "retry", + messageId, + attempt, + error: message, + }); + + throw error instanceof Error ? error : new Error(message); + } finally { + writer.releaseLock(); + } +} + +async function recordResults( + results: MessageResult[] +): Promise { + "use step"; + + const writer = getWritable().getWriter(); + + try { + await delay(DLQ_DELAY_MS); + + const delivered = results.filter((r) => r.status === "delivered").length; + const deadLettered = results.length - delivered; + + const report: BatchReport = { + status: "done", + results, + summary: { delivered, deadLettered }, + }; + + await writer.write({ type: "done", summary: report.summary }); + return report; + } finally { + writer.releaseLock(); + } +} +``` + +## Key APIs + +- [`"use workflow"`](/docs/foundations/workflows-and-steps) — marks the orchestrator function +- [`"use step"`](/docs/foundations/workflows-and-steps) — marks functions that run with full Node.js access +- [`getStepMetadata()`](/docs/api-reference/step/get-step-metadata) — provides the current attempt number to decide when to dead-letter +- [`getWritable()`](/docs/api-reference/step/get-writable) — streams events to the caller diff --git a/docs/content/docs/cookbook/resilience/hedge-request.mdx b/docs/content/docs/cookbook/resilience/hedge-request.mdx new file mode 100644 index 0000000000..576fc7c32f --- /dev/null +++ b/docs/content/docs/cookbook/resilience/hedge-request.mdx @@ -0,0 +1,170 @@ +--- +title: Hedge Request +description: Send duplicate requests; take the first successful response to cut tail latency. 
+type: guide +summary: Fire the same search query to two replicas and use whichever responds first. +--- + +Fire the same search query to two replicas and use whichever responds first. + +## Pattern + +The workflow launches the same step against multiple providers using `Promise.race()`. The first provider to respond wins, and the result is returned immediately. Because steps run in parallel, this cuts tail latency at the cost of redundant work. + +### Simplified + +```typescript lineNumbers +export async function hedgeRequest( + query: string, + providers: { name: string }[] +) { + "use workflow"; + + const result = await Promise.race( + providers.map((provider) => callProvider(provider.name, query)) + ); + + return { winner: result.provider, result: result.data }; +} + +async function callProvider(provider: string, query: string) { + "use step"; + const data = await fetch(`https://${provider}.example.com/search?q=${query}`); + return { provider, data: await data.json() }; +} +``` + +### Full Implementation + +```typescript lineNumbers +import { getWritable } from "workflow"; + +export type HedgeEvent = + | { type: "config"; providers: string[]; query: string } + | { type: "provider_started"; provider: string } + | { type: "provider_responded"; provider: string; latencyMs: number } + | { type: "provider_lost"; provider: string; latencyMs: number } + | { type: "winner"; provider: string; latencyMs: number; result: string } + | { type: "done"; winner: string; latencyMs: number; totalProviders: number }; + +export interface HedgeResult { + winner: string; + latencyMs: number; + totalProviders: number; +} + +export interface HedgeInput { + query: string; + providers: ProviderConfig[]; +} + +export type ProviderConfig = { + name: string; + simulatedLatencyMs: number; +}; + +function delay(ms: number): Promise { + return new Promise((resolve) => setTimeout(resolve, ms)); +} + +export async function hedgeRequestFlow( + input: HedgeInput +): Promise { + "use workflow"; + 
+ const { query, providers } = input; + + await emitEvent({ + type: "config", + providers: providers.map((p) => p.name), + query, + }); + + // Launch all providers in parallel, race for fastest + const raceResult = await Promise.race( + providers.map((provider) => callProvider(provider, query)) + ); + + // Mark losers + for (const provider of providers) { + if (provider.name !== raceResult.provider) { + await emitEvent({ + type: "provider_lost", + provider: provider.name, + latencyMs: provider.simulatedLatencyMs, + }); + } + } + + await emitEvent({ + type: "done", + winner: raceResult.provider, + latencyMs: raceResult.latencyMs, + totalProviders: providers.length, + }); + + return { + winner: raceResult.provider, + latencyMs: raceResult.latencyMs, + totalProviders: providers.length, + }; +} + +export async function callProvider( + provider: ProviderConfig, + query: string +): Promise<{ provider: string; latencyMs: number; result: string }> { + "use step"; + + const writer = getWritable().getWriter(); + try { + await writer.write({ + type: "provider_started", + provider: provider.name, + }); + + // Simulate variable latency + await delay(provider.simulatedLatencyMs); + + const result = `${provider.name} processed "${query}"`; + + await writer.write({ + type: "provider_responded", + provider: provider.name, + latencyMs: provider.simulatedLatencyMs, + }); + + await writer.write({ + type: "winner", + provider: provider.name, + latencyMs: provider.simulatedLatencyMs, + result, + }); + + return { + provider: provider.name, + latencyMs: provider.simulatedLatencyMs, + result, + }; + } finally { + writer.releaseLock(); + } +} + +async function emitEvent(event: HedgeEvent): Promise { + "use step"; + const writer = getWritable().getWriter(); + try { + await writer.write(event); + } finally { + writer.releaseLock(); + } +} +``` + +## Key APIs + +- [`"use workflow"`](/docs/foundations/workflows-and-steps) — marks the orchestrator function +- [`"use 
step"`](/docs/foundations/workflows-and-steps) — marks functions that run with full Node.js access +- [`Promise.race()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise/race) — returns the first provider to respond +- [`getWritable()`](/docs/api-reference/step/get-writable) — streams events to the caller diff --git a/docs/content/docs/cookbook/resilience/meta.json b/docs/content/docs/cookbook/resilience/meta.json new file mode 100644 index 0000000000..ff66215ea5 --- /dev/null +++ b/docs/content/docs/cookbook/resilience/meta.json @@ -0,0 +1,12 @@ +{ + "title": "Resilience", + "pages": [ + "retry-backoff", + "retryable-rate-limit", + "throttle", + "circuit-breaker", + "bulkhead", + "hedge-request", + "dead-letter-queue" + ] +} diff --git a/docs/content/docs/cookbook/resilience/retry-backoff.mdx b/docs/content/docs/cookbook/resilience/retry-backoff.mdx new file mode 100644 index 0000000000..06de00087c --- /dev/null +++ b/docs/content/docs/cookbook/resilience/retry-backoff.mdx @@ -0,0 +1,180 @@ +--- +title: Retry with Backoff +description: Retry failed steps with increasing delay to avoid hammering flaky dependencies. +type: guide +summary: Retry a flaky email API with 1s, 2s, 4s backoff instead of failing on the first hiccup. +--- + +Retry a flaky email API with 1s, 2s, 4s backoff instead of failing on the first hiccup. + +## Pattern + +The workflow loops through attempts, calling a step that may fail. On failure, `sleep()` pauses with exponentially increasing delay before the next attempt. Because `sleep()` is durable, the backoff survives cold starts and replays. 
+ +### Simplified + +```typescript lineNumbers +import { sleep, FatalError } from "workflow"; + +declare function syncContactToCrm(contactId: string, attempt: number): Promise; // @setup + +export async function retryBackoff( + contactId: string, + maxAttempts: number = 5, + baseDelayMs: number = 1_000 +) { + "use workflow"; + + for (let attempt = 1; attempt <= maxAttempts; attempt++) { + try { + await syncContactToCrm(contactId, attempt); + return { contactId, status: "completed", attempts: attempt }; + } catch { + if (attempt >= maxAttempts) { + return { contactId, status: "failed", attempts: attempt }; + } + const backoff = Math.min(8_000, baseDelayMs * 2 ** (attempt - 1)); + await sleep(`${backoff}ms`); + } + } +} +``` + +### Full Implementation + +```typescript lineNumbers +import { sleep, getWritable, FatalError } from "workflow"; + +export type RetryEvent = + | { type: "attempt_start"; attempt: number; contactId: string } + | { type: "attempt_fail"; attempt: number; error: string; sleepMs: number } + | { type: "attempt_success"; attempt: number; contactId: string } + | { type: "done"; status: "completed" | "failed"; attempts: number }; + +export interface ContactSyncResult { + contactId: string; + status: "completed" | "failed"; + attempts: number; + lastError?: string; +} + +const MAX_BACKOFF_MS = 8_000; +const STEP_DELAY_MS = 650; // Demo: visual pacing + +function backoffDelayMs(baseMs: number, attempt: number): number { + return Math.min(MAX_BACKOFF_MS, baseMs * 2 ** (attempt - 1)); +} + +function delay(ms: number): Promise { + return new Promise((resolve) => setTimeout(resolve, ms)); +} + +async function safeWrite( + writer: WritableStreamDefaultWriter, + event: RetryEvent +): Promise { + try { + await writer.write(event); + } catch { + // Best-effort streaming; step logic should continue on stream errors. 
+ } +} + +export async function retryBackoffContactSync( + contactId: string, + maxAttempts: number = 5, + baseDelayMs: number = 1_000, + failuresBeforeSuccess: number = 2 +): Promise { + "use workflow"; + + for (let attempt = 1; attempt <= maxAttempts; attempt += 1) { + const nextSleepMs = + attempt < maxAttempts ? backoffDelayMs(baseDelayMs, attempt) : 0; + + try { + await syncContactToCrm( + contactId, + attempt, + failuresBeforeSuccess, + nextSleepMs + ); + await emitDone("completed", attempt); + return { contactId, status: "completed", attempts: attempt }; + } catch (error) { + const lastError = + error instanceof Error ? error.message : String(error); + + if (attempt >= maxAttempts) { + await emitDone("failed", attempt); + return { + contactId, + status: "failed", + attempts: attempt, + lastError, + }; + } + + await sleep(`${nextSleepMs}ms`); + } + } + + await emitDone("failed", maxAttempts); + return { contactId, status: "failed", attempts: maxAttempts }; +} + +async function emitDone( + status: "completed" | "failed", + attempts: number +): Promise { + "use step"; + + const writer = getWritable().getWriter(); + try { + await safeWrite(writer, { type: "done", status, attempts }); + } finally { + writer.releaseLock(); + } +} + +async function syncContactToCrm( + contactId: string, + attempt: number, + failuresBeforeSuccess: number, + nextSleepMs: number +): Promise { + "use step"; + + const writer = getWritable().getWriter(); + + try { + await safeWrite(writer, { type: "attempt_start", attempt, contactId }); + await delay(STEP_DELAY_MS); // Demo: simulate network latency + + if (attempt <= failuresBeforeSuccess) { + const error = "CRM API returned HTTP 503 Service Unavailable"; + await safeWrite(writer, { + type: "attempt_fail", + attempt, + error, + sleepMs: nextSleepMs, + }); + throw new FatalError(error); + } + + await safeWrite(writer, { type: "attempt_success", attempt, contactId }); + } finally { + writer.releaseLock(); + } +} + 
+syncContactToCrm.maxRetries = 0; +``` + +## Key APIs + +- [`"use workflow"`](/docs/foundations/workflows-and-steps) — marks the orchestrator function +- [`"use step"`](/docs/foundations/workflows-and-steps) — marks functions that run with full Node.js access +- [`sleep()`](/docs/api-reference/workflow/sleep) — durable pause that survives replay +- [`FatalError`](/docs/api-reference/workflow/fatal-error) — prevents automatic retry so the workflow controls retry logic +- [`getWritable()`](/docs/api-reference/step/get-writable) — streams events to the caller diff --git a/docs/content/docs/cookbook/resilience/retryable-rate-limit.mdx b/docs/content/docs/cookbook/resilience/retryable-rate-limit.mdx new file mode 100644 index 0000000000..4a3eb272b6 --- /dev/null +++ b/docs/content/docs/cookbook/resilience/retryable-rate-limit.mdx @@ -0,0 +1,157 @@ +--- +title: Retryable Rate Limit +description: On 429 / rate limits, back off and retry instead of failing immediately. +type: guide +summary: Sync contacts to an external CRM and auto-retry when the API returns 429 with retry-after. +--- + +Sync contacts to an external CRM and auto-retry when the API returns 429 with retry-after. + +## Pattern + +A step throws `RetryableError` with a `retryAfter` duration when it receives a 429 response. The Workflow DevKit runtime automatically reschedules the step after the specified delay, using the built-in retry mechanism instead of manual sleep loops. 
+ +### Simplified + +```typescript lineNumbers +import { RetryableError } from "workflow"; + +declare function upsertIntoWarehouse(contactId: string, contact: unknown): Promise; // @setup + +export async function syncCrmContact(contactId: string) { + "use workflow"; + + const contact = await fetchContactFromCrm(contactId); + await upsertIntoWarehouse(contactId, contact); + + return { contactId, status: "synced" }; +} + +async function fetchContactFromCrm(contactId: string) { + "use step"; + + const res = await fetch(`https://crm.example.com/contacts/${contactId}`); + + if (res.status === 429) { + const retryAfterMs = parseInt(res.headers.get("retry-after") || "2000"); + throw new RetryableError("CRM rate-limited (429)", { + retryAfter: retryAfterMs, + }); + } + + return res.json(); +} +``` + +### Full Implementation + +```typescript lineNumbers +import { RetryableError, getStepMetadata, getWritable } from "workflow"; + +export type RateLimitEvent = + | { + type: "attempt_start"; + attempt: number; + contactId: string; + idempotencyKey: string; + } + | { type: "http_429"; attempt: number; retryAfterMs: number } + | { type: "retry_scheduled"; attempt: number; retryAfterMs: number } + | { type: "step_done"; step: "fetch" | "upsert"; attempt: number } + | { + type: "done"; + contactId: string; + status: "synced"; + totalAttempts: number; + }; + +export type SyncResult = { + contactId: string; + status: "synced" | "failed"; + attempts?: number; +}; + +export async function syncCrmContact( + contactId: string, + failuresBeforeSuccess: number = 2 +): Promise { + "use workflow"; + + const contact = await fetchContactFromCrm(contactId, failuresBeforeSuccess); + await upsertIntoWarehouse(contactId, contact); + + return { contactId, status: "synced" }; +} + +async function fetchContactFromCrm( + contactId: string, + failuresBeforeSuccess: number +) { + "use step"; + + const { stepId, attempt } = getStepMetadata(); + const writer = getWritable().getWriter(); + const 
idempotencyKey = `crm-sync:${contactId}:${stepId}`; + + try { + await writer.write({ + type: "attempt_start", + attempt, + contactId, + idempotencyKey, + }); + + // Simulate CRM API latency + await new Promise((r) => setTimeout(r, 650)); + + if (attempt <= failuresBeforeSuccess) { + const retryAfterMs = + attempt === 1 ? 2000 : attempt === 2 ? 1500 : 1000; + + await writer.write({ type: "http_429", attempt, retryAfterMs }); + await writer.write({ type: "retry_scheduled", attempt, retryAfterMs }); + + throw new RetryableError("CRM rate-limited (429)", { + retryAfter: retryAfterMs, + }); + } + + await writer.write({ type: "step_done", step: "fetch", attempt }); + return { id: contactId, name: "Jane Doe", email: "jane@example.com" }; + } finally { + writer.releaseLock(); + } +} + +async function upsertIntoWarehouse(contactId: string, contact: unknown) { + "use step"; + + const { attempt } = getStepMetadata(); + const writer = getWritable().getWriter(); + + try { + // Simulate warehouse write latency + await new Promise((r) => setTimeout(r, 600)); + + await writer.write({ type: "step_done", step: "upsert", attempt }); + await writer.write({ + type: "done", + contactId, + status: "synced", + totalAttempts: attempt, + }); + } finally { + writer.releaseLock(); + } + + void contact; +} +``` + +## Key APIs + +- [`"use workflow"`](/docs/foundations/workflows-and-steps) — marks the orchestrator function +- [`"use step"`](/docs/foundations/workflows-and-steps) — marks functions that run with full Node.js access +- [`RetryableError`](/docs/api-reference/workflow/retryable-error) — signals the runtime to retry the step after a delay +- [`getStepMetadata()`](/docs/api-reference/step/get-step-metadata) — provides the current attempt number and step ID +- [`getWritable()`](/docs/api-reference/step/get-writable) — streams events to the caller diff --git a/docs/content/docs/cookbook/resilience/throttle.mdx b/docs/content/docs/cookbook/resilience/throttle.mdx new file mode 100644 
index 0000000000..e4c8dacb03 --- /dev/null +++ b/docs/content/docs/cookbook/resilience/throttle.mdx @@ -0,0 +1,195 @@ +--- +title: Throttle +description: Limit how often work runs or how many concurrent operations are allowed. +type: guide +summary: Cap outbound API calls to 10/second so you don't blow your third-party rate limit. +--- + +Cap outbound API calls to 10/second so you don't blow your third-party rate limit. + +## Pattern + +The workflow maintains a token bucket in its orchestrator state. Before processing each request, it checks for available tokens and accepts or rejects accordingly. Since workflow state is durably persisted, the throttle survives restarts. + +### Simplified + +```typescript lineNumbers +export async function throttleFlow( + requests: { id: string }[], + capacity: number, + refillRate: number +) { + "use workflow"; + + let tokens = capacity; + let accepted = 0; + let rejected = 0; + + for (let i = 0; i < requests.length; i++) { + if (tokens > 0) { + tokens--; + accepted++; + await processRequest(requests[i].id); + } else { + rejected++; + } + + // Refill a token every N requests + if ((i + 1) % refillRate === 0 && tokens < capacity) { + tokens++; + } + } + + return { accepted, rejected, total: requests.length }; +} + +async function processRequest(requestId: string) { + "use step"; + // Call your rate-limited API here +} +``` + +### Full Implementation + +```typescript lineNumbers +import { getWritable } from "workflow"; + +export type ThrottleEvent = + | { type: "config"; capacity: number; refillRate: number; requestCount: number } + | { type: "request_received"; requestId: string; position: number } + | { type: "token_check"; requestId: string; tokensAvailable: number } + | { type: "request_accepted"; requestId: string; tokensRemaining: number } + | { type: "request_rejected"; requestId: string; retryAfterMs: number } + | { type: "token_refilled"; tokensAvailable: number } + | { type: "done"; accepted: number; rejected: number; 
total: number }; + +export interface ThrottleResult { + accepted: number; + rejected: number; + total: number; +} + +export type RequestItem = { + id: string; + label: string; +}; + +export interface ThrottleInput { + capacity: number; + refillRate: number; + requests: RequestItem[]; +} + +// Demo timing +const PROCESS_DELAY_MS = 300; +const CHECK_DELAY_MS = 200; + +function delay(ms: number): Promise { + return new Promise((resolve) => setTimeout(resolve, ms)); +} + +export async function throttleFlow( + input: ThrottleInput +): Promise { + "use workflow"; + + const { capacity, refillRate, requests } = input; + let tokens = capacity; + let accepted = 0; + let rejected = 0; + + await emitEvent({ + type: "config", + capacity, + refillRate, + requestCount: requests.length, + }); + + for (let i = 0; i < requests.length; i++) { + const req = requests[i]; + const hasToken = tokens > 0; + + await evaluateRequest(req, i + 1, tokens, refillRate); + + if (hasToken) { + tokens--; + accepted++; + } else { + rejected++; + } + + // Refill: every refillRate requests, add 1 token back (simulates time passing) + if ((i + 1) % refillRate === 0 && tokens < capacity) { + tokens++; + await emitEvent({ type: "token_refilled", tokensAvailable: tokens }); + } + } + + await emitEvent({ + type: "done", + accepted, + rejected, + total: requests.length, + }); + + return { accepted, rejected, total: requests.length }; +} + +async function evaluateRequest( + req: RequestItem, + position: number, + tokens: number, + refillRate: number +): Promise { + "use step"; + + const writer = getWritable().getWriter(); + try { + await writer.write({ + type: "request_received", + requestId: req.id, + position, + }); + await delay(PROCESS_DELAY_MS); + + await writer.write({ + type: "token_check", + requestId: req.id, + tokensAvailable: tokens, + }); + await delay(CHECK_DELAY_MS); + + if (tokens > 0) { + await writer.write({ + type: "request_accepted", + requestId: req.id, + tokensRemaining: tokens - 1, + }); 
+ } else { + await writer.write({ + type: "request_rejected", + requestId: req.id, + retryAfterMs: refillRate * 1000, + }); + } + } finally { + writer.releaseLock(); + } +} + +async function emitEvent(event: ThrottleEvent): Promise { + "use step"; + const writer = getWritable().getWriter(); + try { + await writer.write(event); + } finally { + writer.releaseLock(); + } +} +``` + +## Key APIs + +- [`"use workflow"`](/docs/foundations/workflows-and-steps) — marks the orchestrator function +- [`"use step"`](/docs/foundations/workflows-and-steps) — marks functions that run with full Node.js access +- [`getWritable()`](/docs/api-reference/step/get-writable) — streams events to the caller diff --git a/docs/content/docs/cookbook/routing/content-based-router.mdx b/docs/content/docs/cookbook/routing/content-based-router.mdx new file mode 100644 index 0000000000..c676174e40 --- /dev/null +++ b/docs/content/docs/cookbook/routing/content-based-router.mdx @@ -0,0 +1,283 @@ +--- +title: Content-Based Router +description: Branch to different handlers based on fields inside the message or payload. +type: guide +summary: Classify a support ticket and route it to billing, technical, account, or feedback handlers. +--- + +When incoming messages need different processing paths depending on their content, use a content-based router. A support ticket about a payment issue should go to the billing team, while a bug report goes to engineering. + +## Pattern + +The workflow inspects the payload, classifies it, then branches with a standard `if`/`else` to call the appropriate step handler. Each handler is a separate step function with full Node.js access. 
+ +### Simplified + +```typescript lineNumbers +declare function classifyTicket(ticketId: string, subject: string): Promise<{ ticketType: string; confidence: number }>; // @setup +declare function handleBilling(ticketId: string): Promise; // @setup +declare function handleTechnical(ticketId: string): Promise; // @setup +declare function handleAccount(ticketId: string): Promise; // @setup +declare function handleFeedback(ticketId: string): Promise; // @setup + +export async function contentBasedRouterFlow( + ticketId: string, + subject: string, +) { + "use workflow"; + + const { ticketType } = await classifyTicket(ticketId, subject); + + let totalSteps: number; + if (ticketType === "billing") { + totalSteps = await handleBilling(ticketId); + } else if (ticketType === "technical") { + totalSteps = await handleTechnical(ticketId); + } else if (ticketType === "account") { + totalSteps = await handleAccount(ticketId); + } else { + totalSteps = await handleFeedback(ticketId); + } + + return { ticketId, routedTo: ticketType, totalSteps }; +} +``` + +### Full Implementation + +```typescript lineNumbers +import { getWritable, sleep } from "workflow"; + +export type TicketType = "billing" | "technical" | "account" | "feedback"; +export type TicketPriority = "low" | "medium" | "high" | "urgent"; + +export type RouterEvent = + | { type: "ticket_received"; ticketId: string; subject: string } + | { type: "classifying"; ticketId: string } + | { type: "classified"; ticketId: string; ticketType: TicketType; confidence: number } + | { type: "routing"; ticketId: string; destination: TicketType } + | { type: "handler_processing"; ticketId: string; destination: TicketType; step: string } + | { type: "handler_complete"; ticketId: string; destination: TicketType; resolution: string } + | { type: "done"; ticketId: string; routedTo: TicketType; totalSteps: number }; + +export interface ContentBasedRouterResult { + ticketId: string; + routedTo: TicketType; + totalSteps: number; +} + +// 
Simulated classification keywords per ticket type
+const CLASSIFICATION_RULES: Record<TicketType, string[]> = {
+  billing: ["invoice", "charge", "payment", "refund", "subscription", "billing", "price"],
+  technical: ["error", "bug", "crash", "timeout", "api", "deploy", "technical", "broken"],
+  account: ["password", "login", "access", "permissions", "account", "profile", "settings"],
+  feedback: ["feature", "suggestion", "improvement", "feedback", "request", "wishlist"],
+};
+
+// Demo timing
+const CLASSIFY_DELAY_MS = 800;
+const ROUTE_DELAY_MS = 400;
+const HANDLER_STEP_DELAY_MS = 600;
+
+function delay(ms: number): Promise<void> {
+  return new Promise((resolve) => setTimeout(resolve, ms));
+}
+
+function classifyContent(subject: string): { ticketType: TicketType; confidence: number } {
+  const lower = subject.toLowerCase();
+  let bestType: TicketType = "feedback";
+  let bestScore = 0;
+
+  for (const [type, keywords] of Object.entries(CLASSIFICATION_RULES) as [TicketType, string[]][]) {
+    const score = keywords.filter((kw) => lower.includes(kw)).length;
+    if (score > bestScore) {
+      bestScore = score;
+      bestType = type;
+    }
+  }
+
+  const confidence = bestScore > 0 ? 
Math.min(0.99, 0.7 + bestScore * 0.1) : 0.5;
+  return { ticketType: bestType, confidence };
+}
+
+export async function contentBasedRouterFlow(
+  ticketId: string,
+  subject: string,
+  priority: TicketPriority = "medium"
+): Promise<ContentBasedRouterResult> {
+  "use workflow";
+
+  // Step 1: Receive ticket
+  await emitEvent({ type: "ticket_received", ticketId, subject });
+
+  // Step 2: Classify ticket content
+  const { ticketType, confidence } = await classifyTicket(ticketId, subject);
+
+  // Step 3: Route to appropriate handler
+  await emitEvent({ type: "routing", ticketId, destination: ticketType });
+  await sleep(`${ROUTE_DELAY_MS}ms`);
+
+  // Step 4: Branch to specialized handler based on classification
+  let totalSteps: number;
+  if (ticketType === "billing") {
+    totalSteps = await handleBilling(ticketId, subject, priority);
+  } else if (ticketType === "technical") {
+    totalSteps = await handleTechnical(ticketId, subject, priority);
+  } else if (ticketType === "account") {
+    totalSteps = await handleAccount(ticketId, subject, priority);
+  } else {
+    totalSteps = await handleFeedback(ticketId, subject, priority);
+  }
+
+  // Step 5: Emit completion
+  await emitEvent({ type: "done", ticketId, routedTo: ticketType, totalSteps });
+
+  return { ticketId, routedTo: ticketType, totalSteps };
+}
+
+async function classifyTicket(
+  ticketId: string,
+  subject: string
+): Promise<{ ticketType: TicketType; confidence: number }> {
+  "use step";
+
+  const writer = getWritable().getWriter();
+  try {
+    await writer.write({ type: "classifying", ticketId });
+    await delay(CLASSIFY_DELAY_MS);
+
+    const result = classifyContent(subject);
+    await writer.write({
+      type: "classified",
+      ticketId,
+      ticketType: result.ticketType,
+      confidence: result.confidence,
+    });
+
+    return result;
+  } finally {
+    writer.releaseLock();
+  }
+}
+
+async function handleBilling(
+  ticketId: string,
+  _subject: string,
+  _priority: TicketPriority
+): Promise<number> {
+  "use step";
+
+  const writer = getWritable().getWriter();
+  
const steps = ["Verify account billing status", "Check payment history", "Generate resolution"]; + try { + for (const step of steps) { + await writer.write({ type: "handler_processing", ticketId, destination: "billing", step }); + await delay(HANDLER_STEP_DELAY_MS); + } + await writer.write({ + type: "handler_complete", + ticketId, + destination: "billing", + resolution: "Billing inquiry resolved — invoice adjustment applied", + }); + return steps.length; + } finally { + writer.releaseLock(); + } +} + +async function handleTechnical( + ticketId: string, + _subject: string, + _priority: TicketPriority +): Promise { + "use step"; + + const writer = getWritable().getWriter(); + const steps = ["Reproduce issue", "Analyze stack trace", "Apply fix", "Verify resolution"]; + try { + for (const step of steps) { + await writer.write({ type: "handler_processing", ticketId, destination: "technical", step }); + await delay(HANDLER_STEP_DELAY_MS); + } + await writer.write({ + type: "handler_complete", + ticketId, + destination: "technical", + resolution: "Technical issue resolved — patch deployed to staging", + }); + return steps.length; + } finally { + writer.releaseLock(); + } +} + +async function handleAccount( + ticketId: string, + _subject: string, + _priority: TicketPriority +): Promise { + "use step"; + + const writer = getWritable().getWriter(); + const steps = ["Verify identity", "Update account settings", "Confirm changes"]; + try { + for (const step of steps) { + await writer.write({ type: "handler_processing", ticketId, destination: "account", step }); + await delay(HANDLER_STEP_DELAY_MS); + } + await writer.write({ + type: "handler_complete", + ticketId, + destination: "account", + resolution: "Account issue resolved — access restored", + }); + return steps.length; + } finally { + writer.releaseLock(); + } +} + +async function handleFeedback( + ticketId: string, + _subject: string, + _priority: TicketPriority +): Promise { + "use step"; + + const writer = 
getWritable().getWriter();
+  const steps = ["Log feedback", "Categorize suggestion", "Notify product team"];
+  try {
+    for (const step of steps) {
+      await writer.write({ type: "handler_processing", ticketId, destination: "feedback", step });
+      await delay(HANDLER_STEP_DELAY_MS);
+    }
+    await writer.write({
+      type: "handler_complete",
+      ticketId,
+      destination: "feedback",
+      resolution: "Feedback logged — added to product backlog",
+    });
+    return steps.length;
+  } finally {
+    writer.releaseLock();
+  }
+}
+
+async function emitEvent(event: RouterEvent): Promise<void> {
+  "use step";
+  const writer = getWritable().getWriter();
+  try {
+    await writer.write(event);
+  } finally {
+    writer.releaseLock();
+  }
+}
+```
+
+## Key APIs
+
+- [`"use workflow"`](/docs/api-reference/directives/use-workflow) — marks the orchestrator function
+- [`"use step"`](/docs/api-reference/directives/use-step) — marks each handler as a durable step
+- [`sleep()`](/docs/api-reference/workflow/sleep) — durable delay between routing and handling
+- [`getWritable()`](/docs/api-reference/step/get-writable) — streams progress events to the client
diff --git a/docs/content/docs/cookbook/routing/content-enricher.mdx b/docs/content/docs/cookbook/routing/content-enricher.mdx
new file mode 100644
index 0000000000..f11eb8f916
--- /dev/null
+++ b/docs/content/docs/cookbook/routing/content-enricher.mdx
@@ -0,0 +1,360 @@
+---
+title: Content Enricher
+description: Look up extra data and attach it before the next step sees the message.
+type: guide
+summary: Enrich a sales lead by querying CRM, social, and Clearbit in parallel before routing to sales.
+---
+
+When a message arrives with minimal data and downstream steps need a richer picture, use a content enricher. The workflow fetches supplementary data from multiple sources and merges it into the message before passing it along.
+ +## Pattern + +The workflow looks up a base contact, then fans out to multiple enrichment sources in parallel using `Promise.allSettled`. Results are merged into a single enriched profile. Failed sources degrade gracefully without blocking the pipeline. + +### Simplified + +```typescript lineNumbers +type BaseLead = { email: string; name: string; domain: string }; + +declare function lookupBaseContact(email: string): Promise; // @setup +declare function fetchCrmEnrichment(lead: BaseLead): Promise<{ company: string; title: string }>; // @setup +declare function fetchSocialEnrichment(lead: BaseLead): Promise<{ followers: number }>; // @setup +declare function fetchClearbitEnrichment(lead: BaseLead): Promise<{ score: number }>; // @setup +declare function fetchGitHubEnrichment(lead: BaseLead): Promise<{ username: string }>; // @setup +declare function mergeEnrichmentProfile(lead: BaseLead, sources: Record): Promise>; // @setup + +export async function enrichLeadProfile(email: string) { + "use workflow"; + + const baseLead = await lookupBaseContact(email); + + const [crm, social, clearbit, github] = await Promise.allSettled([ + fetchCrmEnrichment(baseLead), + fetchSocialEnrichment(baseLead), + fetchClearbitEnrichment(baseLead), + fetchGitHubEnrichment(baseLead), + ]); + + const profile = await mergeEnrichmentProfile(baseLead, { + crm: crm.status === "fulfilled" ? crm.value : null, + social: social.status === "fulfilled" ? social.value : null, + clearbit: clearbit.status === "fulfilled" ? clearbit.value : null, + github: github.status === "fulfilled" ? github.value : null, + }); + + return { email, baseLead, profile }; +} +``` + +### Full Implementation + +```typescript lineNumbers +// getWritable is used here to stream demo UI events. +// A production workflow wouldn't need this unless it has its own streaming UI. 
+import { getWritable } from "workflow"; + +export type EnrichmentSource = "crm" | "social" | "clearbit" | "github"; + +export type EnrichmentEvent = + | { type: "base_lookup" } + | { type: "base_done"; name: string; domain: string } + | { type: "source_start"; source: EnrichmentSource } + | { type: "source_done"; source: EnrichmentSource; data: unknown } + | { type: "source_failed"; source: EnrichmentSource; error: string } + | { type: "merging" } + | { type: "done"; profile: EnrichedLeadProfile }; + +export type BaseLead = { + email: string; + name: string; + domain: string; +}; + +export type CrmEnrichment = { + company: string; + title: string; + segment: "enterprise" | "mid-market"; +}; + +export type SocialEnrichment = { + followers: number; + location: string; + profileUrl: string; +}; + +export type ClearbitEnrichment = { + company: string; + employees: number; + score: number; +}; + +export type GitHubEnrichment = { + username: string; + publicRepos: number; + stars: number; +}; + +export type PartialEnrichmentPayload = { + crm: CrmEnrichment | null; + social: SocialEnrichment | null; + clearbit: ClearbitEnrichment | null; + github: GitHubEnrichment | null; +}; + +export type EnrichedLeadProfile = { + email: string; + name: string; + domain: string; + company: string | null; + title: string | null; + followers: number | null; + location: string | null; + githubUsername: string | null; + githubStars: number | null; + clearbitScore: number | null; + segment: string | null; +}; + +export type LeadEnrichmentResult = { + email: string; + baseLead: BaseLead; + sources: Record; + profile: EnrichedLeadProfile; +}; + +// Demo: simulate real-world network latency so the UI can show progress. 
+const SOURCE_DELAY_MS: Record<EnrichmentSource, number> = {
+  crm: 700,
+  social: 640,
+  clearbit: 810,
+  github: 760,
+};
+
+const BASE_DELAY_MS = 500;
+const MERGE_DELAY_MS = 500;
+
+function delay(ms: number): Promise<void> {
+  return new Promise((resolve) => setTimeout(resolve, ms));
+}
+
+export async function enrichLeadProfile(email: string): Promise<LeadEnrichmentResult> {
+  "use workflow";
+
+  const baseLead = await lookupBaseContact(email);
+
+  const [crm, social, clearbit, github] = await Promise.allSettled([
+    fetchCrmEnrichment(baseLead),
+    fetchSocialEnrichment(baseLead),
+    fetchClearbitEnrichment(baseLead),
+    fetchGitHubEnrichment(baseLead),
+  ]);
+
+  const profile = await mergeEnrichmentProfile(baseLead, {
+    crm: crm.status === "fulfilled" ? crm.value : null,
+    social: social.status === "fulfilled" ? social.value : null,
+    clearbit: clearbit.status === "fulfilled" ? clearbit.value : null,
+    github: github.status === "fulfilled" ? github.value : null,
+  });
+
+  return {
+    email: baseLead.email,
+    baseLead,
+    sources: {
+      crm: crm.status,
+      social: social.status,
+      clearbit: clearbit.status,
+      github: github.status,
+    },
+    profile,
+  };
+}
+
+async function lookupBaseContact(email: string): Promise<BaseLead> {
+  "use step";
+
+  const writer = getWritable().getWriter();
+  try {
+    await writer.write({ type: "base_lookup" });
+    await delay(BASE_DELAY_MS);
+
+    const normalized = email.trim().toLowerCase();
+    const [localPart = "lead", domain = "example.com"] = normalized.split("@");
+    const lead: BaseLead = {
+      email: normalized,
+      name: humanizeLocalPart(localPart),
+      domain,
+    };
+
+    await writer.write({ type: "base_done", name: lead.name, domain: lead.domain });
+    return lead;
+  } finally {
+    writer.releaseLock();
+  }
+}
+
+async function fetchCrmEnrichment(baseLead: BaseLead): Promise<CrmEnrichment> {
+  "use step";
+
+  const writer = getWritable().getWriter();
+  try {
+    await writer.write({ type: "source_start", source: "crm" });
+    await delay(SOURCE_DELAY_MS.crm);
+
+    if (shouldForceFailure(baseLead.email, "crm")) {
+      const 
error = "CRM enrichment temporarily unavailable"; + await writer.write({ type: "source_failed", source: "crm", error }); + throw new Error(error); + } + + const data: CrmEnrichment = { + company: titleCase(baseLead.domain.split(".")[0] ?? "Example"), + title: "Senior Product Manager", + segment: "mid-market", + }; + + await writer.write({ type: "source_done", source: "crm", data }); + return data; + } finally { + writer.releaseLock(); + } +} + +async function fetchSocialEnrichment(baseLead: BaseLead): Promise { + "use step"; + + const writer = getWritable().getWriter(); + try { + await writer.write({ type: "source_start", source: "social" }); + await delay(SOURCE_DELAY_MS.social); + + if (shouldForceFailure(baseLead.email, "social")) { + const error = "Social enrichment temporarily unavailable"; + await writer.write({ type: "source_failed", source: "social", error }); + throw new Error(error); + } + + const data: SocialEnrichment = { + followers: 1830, + location: "San Francisco, CA", + profileUrl: `https://linkedin.com/in/${baseLead.name.toLowerCase().replace(/\s+/g, "-")}`, + }; + + await writer.write({ type: "source_done", source: "social", data }); + return data; + } finally { + writer.releaseLock(); + } +} + +async function fetchClearbitEnrichment(baseLead: BaseLead): Promise { + "use step"; + + const writer = getWritable().getWriter(); + try { + await writer.write({ type: "source_start", source: "clearbit" }); + await delay(SOURCE_DELAY_MS.clearbit); + + if (shouldForceFailure(baseLead.email, "clearbit")) { + const error = "Clearbit enrichment temporarily unavailable"; + await writer.write({ type: "source_failed", source: "clearbit", error }); + throw new Error(error); + } + + const data: ClearbitEnrichment = { + company: titleCase(baseLead.domain.split(".")[0] ?? 
"Example"), + employees: 240, + score: 78, + }; + + await writer.write({ type: "source_done", source: "clearbit", data }); + return data; + } finally { + writer.releaseLock(); + } +} + +async function fetchGitHubEnrichment(baseLead: BaseLead): Promise { + "use step"; + + const writer = getWritable().getWriter(); + try { + await writer.write({ type: "source_start", source: "github" }); + await delay(SOURCE_DELAY_MS.github); + + if (shouldForceFailure(baseLead.email, "github")) { + const error = "GitHub enrichment temporarily unavailable"; + await writer.write({ type: "source_failed", source: "github", error }); + throw new Error(error); + } + + const data: GitHubEnrichment = { + username: baseLead.name.toLowerCase().replace(/\s+/g, ""), + publicRepos: 23, + stars: 412, + }; + + await writer.write({ type: "source_done", source: "github", data }); + return data; + } finally { + writer.releaseLock(); + } +} + +async function mergeEnrichmentProfile( + baseLead: BaseLead, + sources: PartialEnrichmentPayload +): Promise { + "use step"; + + const writer = getWritable().getWriter(); + try { + await writer.write({ type: "merging" }); + await delay(MERGE_DELAY_MS); + + const profile: EnrichedLeadProfile = { + email: baseLead.email, + name: baseLead.name, + domain: baseLead.domain, + company: sources.crm?.company ?? sources.clearbit?.company ?? null, + title: sources.crm?.title ?? null, + followers: sources.social?.followers ?? null, + location: sources.social?.location ?? null, + githubUsername: sources.github?.username ?? null, + githubStars: sources.github?.stars ?? null, + clearbitScore: sources.clearbit?.score ?? null, + segment: sources.crm?.segment ?? null, + }; + + await writer.write({ type: "done", profile }); + return profile; + } finally { + writer.releaseLock(); + } +} + +function shouldForceFailure(email: string, source: EnrichmentSource): boolean { + const localPart = email.split("@")[0] ?? ""; + const plusSection = localPart.split("+")[1] ?? 
""; + const flags = plusSection.split(".").filter(Boolean); + return flags.includes(`fail-${source}`) || flags.includes(`fail${source}`); +} + +function humanizeLocalPart(localPart: string): string { + return localPart + .split(/[._-]+/) + .filter(Boolean) + .map((part) => titleCase(part)) + .join(" "); +} + +function titleCase(value: string): string { + if (!value) return ""; + return value.charAt(0).toUpperCase() + value.slice(1); +} +``` + +## Key APIs + +- [`"use workflow"`](/docs/api-reference/directives/use-workflow) — marks the orchestrator function +- [`"use step"`](/docs/api-reference/directives/use-step) — marks each enrichment source lookup as a durable step +- [`getWritable()`](/docs/api-reference/step/get-writable) — streams enrichment progress to the client diff --git a/docs/content/docs/cookbook/routing/detour.mdx b/docs/content/docs/cookbook/routing/detour.mdx new file mode 100644 index 0000000000..58cf6c99d3 --- /dev/null +++ b/docs/content/docs/cookbook/routing/detour.mdx @@ -0,0 +1,201 @@ +--- +title: Detour +description: Temporarily bypass or replace a step (maintenance, A/B, fallback path). +type: guide +summary: Toggle a QA review stage on/off in a deploy pipeline based on a runtime feature flag. +--- + +When you need to conditionally insert or skip processing stages at runtime -- for maintenance windows, A/B tests, or feature flags -- use the detour pattern. A simple boolean controls whether extra steps execute. + +## Pattern + +The workflow uses a standard `if` check on a runtime flag to conditionally call additional step functions. No special APIs are needed -- JavaScript control flow handles the detour. 
+ +### Simplified + +```typescript lineNumbers +declare function runBuild(deployId: string): Promise; // @setup +declare function runLint(deployId: string): Promise; // @setup +declare function runQaDetour(deployId: string): Promise; // @setup +declare function runDeploy(deployId: string): Promise; // @setup + +export async function detourFlow( + deployId: string, + qaMode: boolean = false, +) { + "use workflow"; + + let stepCount = 0; + + stepCount += await runBuild(deployId); + stepCount += await runLint(deployId); + + // Conditional detour — QA stages only when qaMode is true + if (qaMode) { + stepCount += await runQaDetour(deployId); + } + + stepCount += await runDeploy(deployId); + + return { deployId, totalSteps: stepCount, qaMode, status: "done" }; +} +``` + +### Full Implementation + +```typescript lineNumbers +import { getWritable, sleep } from "workflow"; + +export type DetourEvent = + | { type: "pipeline_started"; deployId: string; qaMode: boolean } + | { type: "step_running"; deployId: string; step: string } + | { type: "step_complete"; deployId: string; step: string; result: string } + | { type: "detour_entered"; deployId: string } + | { type: "detour_exited"; deployId: string } + | { type: "done"; deployId: string; totalSteps: number; qaMode: boolean }; + +export interface DetourResult { + deployId: string; + totalSteps: number; + qaMode: boolean; + status: "done"; +} + +// Demo timing +const STEP_DELAY_MS = 600; +const QA_STEP_DELAY_MS = 800; + +function delay(ms: number): Promise { + return new Promise((resolve) => setTimeout(resolve, ms)); +} + +export async function detourFlow( + deployId: string, + qaMode: boolean = false +): Promise { + "use workflow"; + + let stepCount = 0; + + // Step 1: Emit pipeline start + await emitEvent({ type: "pipeline_started", deployId, qaMode }); + + // Step 2: Build + stepCount += await runBuild(deployId); + + // Step 3: Lint + stepCount += await runLint(deployId); + + // Step 4: Conditional detour — QA stages only 
when qaMode is true + if (qaMode) { + stepCount += await runQaDetour(deployId); + } + + // Step 5: Deploy + stepCount += await runDeploy(deployId); + + // Step 6: Emit completion + await emitEvent({ type: "done", deployId, totalSteps: stepCount, qaMode }); + + return { deployId, totalSteps: stepCount, qaMode, status: "done" }; +} + +async function runBuild(deployId: string): Promise { + "use step"; + + const writer = getWritable().getWriter(); + try { + await writer.write({ type: "step_running", deployId, step: "build" }); + await delay(STEP_DELAY_MS); + await writer.write({ + type: "step_complete", + deployId, + step: "build", + result: "Build succeeded — 42 modules compiled", + }); + return 1; + } finally { + writer.releaseLock(); + } +} + +async function runLint(deployId: string): Promise { + "use step"; + + const writer = getWritable().getWriter(); + try { + await writer.write({ type: "step_running", deployId, step: "lint" }); + await delay(STEP_DELAY_MS); + await writer.write({ + type: "step_complete", + deployId, + step: "lint", + result: "Lint passed — 0 warnings, 0 errors", + }); + return 1; + } finally { + writer.releaseLock(); + } +} + +async function runQaDetour(deployId: string): Promise { + "use step"; + + const writer = getWritable().getWriter(); + const qaSteps = [ + { step: "qa-review", result: "QA review approved — all acceptance criteria met" }, + { step: "staging-test", result: "Staging tests passed — 128/128 assertions green" }, + { step: "security-scan", result: "Security scan clear — no vulnerabilities found" }, + ]; + + try { + await writer.write({ type: "detour_entered", deployId }); + + for (const { step, result } of qaSteps) { + await writer.write({ type: "step_running", deployId, step }); + await delay(QA_STEP_DELAY_MS); + await writer.write({ type: "step_complete", deployId, step, result }); + } + + await writer.write({ type: "detour_exited", deployId }); + return qaSteps.length; + } finally { + writer.releaseLock(); + } +} + +async 
function runDeploy(deployId: string): Promise { + "use step"; + + const writer = getWritable().getWriter(); + try { + await writer.write({ type: "step_running", deployId, step: "deploy" }); + await delay(STEP_DELAY_MS); + await writer.write({ + type: "step_complete", + deployId, + step: "deploy", + result: "Deployed to production — v2.4.1 live", + }); + return 1; + } finally { + writer.releaseLock(); + } +} + +async function emitEvent(event: DetourEvent): Promise { + "use step"; + const writer = getWritable().getWriter(); + try { + await writer.write(event); + } finally { + writer.releaseLock(); + } +} +``` + +## Key APIs + +- [`"use workflow"`](/docs/api-reference/directives/use-workflow) — marks the orchestrator function +- [`"use step"`](/docs/api-reference/directives/use-step) — marks each pipeline stage as a durable step +- [`getWritable()`](/docs/api-reference/step/get-writable) — streams progress events to the client diff --git a/docs/content/docs/cookbook/routing/message-filter.mdx b/docs/content/docs/cookbook/routing/message-filter.mdx new file mode 100644 index 0000000000..336664f3ef --- /dev/null +++ b/docs/content/docs/cookbook/routing/message-filter.mdx @@ -0,0 +1,262 @@ +--- +title: Message Filter +description: Drop or accept messages based on rules before downstream processing. +type: guide +summary: Drop low-priority log events before they hit the expensive analytics pipeline. +--- + +When you need to discard messages that don't meet criteria before they reach expensive downstream processing, use a message filter. Each filter stage applies a rule and passes only qualifying messages to the next stage. + +## Pattern + +The workflow chains multiple filter steps in sequence. Each step receives a batch of messages, applies a rule (fraud score, minimum amount, allowed region), and returns the messages that pass along with the rejected ones. Results accumulate across stages. 
+ +### Simplified + +```typescript lineNumbers +import { FatalError } from "workflow"; + +type Order = { id: string; amount: number; region: string; fraudScore: number }; + +declare function applyFraudCheck(orders: Order[], threshold: number): Promise<{ passed: Order[]; rejected: { order: Order; reason: string }[] }>; // @setup +declare function applyAmountThreshold(orders: Order[], min: number): Promise<{ passed: Order[]; rejected: { order: Order; reason: string }[] }>; // @setup +declare function applyRegionFilter(orders: Order[], regions: string[]): Promise<{ passed: Order[]; rejected: { order: Order; reason: string }[] }>; // @setup + +export async function orderFilter() { + "use workflow"; + + const orders = SAMPLE_ORDERS; + + const afterFraud = await applyFraudCheck(orders, 70); + const afterAmount = await applyAmountThreshold(afterFraud.passed, 10); + const afterRegion = await applyRegionFilter(afterAmount.passed, ["US", "EU", "CA"]); + + return { + passed: afterRegion.passed, + rejected: [...afterFraud.rejected, ...afterAmount.rejected, ...afterRegion.rejected], + }; +} +``` + +### Full Implementation + +```typescript lineNumbers +"use workflow"; + +import { FatalError } from "workflow"; + +// --- Types --- +export type Order = { + id: string; + amount: number; + region: string; + fraudScore: number; + customer: string; +}; + +export type FilterVerdict = "pass" | "reject"; + +export type FilterEvent = { + type: "filter_start" | "filter_check" | "filter_result" | "filter_done"; + orderId: string; + stage?: string; + verdict?: FilterVerdict; + reason?: string; + passedOrders?: Order[]; + rejectedOrders?: { order: Order; stage: string; reason: string }[]; +}; + +export type DemoConfig = { + fraudThreshold: number; + minAmount: number; + allowedRegions: string[]; +}; + +const DEFAULT_CONFIG: DemoConfig = { + fraudThreshold: 70, + minAmount: 10, + allowedRegions: ["US", "EU", "CA"], +}; + +const SAMPLE_ORDERS: Order[] = [ + { id: "ORD-001", amount: 250, region: 
"US", fraudScore: 12, customer: "Alice" }, + { id: "ORD-002", amount: 5, region: "EU", fraudScore: 8, customer: "Bob" }, + { id: "ORD-003", amount: 1200, region: "CN", fraudScore: 45, customer: "Charlie" }, + { id: "ORD-004", amount: 89, region: "US", fraudScore: 92, customer: "Diana" }, + { id: "ORD-005", amount: 430, region: "CA", fraudScore: 15, customer: "Eve" }, + { id: "ORD-006", amount: 75, region: "BR", fraudScore: 55, customer: "Frank" }, + { id: "ORD-007", amount: 3, region: "EU", fraudScore: 88, customer: "Grace" }, + { id: "ORD-008", amount: 610, region: "US", fraudScore: 5, customer: "Hank" }, +]; + +// --- Entry point --- +export async function orderFilter(config?: Partial) { + "use workflow"; + const cfg = { ...DEFAULT_CONFIG, ...config }; + const orders = SAMPLE_ORDERS; + + const afterFraud = await applyFraudCheck(orders, cfg.fraudThreshold); + const afterAmount = await applyAmountThreshold(afterFraud.passed, cfg.minAmount); + const afterRegion = await applyRegionFilter(afterAmount.passed, cfg.allowedRegions); + + await emitResults( + afterRegion.passed, + [...afterFraud.rejected, ...afterAmount.rejected, ...afterRegion.rejected] + ); +} + +type StageResult = { + passed: Order[]; + rejected: { order: Order; stage: string; reason: string }[]; +}; + +// --- Step: Fraud check --- +export async function applyFraudCheck(orders: Order[], threshold: number): Promise { + "use step"; + const { getWritable } = await import("workflow"); + const writable = getWritable(); + const writer = writable.getWriter(); + const passed: Order[] = []; + const rejected: StageResult["rejected"] = []; + + for (const order of orders) { + await writer.write({ + type: "filter_check", + orderId: order.id, + stage: "fraud", + }); + await new Promise((r) => setTimeout(r, 300)); + + if (order.fraudScore > threshold) { + rejected.push({ order, stage: "fraud", reason: `Fraud score ${order.fraudScore} > ${threshold}` }); + await writer.write({ + type: "filter_result", + orderId: 
order.id, + stage: "fraud", + verdict: "reject", + reason: `Fraud score ${order.fraudScore} exceeds threshold ${threshold}`, + }); + } else { + passed.push(order); + await writer.write({ + type: "filter_result", + orderId: order.id, + stage: "fraud", + verdict: "pass", + }); + } + } + + writer.close(); + return { passed, rejected }; +} + +// --- Step: Amount threshold --- +export async function applyAmountThreshold(orders: Order[], minAmount: number): Promise { + "use step"; + const { getWritable } = await import("workflow"); + const writable = getWritable(); + const writer = writable.getWriter(); + const passed: Order[] = []; + const rejected: StageResult["rejected"] = []; + + for (const order of orders) { + await writer.write({ + type: "filter_check", + orderId: order.id, + stage: "amount", + }); + await new Promise((r) => setTimeout(r, 300)); + + if (order.amount < minAmount) { + rejected.push({ order, stage: "amount", reason: `Amount $${order.amount} < $${minAmount}` }); + await writer.write({ + type: "filter_result", + orderId: order.id, + stage: "amount", + verdict: "reject", + reason: `Order amount $${order.amount} below minimum $${minAmount}`, + }); + } else { + passed.push(order); + await writer.write({ + type: "filter_result", + orderId: order.id, + stage: "amount", + verdict: "pass", + }); + } + } + + writer.close(); + return { passed, rejected }; +} + +// --- Step: Region filter --- +export async function applyRegionFilter(orders: Order[], allowedRegions: string[]): Promise { + "use step"; + const { getWritable } = await import("workflow"); + const writable = getWritable(); + const writer = writable.getWriter(); + const passed: Order[] = []; + const rejected: StageResult["rejected"] = []; + + for (const order of orders) { + await writer.write({ + type: "filter_check", + orderId: order.id, + stage: "region", + }); + await new Promise((r) => setTimeout(r, 300)); + + if (!allowedRegions.includes(order.region)) { + rejected.push({ order, stage: "region", 
reason: `Region ${order.region} not in ${allowedRegions.join(",")}` }); + await writer.write({ + type: "filter_result", + orderId: order.id, + stage: "region", + verdict: "reject", + reason: `Region "${order.region}" not in allowed regions [${allowedRegions.join(", ")}]`, + }); + } else { + passed.push(order); + await writer.write({ + type: "filter_result", + orderId: order.id, + stage: "region", + verdict: "pass", + }); + } + } + + writer.close(); + return { passed, rejected }; +} + +// --- Step: Emit final results --- +export async function emitResults( + passedOrders: Order[], + rejectedOrders: { order: Order; stage: string; reason: string }[] +) { + "use step"; + const { getWritable } = await import("workflow"); + const writable = getWritable(); + const writer = writable.getWriter(); + + await writer.write({ + type: "filter_done", + orderId: "summary", + passedOrders, + rejectedOrders, + }); + + writer.close(); +} +``` + +## Key APIs + +- [`"use workflow"`](/docs/api-reference/directives/use-workflow) — marks the orchestrator function +- [`"use step"`](/docs/api-reference/directives/use-step) — marks each filter stage as a durable step +- [`FatalError`](/docs/api-reference/workflow/fatal-error) — available for halting on critical filter failures +- [`getWritable()`](/docs/api-reference/step/get-writable) — streams per-order filter verdicts to the client diff --git a/docs/content/docs/cookbook/routing/message-translator.mdx b/docs/content/docs/cookbook/routing/message-translator.mdx new file mode 100644 index 0000000000..1a5abe704b --- /dev/null +++ b/docs/content/docs/cookbook/routing/message-translator.mdx @@ -0,0 +1,253 @@ +--- +title: Message Translator +description: Convert between external and internal message formats at the boundary. +type: guide +summary: Convert partner XML orders into your internal JSON schema at the API boundary. 
+---
+
+When your system receives messages in formats that don't match your internal schema -- XML from one partner, CSV from another, legacy JSON from a third -- use a message translator at the boundary. The workflow detects the format, maps fields to a canonical shape, validates, and delivers.
+
+## Pattern
+
+The workflow chains step functions sequentially: detect the source format, translate fields from source to canonical names, validate the output, and deliver to the target system. Each step is independently retriable.
+
+### Simplified
+
+```typescript lineNumbers
+type SourceFormat = "xml" | "csv" | "legacy-json";
+
+declare function detectSourceFormat(messageId: string, raw: string): Promise<{ sourceFormat: SourceFormat }>; // @setup
+declare function translateFields(messageId: string, format: SourceFormat): Promise<number>; // @setup
+declare function validateOutput(messageId: string, fieldCount: number): Promise<void>; // @setup
+declare function deliverMessage(messageId: string): Promise<void>; // @setup
+
+export async function messageTranslatorFlow(
+  messageId: string,
+  sourceFormat: SourceFormat = "xml",
+) {
+  "use workflow";
+
+  const rawMessage = SAMPLE_MESSAGES[sourceFormat];
+
+  const detected = await detectSourceFormat(messageId, rawMessage);
+  const fieldsTranslated = await translateFields(messageId, detected.sourceFormat);
+  await validateOutput(messageId, fieldsTranslated);
+  await deliverMessage(messageId);
+
+  return { messageId, sourceFormat: detected.sourceFormat, fieldsTranslated };
+}
+```
+
+### Full Implementation
+
+```typescript lineNumbers
+import { getWritable, sleep } from "workflow";
+
+export type SourceFormat = "xml" | "csv" | "legacy-json";
+
+export type TranslatorEvent =
+  | { type: "message_received"; messageId: string; sourceFormat: SourceFormat }
+  | { type: "detecting_format"; messageId: string }
+  | { type: "format_detected"; messageId: string; sourceFormat: SourceFormat; confidence: number }
+  | { type: "translating"; messageId: string; sourceFormat: SourceFormat; step: string }
+  | { type: "field_mapped"; messageId: string; sourceField: string; canonicalField: string; value: string }
+  | { type: "validating"; messageId: string }
+  | { type: "validation_passed"; messageId: string; fieldCount: number }
+  | { type: "delivering"; messageId: string; destination: string }
+  | { type: "done"; messageId: string; sourceFormat: SourceFormat; fieldsTranslated: number };
+
+export interface MessageTranslatorResult {
+  messageId: string;
+  sourceFormat: SourceFormat;
+  fieldsTranslated: number;
+}
+
+// Simulated raw messages from three legacy systems
+const SAMPLE_MESSAGES: Record<SourceFormat, string> = {
+  xml: `<order><id>ORD-5501</id><cust>ACME Corp</cust><amt>2499.00</amt><curr>USD</curr><date>2026-03-14</date></order>`,
+  csv: `order_id,customer_name,total_amount,currency,order_date\nORD-5502,Globex Inc,1875.50,EUR,2026-03-14`,
+  "legacy-json": `{"oid":"ORD-5503","c_name":"Initech LLC","total":"3200.00","cur":"GBP","dt":"20260314"}`,
+};
+
+// Field mappings per source format → canonical fields
+const FIELD_MAPS: Record<SourceFormat, Array<{ source: string; canonical: string }>> = {
+  xml: [
+    { source: "id", canonical: "orderId" },
+    { source: "cust", canonical: "customerName" },
+    { source: "amt", canonical: "amount" },
+    { source: "curr", canonical: "currency" },
+    { source: "date", canonical: "orderDate" },
+  ],
+  csv: [
+    { source: "order_id", canonical: "orderId" },
+    { source: "customer_name", canonical: "customerName" },
+    { source: "total_amount", canonical: "amount" },
+    { source: "currency", canonical: "currency" },
+    { source: "order_date", canonical: "orderDate" },
+  ],
+  "legacy-json": [
+    { source: "oid", canonical: "orderId" },
+    { source: "c_name", canonical: "customerName" },
+    { source: "total", canonical: "amount" },
+    { source: "cur", canonical: "currency" },
+    { source: "dt", canonical: "orderDate" },
+  ],
+};
+
+// Simulated values extracted from each format
+const EXTRACTED_VALUES: Record<SourceFormat, string[]> = {
+  xml: ["ORD-5501", "ACME Corp", "2499.00", "USD", "2026-03-14"],
+  csv: ["ORD-5502", "Globex Inc", "1875.50", "EUR", "2026-03-14"],
+ "legacy-json": ["ORD-5503", "Initech LLC", "3200.00", "GBP", "2026-03-14"], +}; + +// Demo timing +const DETECT_DELAY_MS = 600; +const FIELD_MAP_DELAY_MS = 400; +const VALIDATE_DELAY_MS = 500; +const DELIVER_DELAY_MS = 600; + +function delay(ms: number): Promise { + return new Promise((resolve) => setTimeout(resolve, ms)); +} + +function detectFormat(rawMessage: string): { sourceFormat: SourceFormat; confidence: number } { + if (rawMessage.trimStart().startsWith("<")) return { sourceFormat: "xml", confidence: 0.97 }; + if (rawMessage.includes(",") && rawMessage.includes("\n")) return { sourceFormat: "csv", confidence: 0.94 }; + if (rawMessage.trimStart().startsWith("{") && rawMessage.includes("oid")) return { sourceFormat: "legacy-json", confidence: 0.92 }; + return { sourceFormat: "csv", confidence: 0.5 }; +} + +export async function messageTranslatorFlow( + messageId: string, + sourceFormat: SourceFormat = "xml" +): Promise { + "use workflow"; + + const rawMessage = SAMPLE_MESSAGES[sourceFormat]; + + // Step 1: Receive message + await emitEvent({ type: "message_received", messageId, sourceFormat }); + + // Step 2: Detect format + const detected = await detectSourceFormat(messageId, rawMessage); + + // Step 3: Translate fields + const fieldsTranslated = await translateFields(messageId, detected.sourceFormat); + + // Step 4: Validate canonical output + await validateOutput(messageId, fieldsTranslated); + + // Step 5: Deliver to target system + await deliverMessage(messageId); + + // Step 6: Done + await emitEvent({ type: "done", messageId, sourceFormat: detected.sourceFormat, fieldsTranslated }); + + return { messageId, sourceFormat: detected.sourceFormat, fieldsTranslated }; +} + +async function detectSourceFormat( + messageId: string, + rawMessage: string +): Promise<{ sourceFormat: SourceFormat; confidence: number }> { + "use step"; + + const writer = getWritable().getWriter(); + try { + await writer.write({ type: "detecting_format", messageId }); + await 
delay(DETECT_DELAY_MS); + + const result = detectFormat(rawMessage); + await writer.write({ + type: "format_detected", + messageId, + sourceFormat: result.sourceFormat, + confidence: result.confidence, + }); + + return result; + } finally { + writer.releaseLock(); + } +} + +async function translateFields( + messageId: string, + sourceFormat: SourceFormat +): Promise { + "use step"; + + const writer = getWritable().getWriter(); + const fieldMap = FIELD_MAPS[sourceFormat]; + const values = EXTRACTED_VALUES[sourceFormat]; + + try { + await writer.write({ type: "translating", messageId, sourceFormat, step: "Parsing raw message" }); + await delay(FIELD_MAP_DELAY_MS); + + for (let i = 0; i < fieldMap.length; i++) { + const mapping = fieldMap[i]; + await writer.write({ + type: "field_mapped", + messageId, + sourceField: mapping.source, + canonicalField: mapping.canonical, + value: values[i], + }); + await delay(FIELD_MAP_DELAY_MS); + } + + return fieldMap.length; + } finally { + writer.releaseLock(); + } +} + +async function validateOutput( + messageId: string, + fieldCount: number +): Promise { + "use step"; + + const writer = getWritable().getWriter(); + try { + await writer.write({ type: "validating", messageId }); + await delay(VALIDATE_DELAY_MS); + + await writer.write({ type: "validation_passed", messageId, fieldCount }); + } finally { + writer.releaseLock(); + } +} + +async function deliverMessage( + messageId: string +): Promise { + "use step"; + + const writer = getWritable().getWriter(); + try { + await writer.write({ type: "delivering", messageId, destination: "canonical-orders-api" }); + await delay(DELIVER_DELAY_MS); + } finally { + writer.releaseLock(); + } +} + +async function emitEvent(event: TranslatorEvent): Promise { + "use step"; + const writer = getWritable().getWriter(); + try { + await writer.write(event); + } finally { + writer.releaseLock(); + } +} +``` + +## Key APIs + +- [`"use workflow"`](/docs/api-reference/directives/use-workflow) — marks the 
orchestrator function +- [`"use step"`](/docs/api-reference/directives/use-step) — marks each translation stage as a durable step +- [`getWritable()`](/docs/api-reference/step/get-writable) — streams field mapping progress to the client diff --git a/docs/content/docs/cookbook/routing/meta.json b/docs/content/docs/cookbook/routing/meta.json new file mode 100644 index 0000000000..6966cea705 --- /dev/null +++ b/docs/content/docs/cookbook/routing/meta.json @@ -0,0 +1,12 @@ +{ + "title": "Routing", + "pages": [ + "content-based-router", + "detour", + "routing-slip", + "message-translator", + "normalizer", + "content-enricher", + "message-filter" + ] +} diff --git a/docs/content/docs/cookbook/routing/normalizer.mdx b/docs/content/docs/cookbook/routing/normalizer.mdx new file mode 100644 index 0000000000..49cb5f5781 --- /dev/null +++ b/docs/content/docs/cookbook/routing/normalizer.mdx @@ -0,0 +1,312 @@ +--- +title: Normalizer +description: Map heterogeneous inputs into one canonical shape before routing. +type: guide +summary: Accept orders as XML, CSV, or legacy JSON and transform them into a single canonical shape. +--- + +When your system ingests messages from multiple sources in different formats, use a normalizer to convert them all into a single canonical shape before further processing. This decouples downstream logic from source-specific quirks. + +## Pattern + +The workflow detects the format of each incoming message, parses it into a canonical structure, and emits the normalized results. Each phase is a separate step function, so format detection and parsing are independently retriable. 
+ +### Simplified + +```typescript lineNumbers +import { FatalError } from "workflow"; + +type RawFormat = "xml" | "csv" | "legacy-json"; +type RawMessage = { id: string; format: RawFormat; payload: string }; +type CanonicalOrder = { orderId: string; customer: string; amount: number; currency: string }; + +declare function detectFormats(messages: RawMessage[]): Promise; // @setup +declare function parseToCanonical(messages: RawMessage[]): Promise<{ successful: CanonicalOrder[]; failed: { messageId: string; error: string }[] }>; // @setup + +export async function normalizer(config?: { strictMode?: boolean }) { + "use workflow"; + + const messages = SAMPLE_MESSAGES; + const detected = await detectFormats(messages); + const parsed = await parseToCanonical(detected); + + if (config?.strictMode && parsed.failed.length > 0) { + throw new FatalError(`${parsed.failed.length} messages failed normalization`); + } + + return parsed; +} +``` + +### Full Implementation + +```typescript lineNumbers +"use workflow"; + + +// --- Types --- +export type RawFormat = "xml" | "csv" | "legacy-json"; + +export type RawMessage = { + id: string; + format: RawFormat; + payload: string; +}; + +export type CanonicalOrder = { + orderId: string; + customer: string; + amount: number; + currency: string; + sourceFormat: RawFormat; +}; + +export type NormalizeEvent = { + type: + | "normalize_detect" + | "normalize_parse" + | "normalize_result" + | "normalize_done"; + messageId: string; + detectedFormat?: RawFormat; + canonical?: CanonicalOrder; + error?: string; + results?: { + successful: CanonicalOrder[]; + failed: { messageId: string; error: string }[]; + }; +}; + +export type DemoConfig = { + strictMode: boolean; // if true, fail on unknown formats; if false, skip them +}; + +const DEFAULT_CONFIG: DemoConfig = { + strictMode: false, +}; + +const SAMPLE_MESSAGES: RawMessage[] = [ + { + id: "MSG-001", + format: "xml", + payload: + 'Alice250.00USD', + }, + { + id: "MSG-002", + format: "csv", + 
payload: "C-202,Bob,89.50,EUR", + }, + { + id: "MSG-003", + format: "legacy-json", + payload: JSON.stringify({ + order_num: "L-303", + cust_name: "Charlie", + total: 1200, + cur: "GBP", + }), + }, + { + id: "MSG-004", + format: "xml", + payload: + 'Diana430.00USD', + }, + { + id: "MSG-005", + format: "csv", + payload: "C-505,Eve,75.25,CAD", + }, + { + id: "MSG-006", + format: "legacy-json", + payload: JSON.stringify({ + order_num: "L-606", + cust_name: "Frank", + total: 610, + cur: "JPY", + }), + }, +]; + +// --- Entry point --- +export async function normalizer(config?: Partial) { + "use workflow"; + const cfg = { ...DEFAULT_CONFIG, ...config }; + const messages = SAMPLE_MESSAGES; + + const detected = await detectFormats(messages); + const parsed = await parseToCanonical(detected); + await emitNormalized(parsed.successful, parsed.failed, cfg.strictMode); +} + +type DetectedMessage = RawMessage & { detectedFormat: RawFormat }; + +// --- Step: Detect format --- +export async function detectFormats( + messages: RawMessage[] +): Promise { + "use step"; + const { getWritable } = await import("workflow"); + const writable = getWritable(); + const writer = writable.getWriter(); + const results: DetectedMessage[] = []; + + for (const msg of messages) { + await new Promise((r) => setTimeout(r, 250)); + + // In a real system, format detection would inspect the payload + // Here we trust the declared format but still emit detection events + const detectedFormat = msg.format; + + await writer.write({ + type: "normalize_detect", + messageId: msg.id, + detectedFormat, + }); + + results.push({ ...msg, detectedFormat }); + } + + writer.close(); + return results; +} + +type ParseResult = { + successful: CanonicalOrder[]; + failed: { messageId: string; error: string }[]; +}; + +// --- Step: Parse to canonical --- +export async function parseToCanonical( + messages: DetectedMessage[] +): Promise { + "use step"; + const { getWritable } = await import("workflow"); + const writable = 
getWritable(); + const writer = writable.getWriter(); + const successful: CanonicalOrder[] = []; + const failed: ParseResult["failed"] = []; + + for (const msg of messages) { + await new Promise((r) => setTimeout(r, 300)); + + try { + const canonical = parseMessage(msg); + successful.push(canonical); + + await writer.write({ + type: "normalize_parse", + messageId: msg.id, + canonical, + }); + } catch (err) { + const error = err instanceof Error ? err.message : String(err); + failed.push({ messageId: msg.id, error }); + + await writer.write({ + type: "normalize_result", + messageId: msg.id, + error, + }); + } + } + + writer.close(); + return { successful, failed }; +} + +// --- Step: Emit normalized results --- +export async function emitNormalized( + successful: CanonicalOrder[], + failed: { messageId: string; error: string }[], + strictMode: boolean +) { + "use step"; + const { getWritable, FatalError: Fatal } = await import("workflow"); + const writable = getWritable(); + const writer = writable.getWriter(); + + if (strictMode && failed.length > 0) { + await writer.write({ + type: "normalize_done", + messageId: "summary", + error: `Strict mode: ${failed.length} message(s) failed to normalize`, + results: { successful, failed }, + }); + writer.close(); + throw new Fatal( + `Strict mode: ${failed.length} messages failed normalization` + ); + } + + await writer.write({ + type: "normalize_done", + messageId: "summary", + results: { successful, failed }, + }); + + writer.close(); +} + +// --- Format parsers --- +function parseMessage(msg: DetectedMessage): CanonicalOrder { + switch (msg.detectedFormat) { + case "xml": + return parseXml(msg); + case "csv": + return parseCsv(msg); + case "legacy-json": + return parseLegacyJson(msg); + default: + throw new Error(`Unknown format: ${msg.detectedFormat}`); + } +} + +function parseXml(msg: DetectedMessage): CanonicalOrder { + const id = msg.payload.match(/id="([^"]+)"/)?.[1] ?? 
"unknown"; + const customer = + msg.payload.match(/([^<]+)<\/customer>/)?.[1] ?? "unknown"; + const amount = parseFloat( + msg.payload.match(/([^<]+)<\/amount>/)?.[1] ?? "0" + ); + const currency = + msg.payload.match(/([^<]+)<\/currency>/)?.[1] ?? "USD"; + return { orderId: id, customer, amount, currency, sourceFormat: "xml" }; +} + +function parseCsv(msg: DetectedMessage): CanonicalOrder { + const [orderId, customer, amountStr, currency] = msg.payload.split(","); + return { + orderId: orderId ?? "unknown", + customer: customer ?? "unknown", + amount: parseFloat(amountStr ?? "0"), + currency: currency ?? "USD", + sourceFormat: "csv", + }; +} + +function parseLegacyJson(msg: DetectedMessage): CanonicalOrder { + const data = JSON.parse(msg.payload) as { + order_num: string; + cust_name: string; + total: number; + cur: string; + }; + return { + orderId: data.order_num, + customer: data.cust_name, + amount: data.total, + currency: data.cur, + sourceFormat: "legacy-json", + }; +} +``` + +## Key APIs + +- [`"use workflow"`](/docs/api-reference/directives/use-workflow) — marks the orchestrator function +- [`"use step"`](/docs/api-reference/directives/use-step) — marks detection, parsing, and emission as durable steps +- [`FatalError`](/docs/api-reference/workflow/fatal-error) — stops retries when strict mode validation fails +- [`getWritable()`](/docs/api-reference/step/get-writable) — streams normalization progress to the client diff --git a/docs/content/docs/cookbook/routing/routing-slip.mdx b/docs/content/docs/cookbook/routing/routing-slip.mdx new file mode 100644 index 0000000000..2a3198f5d7 --- /dev/null +++ b/docs/content/docs/cookbook/routing/routing-slip.mdx @@ -0,0 +1,149 @@ +--- +title: Routing Slip +description: Attach an itinerary to the message so each hop knows where to send it next. +type: guide +summary: Execute a flexible sequence of processing stages defined per-request in a routing slip. 
+---
+
+When each request needs a different sequence of processing stages -- and that sequence is determined at request time rather than hardcoded -- use a routing slip. The caller passes an ordered list of stages, and the workflow iterates through them.
+
+## Pattern
+
+The workflow receives a slip (an array of stage names) alongside the payload. It loops through the slip, calling a step function for each stage. This lets different requests follow completely different processing paths without changing the workflow code.
+
+### Simplified
+
+```typescript lineNumbers
+type SlipStage = "inventory" | "payment" | "packaging" | "shipping" | "notification";
+
+declare function processStage(orderId: string, stage: SlipStage, index: number): Promise<{ stage: SlipStage; status: string }>; // @setup
+
+export async function routingSlip(orderId: string, slip: SlipStage[]) {
+  "use workflow";
+
+  const results = [];
+
+  for (let i = 0; i < slip.length; i++) {
+    const result = await processStage(orderId, slip[i], i);
+    results.push(result);
+  }
+
+  return { status: "completed", orderId, stages: results };
+}
+```
+
+### Full Implementation
+
+```typescript lineNumbers
+import { getWritable } from "workflow";
+
+export type SlipStage = "inventory" | "payment" | "packaging" | "shipping" | "notification";
+
+export type SlipEvent =
+  | { type: "stage_start"; stage: SlipStage; index: number }
+  | { type: "stage_complete"; stage: SlipStage; index: number; message: string; durationMs: number }
+  | { type: "done"; totalMs: number; stageCount: number };
+
+export type StageResult = {
+  stage: SlipStage;
+  status: "completed";
+  message: string;
+  durationMs: number;
+};
+
+export type RoutingSlipResult = {
+  status: "completed";
+  orderId: string;
+  stages: StageResult[];
+  totalMs: number;
+};
+
+// Demo: per-stage delays so the UI shows staggered progression
+const STAGE_DELAY_MS: Record<SlipStage, number> = {
+  inventory: 600,
+  payment: 750,
+  packaging: 800,
+  shipping: 900,
+  notification: 650,
+}; + +export async function routingSlip( + orderId: string, + slip: SlipStage[] +): Promise { + "use workflow"; + + const results: StageResult[] = []; + const startMs = Date.now(); + + for (let i = 0; i < slip.length; i++) { + const result = await processStage(orderId, slip[i], i); + results.push(result); + } + + const totalMs = Date.now() - startMs; + await emitDone(totalMs, results.length); + + return { + status: "completed", + orderId, + stages: results, + totalMs, + }; +} + +async function processStage( + orderId: string, + stage: SlipStage, + index: number +): Promise { + "use step"; + + const writer = getWritable().getWriter(); + const startMs = Date.now(); + + try { + await writer.write({ type: "stage_start", stage, index }); + + // Demo: simulate processing time for visualization + await new Promise((r) => setTimeout(r, STAGE_DELAY_MS[stage])); + + const messages: Record = { + inventory: `Verified stock for order ${orderId}`, + payment: `Payment processed for order ${orderId}`, + packaging: `Package prepared for order ${orderId}`, + shipping: `Shipment dispatched for order ${orderId}`, + notification: `Customer notified for order ${orderId}`, + }; + + const durationMs = Date.now() - startMs; + await writer.write({ type: "stage_complete", stage, index, message: messages[stage], durationMs }); + + return { + stage, + status: "completed", + message: messages[stage], + durationMs, + }; + } finally { + writer.releaseLock(); + } +} + +async function emitDone(totalMs: number, stageCount: number): Promise { + "use step"; + + const writer = getWritable().getWriter(); + try { + await writer.write({ type: "done", totalMs, stageCount }); + } finally { + writer.releaseLock(); + } +} +``` + +## Key APIs + +- [`"use workflow"`](/docs/api-reference/directives/use-workflow) — marks the orchestrator function +- [`"use step"`](/docs/api-reference/directives/use-step) — marks each stage processor as a durable step +- [`getWritable()`](/docs/api-reference/step/get-writable) — 
streams stage progress events to the client diff --git a/docs/content/docs/cookbook/webhooks/async-request-reply.mdx b/docs/content/docs/cookbook/webhooks/async-request-reply.mdx new file mode 100644 index 0000000000..aa3fabb114 --- /dev/null +++ b/docs/content/docs/cookbook/webhooks/async-request-reply.mdx @@ -0,0 +1,263 @@ +--- +title: Async Request-Reply +description: Start work, wait off-thread, and continue when an async callback or signal arrives. +type: guide +summary: Submit a request to a vendor API and resume when the webhook callback arrives. +--- + +Submit a request to a vendor API and resume when the webhook callback arrives. Use this pattern when the external service responds asynchronously via a callback URL instead of returning a result inline. + +## Pattern + +Create a webhook, pass its token to the vendor, then race the incoming callback against a heartbeat-driven timeout. The workflow suspends with zero compute cost until the callback arrives or the deadline expires. + +### Simplified + +```typescript lineNumbers +import { createWebhook, sleep, FatalError } from "workflow"; + +declare function submitVerification(docId: string, correlationId: string, webhookToken: string): Promise; // @setup +declare function finalizeResult(result: { outcome: string; details: string }): Promise; // @setup + +export async function asyncRequestReply(documentId: string) { + "use workflow"; + + const correlationId = `doc:${documentId}`; + const webhook = createWebhook({ respondWith: "manual" }); + + await submitVerification(documentId, correlationId, webhook.token); + + // Race: wait for vendor callback OR timeout + const result = await Promise.race([ + (async () => { + for await (const request of webhook) { + const body = await request.json(); + await request.respondWith(Response.json({ ack: true })); + return body.status === "approved" + ? { outcome: "verified", details: body.details ?? "OK" } + : { outcome: "rejected", details: body.reason ?? 
"Failed" }; + } + throw new FatalError("Webhook closed without callback"); + })(), + sleep("30s").then(() => ({ outcome: "timed_out", details: "No response within 30s" })), + ]); + + await finalizeResult(result); + return { documentId, correlationId, outcome: result.outcome }; +} +``` + +### Full Implementation + +```typescript lineNumbers +import { + createWebhook, + getWritable, + sleep, + FatalError, + type RequestWithResponse, + type Webhook, +} from "workflow"; + +// ── Event types (discriminated union) ──────────────────────────────────── + +export type CallbackPayload = + | { status: "approved"; details: string } + | { status: "rejected"; reason: string }; + +export type AsyncReplyEvent = + | { type: "submitted"; documentId: string; correlationId: string; webhookToken: string } + | { type: "waiting"; correlationId: string; timeoutMs: number } + | { type: "heartbeat"; elapsed: number; timeoutMs: number } + | { type: "callback_received"; correlationId: string; payload: CallbackPayload } + | { type: "duplicate_callback_ignored"; correlationId: string } + | { type: "timed_out"; correlationId: string } + | { type: "finalized"; outcome: "verified" | "rejected" | "timed_out"; details: string } + | { type: "done"; outcome: "verified" | "rejected" | "timed_out" }; + +const TIMEOUT_MS = 30_000; // 30 seconds for demo +const HEARTBEAT_MS = 3_000; // heartbeat every 3 seconds + +// ── Main workflow ──────────────────────────────────────────────────────── + +export async function asyncRequestReply(documentId: string) { + "use workflow"; + + const correlationId = `doc:${documentId}`; + + // Phase 1 — Submit verification and register webhook + const webhook = createWebhook({ respondWith: "manual" }); + await submitVerification(documentId, correlationId, webhook.token); + + // Phase 2 — Wait for vendor callback or timeout + const result = await awaitCallbackOrTimeout(correlationId, webhook); + + // Phase 3 — Finalize and emit done + await finalizeResult(result); + + return 
{ documentId, correlationId, outcome: result.outcome }; +} + +// ── Phase 1: Submit verification ───────────────────────────────────────── + +async function submitVerification( + documentId: string, + correlationId: string, + webhookToken: string +) { + "use step"; + const writer = getWritable().getWriter(); + try { + await writer.write({ type: "submitted", documentId, correlationId, webhookToken }); + } finally { + writer.releaseLock(); + } +} + +// ── Phase 2: Await callback or timeout ─────────────────────────────────── + +type WaitResult = + | { outcome: "verified"; details: string } + | { outcome: "rejected"; details: string } + | { outcome: "timed_out"; details: string }; + +async function awaitCallbackOrTimeout( + correlationId: string, + webhook: Webhook +): Promise { + await emit({ type: "waiting", correlationId, timeoutMs: TIMEOUT_MS }); + + // Shared flag so the losing branch stops emitting after the race settles + let settled = false; + + const result = await Promise.race([ + // Branch A: listen for vendor callbacks via webhook + (async (): Promise => { + let first = true; + + for await (const request of webhook) { + const payload = await processCallback(request, correlationId, first); + + if (first) { + first = false; + settled = true; + return payload.status === "approved" + ? 
{ outcome: "verified" as const, details: payload.details } + : { outcome: "rejected" as const, details: payload.reason }; + } + // After first callback, loop continues to catch duplicates + // until the timeout branch wins the race + } + + throw new FatalError("Webhook stream closed without receiving a callback"); + })(), + + // Branch B: heartbeat loop that eventually times out + (async (): Promise => { + let elapsed = 0; + while (elapsed < TIMEOUT_MS && !settled) { + await sleep(new Date(Date.now() + HEARTBEAT_MS)); + elapsed += HEARTBEAT_MS; + if (settled) break; + if (elapsed < TIMEOUT_MS) { + await emit({ type: "heartbeat", elapsed, timeoutMs: TIMEOUT_MS }); + } + } + if (settled) { + return { outcome: "timed_out", details: "Cancelled — callback already received" }; + } + settled = true; + await emit({ type: "timed_out", correlationId }); + return { outcome: "timed_out", details: `No vendor response within ${TIMEOUT_MS / 1000}s` }; + })(), + ]); + + return result; +} + +// ── Step: process a single webhook callback ────────────────────────────── + +async function processCallback( + request: RequestWithResponse, + correlationId: string, + isFirst: boolean +): Promise { + "use step"; + + const writer = getWritable().getWriter(); + + try { + const body = await request.json().catch(() => ({})); + const payload = parseCallbackPayload(body); + + if (isFirst) { + await request.respondWith(Response.json({ ack: true, status: payload.status })); + await writer.write({ type: "callback_received", correlationId, payload }); + } else { + await request.respondWith( + Response.json({ ack: false, duplicate: true }, { status: 409 }) + ); + await writer.write({ type: "duplicate_callback_ignored", correlationId }); + } + + return payload; + } finally { + writer.releaseLock(); + } +} + +// ── Phase 3: Finalize result ───────────────────────────────────────────── + +async function finalizeResult(result: WaitResult) { + "use step"; + const writer = getWritable().getWriter(); + try { 
+    await writer.write({
+      type: "finalized",
+      outcome: result.outcome,
+      details: result.details,
+    });
+    await writer.write({ type: "done", outcome: result.outcome });
+  } finally {
+    writer.releaseLock();
+  }
+}
+
+// ── Helpers ────────────────────────────────────────────────────────────
+
+function parseCallbackPayload(body: Record<string, unknown>): CallbackPayload {
+  if (body?.status === "approved") {
+    return {
+      status: "approved",
+      details: typeof body.details === "string" ? body.details : "Document verified",
+    };
+  }
+  return {
+    status: "rejected",
+    reason: typeof body.reason === "string" ? body.reason : "Verification failed",
+  };
+}
+
+/**
+ * Step: Emit a single event to the UI stream.
+ * Re-acquires the writer inside the step so it survives durable suspension.
+ */
+async function emit(event: AsyncReplyEvent): Promise<void> {
+  "use step";
+  const writer = getWritable().getWriter();
+  try {
+    await writer.write(event);
+  } finally {
+    writer.releaseLock();
+  }
+}
+```
+
+## Key APIs
+
+- [`"use workflow"`](/docs/foundations/workflows-and-steps) — marks the orchestrator function
+- [`"use step"`](/docs/foundations/workflows-and-steps) — marks functions with full Node.js access
+- [`createWebhook()`](/docs/api-reference/workflow/create-webhook) — creates an HTTP endpoint the workflow can await
+- [`sleep()`](/docs/api-reference/workflow/sleep) — durable timer that survives restarts
+- [`FatalError`](/docs/api-reference/workflow/fatal-error) — prevents automatic retries on permanent failures
+- [`getWritable()`](/docs/api-reference/workflow/get-writable) — streams events to the client
diff --git a/docs/content/docs/cookbook/webhooks/claim-check.mdx b/docs/content/docs/cookbook/webhooks/claim-check.mdx
new file mode 100644
index 0000000000..0390f936c3
--- /dev/null
+++ b/docs/content/docs/cookbook/webhooks/claim-check.mdx
@@ -0,0 +1,107 @@
+---
+title: Claim Check
+description: Pass a small reference through the workflow; store or fetch the heavy payload elsewhere.
+type: guide +summary: Accept a lightweight token instead of passing a 50 MB file through every workflow step. +--- + +Accept a lightweight token instead of passing a 50 MB file through every workflow step. Use this pattern when payloads are too large to serialize into the event log efficiently. + +## Pattern + +The workflow receives a small identifier (the "claim check") instead of the full payload. A hook suspends execution until the external system signals that the blob is ready, then a step fetches and processes it using the token. + +### Simplified + +```typescript lineNumbers +import { defineHook } from "workflow"; + +export const blobReady = defineHook<{ blobToken: string }>(); + +declare function processBlob(blobToken: string): Promise; // @setup + +export async function claimCheckImport(importId: string) { + "use workflow"; + + const hookToken = `upload:${importId}`; + + // Suspend until the external system signals the blob is uploaded + const { blobToken } = await blobReady.create({ token: hookToken }); + + // Process the blob by reference — never pass the full payload + await processBlob(blobToken); + + return { importId, blobToken, status: "indexed" }; +} +``` + +### Full Implementation + +```typescript lineNumbers +import { defineHook, getWritable } from "workflow"; + +// Typed events streamed to the UI via getWritable() +export type ClaimCheckEvent = + | { type: "start"; importId: string; hookToken: string } + | { type: "waiting"; importId: string; hookToken: string } + | { type: "upload_received"; importId: string; blobToken: string } + | { type: "processing"; importId: string; blobToken: string } + | { type: "completed"; importId: string; blobToken: string } + | { type: "done"; importId: string; status: "indexed" }; + +export const blobReady = defineHook<{ blobToken: string }>(); + +export async function claimCheckImport(importId: string) { + "use workflow"; + + const hookToken = `upload:${importId}`; + + await emit({ type: "start", importId, 
hookToken }); + await emit({ type: "waiting", importId, hookToken }); + + // Claim-check: only a token enters the workflow (not a 50MB payload). + const { blobToken } = await blobReady.create({ token: hookToken }); + + await emit({ type: "upload_received", importId, blobToken }); + + await emit({ type: "processing", importId, blobToken }); + await processBlob(blobToken); + + await emit({ type: "completed", importId, blobToken }); + await emit({ type: "done", importId, status: "indexed" }); + + return { importId, blobToken, status: "indexed" as const }; +} + +/** + * Step: Emit a single event to the UI stream. + * Re-acquires the writer inside the step so it survives durable suspension. + */ +async function emit(event: T): Promise { + "use step"; + const writer = getWritable().getWriter(); + try { + await writer.write(event); + } finally { + writer.releaseLock(); + } +} + +async function processBlob(blobToken: string) { + "use step"; + // Simulate fetching + indexing a large blob by its token + await delay(700); + console.info("[claim-check] process_blob", { blobToken }); +} + +function delay(ms: number): Promise { + return new Promise((resolve) => setTimeout(resolve, ms)); +} +``` + +## Key APIs + +- [`"use workflow"`](/docs/foundations/workflows-and-steps) — marks the orchestrator function +- [`"use step"`](/docs/foundations/workflows-and-steps) — marks functions with full Node.js access +- [`defineHook()`](/docs/api-reference/workflow/define-hook) — creates a named hook that suspends until signaled +- [`getWritable()`](/docs/api-reference/workflow/get-writable) — streams events to the client diff --git a/docs/content/docs/cookbook/webhooks/event-gateway.mdx b/docs/content/docs/cookbook/webhooks/event-gateway.mdx new file mode 100644 index 0000000000..80bdda3533 --- /dev/null +++ b/docs/content/docs/cookbook/webhooks/event-gateway.mdx @@ -0,0 +1,146 @@ +--- +title: Event Gateway +description: Normalize many external event shapes into one internal representation. 
+type: guide +summary: Wait for payment, inventory, and fraud-check signals to all arrive before shipping an order. +--- + +Wait for payment, inventory, and fraud-check signals to all arrive before shipping an order. Use this pattern when a workflow must collect multiple independent signals before proceeding. + +## Pattern + +Create one hook per expected signal, then race `Promise.all` (all signals arrived) against `sleep` (deadline expired). If all signals arrive in time, continue to the next phase. Otherwise, report which signals are missing. + +### Simplified + +```typescript lineNumbers +import { defineHook, sleep } from "workflow"; + +export const orderSignal = defineHook<{ ok: true }>(); + +declare function shipOrder(orderId: string): Promise; // @setup + +const SIGNALS = ["payment", "inventory", "fraud"] as const; + +export async function eventGateway(orderId: string, timeoutMs: number = 10_000) { + "use workflow"; + + const hooks = SIGNALS.map((kind) => + orderSignal.create({ token: `${kind}:${orderId}` }) + ); + + const outcome = await Promise.race([ + Promise.all(hooks).then(() => ({ type: "ready" as const })), + sleep(`${timeoutMs}ms`).then(() => ({ type: "timeout" as const })), + ]); + + if (outcome.type === "timeout") { + return { orderId, status: "timeout" }; + } + + await shipOrder(orderId); + return { orderId, status: "shipped" }; +} +``` + +### Full Implementation + +```typescript lineNumbers +import { defineHook, getWritable, sleep } from "workflow"; + +// Typed events streamed to the UI via getWritable() +export type GatewayEvent = + | { type: "waiting"; orderId: string; tokens: Record; timeoutMs: number } + | { type: "signal_received"; orderId: string; signal: string; token: string } + | { type: "all_received"; orderId: string } + | { type: "shipping"; orderId: string } + | { type: "shipped"; orderId: string } + | { type: "timeout"; orderId: string; missing: string[] } + | { type: "done"; orderId: string; status: "shipped" | "timeout" }; + 
+export const orderSignal = defineHook<{ ok: true }>(); + +const SIGNAL_KINDS = ["payment", "inventory", "fraud"] as const; +export type SignalKind = (typeof SIGNAL_KINDS)[number]; + +export async function eventGateway(orderId: string, timeoutMs: number = 10_000) { + "use workflow"; + + const tokens: Record = {}; + const hooks = SIGNAL_KINDS.map((kind) => { + const token = `${kind}:${orderId}`; + tokens[kind] = token; + return { kind, hook: orderSignal.create({ token }), token }; + }); + + await emit({ + type: "waiting", + orderId, + tokens, + timeoutMs, + }); + + // Track which signals have been received + const received = new Set(); + + const signalPromises = hooks.map(({ kind, hook, token }) => + hook.then(() => { + received.add(kind); + return { kind, token }; + }) + ); + + const outcome = await Promise.race([ + Promise.all(signalPromises).then((results) => ({ type: "ready" as const, results })), + sleep(`${timeoutMs}ms`).then(() => ({ type: "timeout" as const, results: [] as { kind: string; token: string }[] })), + ]); + + // Emit signal_received events for all signals that arrived + for (const { kind, token } of outcome.results) { + await emit({ type: "signal_received", orderId, signal: kind, token }); + } + + if (outcome.type === "timeout") { + const missing = SIGNAL_KINDS.filter((k) => !received.has(k)); + await emit({ type: "timeout", orderId, missing }); + await emit({ type: "done", orderId, status: "timeout" }); + return { orderId, status: "timeout" as const }; + } + + await emit({ type: "all_received", orderId }); + await emit({ type: "shipping", orderId }); + await shipOrder(orderId); + await emit({ type: "shipped", orderId }); + await emit({ type: "done", orderId, status: "shipped" }); + return { orderId, status: "shipped" as const }; +} + +/** + * Step: Emit a single event to the UI stream. + * Re-acquires the writer inside the step so it survives durable suspension. 
+ */ +async function emit(event: T): Promise { + "use step"; + const writer = getWritable().getWriter(); + try { + await writer.write(event); + } finally { + writer.releaseLock(); + } +} + +async function shipOrder(orderId: string) { + "use step"; + // Simulate shipping API call + await new Promise((resolve) => setTimeout(resolve, 600)); + console.info("[event-gateway] ship_order", { orderId }); +} +``` + +## Key APIs + +- [`"use workflow"`](/docs/foundations/workflows-and-steps) — marks the orchestrator function +- [`"use step"`](/docs/foundations/workflows-and-steps) — marks functions with full Node.js access +- [`defineHook()`](/docs/api-reference/workflow/define-hook) — creates a named hook that suspends until signaled +- [`sleep()`](/docs/api-reference/workflow/sleep) — durable timer that survives restarts +- [`getWritable()`](/docs/api-reference/workflow/get-writable) — streams events to the client diff --git a/docs/content/docs/cookbook/webhooks/meta.json b/docs/content/docs/cookbook/webhooks/meta.json new file mode 100644 index 0000000000..d9625aa9de --- /dev/null +++ b/docs/content/docs/cookbook/webhooks/meta.json @@ -0,0 +1,11 @@ +{ + "title": "Webhooks & Callbacks", + "pages": [ + "async-request-reply", + "request-reply", + "webhook-basics", + "claim-check", + "event-gateway", + "status-poller" + ] +} diff --git a/docs/content/docs/cookbook/webhooks/request-reply.mdx b/docs/content/docs/cookbook/webhooks/request-reply.mdx new file mode 100644 index 0000000000..893b6bdaa5 --- /dev/null +++ b/docs/content/docs/cookbook/webhooks/request-reply.mdx @@ -0,0 +1,213 @@ +--- +title: Request-Reply +description: Call/response style interaction modeled inside a durable workflow. +type: guide +summary: Send a request to a service, wait for a correlated reply with a deadline, and retry on timeout. +--- + +Send a request to a service, wait for a correlated reply with a deadline, and retry on timeout. 
Use this when you need guaranteed delivery with timeout-based retries across unreliable downstream services. + +## Pattern + +For each downstream service, send the request and wait up to a deadline for the reply. If the deadline expires, retry up to a configurable maximum. The workflow orchestrates the sequential fan-out while each step handles the actual RPC call. + +### Simplified + +```typescript lineNumbers +import { sleep } from "workflow"; + +declare function sendRequest( + requestId: string, service: string, payload: string, timeoutMs: number, maxAttempts: number +): Promise<{ service: string; response: string | null; success: boolean }>; // @setup + +export async function requestReplyFlow( + requestId: string, + services: string[] = ["user-service", "inventory-service", "payment-service"], + timeoutMs: number = 800, + maxAttempts: number = 2 +) { + "use workflow"; + + const results = []; + + for (const service of services) { + const result = await sendRequest(requestId, service, `lookup:${requestId}`, timeoutMs, maxAttempts); + results.push(result); + } + + return { requestId, results }; +} +``` + +### Full Implementation + +```typescript lineNumbers +import { getWritable, sleep } from "workflow"; + +export type RequestReplyEvent = + | { type: "request_sent"; requestId: string; service: string; payload: string } + | { type: "waiting_for_reply"; requestId: string; service: string; deadline: string } + | { type: "reply_received"; requestId: string; service: string; response: string; latencyMs: number } + | { type: "timeout"; requestId: string; service: string; attempt: number } + | { type: "retrying"; requestId: string; service: string; attempt: number; maxAttempts: number } + | { type: "all_replies_collected"; requestId: string; results: Array<{ service: string; response: string }> } + | { type: "failed"; requestId: string; service: string; reason: string } + | { type: "done"; requestId: string; totalServices: number; successCount: number; failCount: number 
}; + +export interface RequestReplyResult { + requestId: string; + results: Array<{ service: string; response: string | null; success: boolean }>; +} + +// Demo timing +const REQUEST_DELAY_MS = 400; +const REPLY_BASE_DELAY_MS = 600; + +function delay(ms: number): Promise { + return new Promise((resolve) => setTimeout(resolve, ms)); +} + +// Simulated service responses — in production these would be real RPC calls +const SERVICE_RESPONSES: Record = { + "user-service": { response: "user_profile={name:'Ada',plan:'pro'}", latencyMs: 350, failOnFirstAttempt: false }, + "inventory-service": { response: "stock={sku:'WF-100',qty:42}", latencyMs: 1200, failOnFirstAttempt: true }, + "payment-service": { response: "payment={method:'card',last4:'4242'}", latencyMs: 500, failOnFirstAttempt: false }, +}; + +export async function requestReplyFlow( + requestId: string, + services: string[] = ["user-service", "inventory-service", "payment-service"], + timeoutMs: number = 800, + maxAttempts: number = 2 +): Promise { + "use workflow"; + + const results: Array<{ service: string; response: string | null; success: boolean }> = []; + + for (const service of services) { + const result = await sendRequest(requestId, service, `lookup:${requestId}`, timeoutMs, maxAttempts); + results.push(result); + } + + const successResults = results + .filter((r) => r.success && r.response) + .map((r) => ({ service: r.service, response: r.response! 
})); + + await emitEvent({ + type: "all_replies_collected", + requestId, + results: successResults, + }); + + await emitEvent({ + type: "done", + requestId, + totalServices: services.length, + successCount: results.filter((r) => r.success).length, + failCount: results.filter((r) => !r.success).length, + }); + + return { requestId, results }; +} + +async function sendRequest( + requestId: string, + service: string, + payload: string, + timeoutMs: number, + maxAttempts: number +): Promise<{ service: string; response: string | null; success: boolean }> { + "use step"; + + const writer = getWritable().getWriter(); + const serviceConfig = SERVICE_RESPONSES[service] ?? { + response: "ok", + latencyMs: 400, + failOnFirstAttempt: false, + }; + + try { + for (let attempt = 1; attempt <= maxAttempts; attempt++) { + await writer.write({ + type: "request_sent", + requestId, + service, + payload, + }); + + await delay(REQUEST_DELAY_MS); + + await writer.write({ + type: "waiting_for_reply", + requestId, + service, + deadline: `${timeoutMs}ms`, + }); + + // Simulate: first attempt of a slow service exceeds timeout + const simulatedLatency = + serviceConfig.failOnFirstAttempt && attempt === 1 + ? 
timeoutMs + 500 // will exceed deadline + : serviceConfig.latencyMs; + + if (simulatedLatency > timeoutMs) { + // Timeout — service too slow + await delay(timeoutMs); + await writer.write({ type: "timeout", requestId, service, attempt }); + + if (attempt < maxAttempts) { + await writer.write({ + type: "retrying", + requestId, + service, + attempt: attempt + 1, + maxAttempts, + }); + } + continue; + } + + // Reply arrives within deadline + await delay(simulatedLatency); + await writer.write({ + type: "reply_received", + requestId, + service, + response: serviceConfig.response, + latencyMs: simulatedLatency, + }); + + return { service, response: serviceConfig.response, success: true }; + } + + // Exhausted all attempts + await writer.write({ + type: "failed", + requestId, + service, + reason: `No reply after ${maxAttempts} attempts`, + }); + + return { service, response: null, success: false }; + } finally { + writer.releaseLock(); + } +} + +async function emitEvent(event: RequestReplyEvent): Promise { + "use step"; + const writer = getWritable().getWriter(); + try { + await writer.write(event); + } finally { + writer.releaseLock(); + } +} +``` + +## Key APIs + +- [`"use workflow"`](/docs/foundations/workflows-and-steps) — marks the orchestrator function +- [`"use step"`](/docs/foundations/workflows-and-steps) — marks functions with full Node.js access +- [`sleep()`](/docs/api-reference/workflow/sleep) — durable timer that survives restarts +- [`getWritable()`](/docs/api-reference/workflow/get-writable) — streams events to the client diff --git a/docs/content/docs/cookbook/webhooks/status-poller.mdx b/docs/content/docs/cookbook/webhooks/status-poller.mdx new file mode 100644 index 0000000000..197cba771d --- /dev/null +++ b/docs/content/docs/cookbook/webhooks/status-poller.mdx @@ -0,0 +1,221 @@ +--- +title: Status Poller +description: Poll an external API or job until it reaches a terminal state, with backoff. 
+type: guide +summary: Poll a video transcoding job until it's ready, sleeping between checks with a max-poll safety valve. +--- + +Poll a video transcoding job until it's ready, sleeping between checks with a max-poll safety valve. Use this pattern when an external system has no callback mechanism and you need to wait for completion. + +## Pattern + +Loop up to a maximum number of polls. Each iteration calls a step to check the job status. If the job is not ready, use `sleep()` to wait before the next check. The durable sleep means the workflow consumes zero compute while waiting. + +### Simplified + +```typescript lineNumbers +import { sleep } from "workflow"; + +declare function checkTranscodeJob(jobId: string, poll: number): Promise; // @setup + +export async function pollTranscodeStatus( + jobId: string, + maxPolls: number = 8, + intervalMs: number = 1000 +) { + "use workflow"; + + for (let poll = 1; poll <= maxPolls; poll++) { + const state = await checkTranscodeJob(jobId, poll); + + if (state === "ready") { + return { jobId, status: "completed", pollCount: poll }; + } + + if (poll < maxPolls) { + await sleep(`${intervalMs}ms`); + } + } + + return { jobId, status: "timeout", pollCount: maxPolls }; +} +``` + +### Full Implementation + +```typescript lineNumbers +import { sleep, getWritable } from "workflow"; + +export type JobState = + | "queued" + | "processing" + | "encoding" + | "finalizing" + | "ready"; + +export type PollEvent = + | { type: "poll_start"; poll: number; jobId: string } + | { + type: "poll_result"; + poll: number; + jobState: JobState; + outcome: "not_ready" | "ready"; + } + | { type: "sleep_start"; poll: number; durationMs: number } + | { type: "sleep_end"; poll: number } + | { type: "completed"; poll: number; jobId: string } + | { type: "timeout"; poll: number; jobId: string } + | { type: "done"; jobId: string; status: "completed" | "timeout"; pollCount: number }; + +export interface PollResult { + jobId: string; + status: "completed" | 
"timeout"; + pollCount: number; + finalState?: string; +} + +const STEP_DELAY_MS = 500; +const JOB_STATE_SEQUENCE: JobState[] = [ + "queued", + "processing", + "encoding", + "finalizing", + "ready", +]; + +function delay(ms: number): Promise { + return new Promise((resolve) => setTimeout(resolve, ms)); +} + +async function safeWrite( + writer: WritableStreamDefaultWriter, + event: PollEvent +): Promise { + try { + await writer.write(event); + } catch { + // Best-effort streaming + } +} + +export async function pollTranscodeStatus( + jobId: string, + maxPolls: number = 8, + intervalMs: number = 1000, + readyAtPoll: number = 4 +): Promise { + "use workflow"; + + for (let poll = 1; poll <= maxPolls; poll++) { + const state = await checkTranscodeJob(jobId, poll, readyAtPoll); + + if (state === "ready") { + await emitDone(jobId, "completed", poll); + return { jobId, status: "completed", pollCount: poll, finalState: state }; + } + + if (poll < maxPolls) { + await emitSleepStart(poll, intervalMs); + await sleep(`${intervalMs}ms`); + await emitSleepEnd(poll); + } + } + + await emitTimeout(maxPolls, jobId); + await emitDone(jobId, "timeout", maxPolls); + + return { jobId, status: "timeout", pollCount: maxPolls }; +} + +async function checkTranscodeJob( + jobId: string, + poll: number, + readyAtPoll: number +): Promise { + "use step"; + + const writer = getWritable().getWriter(); + + try { + await safeWrite(writer, { type: "poll_start", poll, jobId }); + await delay(STEP_DELAY_MS); + + let jobState: JobState; + if (poll >= readyAtPoll) { + jobState = "ready"; + } else { + const statesBeforeReady = JOB_STATE_SEQUENCE.slice(0, -1); + const idx = Math.min(poll - 1, statesBeforeReady.length - 1); + jobState = statesBeforeReady[idx]; + } + + const outcome = jobState === "ready" ? 
"ready" : "not_ready"; + await safeWrite(writer, { type: "poll_result", poll, jobState, outcome }); + + if (outcome === "ready") { + await safeWrite(writer, { type: "completed", poll, jobId }); + } + + return jobState; + } finally { + writer.releaseLock(); + } +} + +checkTranscodeJob.maxRetries = 0; + +async function emitSleepStart(poll: number, durationMs: number): Promise { + "use step"; + const writer = getWritable().getWriter(); + try { + await safeWrite(writer, { type: "sleep_start", poll, durationMs }); + } finally { + writer.releaseLock(); + } +} + +emitSleepStart.maxRetries = 0; + +async function emitSleepEnd(poll: number): Promise { + "use step"; + const writer = getWritable().getWriter(); + try { + await safeWrite(writer, { type: "sleep_end", poll }); + } finally { + writer.releaseLock(); + } +} + +emitSleepEnd.maxRetries = 0; + +async function emitTimeout(poll: number, jobId: string): Promise { + "use step"; + const writer = getWritable().getWriter(); + try { + await safeWrite(writer, { type: "timeout", poll, jobId }); + } finally { + writer.releaseLock(); + } +} + +emitTimeout.maxRetries = 0; + +async function emitDone(jobId: string, status: "completed" | "timeout", pollCount: number): Promise { + "use step"; + const writer = getWritable().getWriter(); + try { + await safeWrite(writer, { type: "done", jobId, status, pollCount }); + } finally { + writer.releaseLock(); + } +} + +emitDone.maxRetries = 0; +``` + +## Key APIs + +- [`"use workflow"`](/docs/foundations/workflows-and-steps) — marks the orchestrator function +- [`"use step"`](/docs/foundations/workflows-and-steps) — marks functions with full Node.js access +- [`sleep()`](/docs/api-reference/workflow/sleep) — durable timer that survives restarts +- [`getWritable()`](/docs/api-reference/workflow/get-writable) — streams events to the client diff --git a/docs/content/docs/cookbook/webhooks/webhook-basics.mdx b/docs/content/docs/cookbook/webhooks/webhook-basics.mdx new file mode 100644 index 
0000000000..e9f1c1b7ad --- /dev/null +++ b/docs/content/docs/cookbook/webhooks/webhook-basics.mdx @@ -0,0 +1,137 @@ +--- +title: Webhook Basics +description: Ingest HTTP webhooks, validate, and drive workflow steps from external systems. +type: guide +summary: Accept Stripe or GitHub webhooks, validate signatures, and kick off internal workflow steps. +--- + +Accept Stripe or GitHub webhooks, validate signatures, and kick off internal workflow steps. Use this pattern whenever an external system pushes events to your application via HTTP callbacks. + +## Pattern + +Create a webhook with manual response control, then iterate over incoming requests in a `for await` loop. Each request is processed in its own step, letting you validate, respond, and record the event durably. + +### Simplified + +```typescript lineNumbers +import { createWebhook, type RequestWithResponse } from "workflow"; + +declare function processPaymentEvent(request: RequestWithResponse): Promise<{ type: string }>; // @setup + +export async function paymentWebhook(orderId: string) { + "use workflow"; + + const webhook = createWebhook({ respondWith: "manual" }); + + const ledger: { type: string }[] = []; + + for await (const request of webhook) { + const entry = await processPaymentEvent(request); + ledger.push(entry); + if (entry.type === "refund.created" || entry.type === "order.completed") break; + } + + return { orderId, ledger, status: "settled" }; +} +``` + +### Full Implementation + +```typescript lineNumbers +import { createWebhook, getWritable, type RequestWithResponse } from "workflow"; + +export type WebhookEvent = + | { type: "webhook_ready"; token: string } + | { type: "event_received"; eventType: string; amount?: number } + | { type: "response_sent"; eventType: string; action: string } + | { type: "done"; status: "settled"; ledgerSize: number }; + +const MAX_EVENTS = 50; + +export async function paymentWebhook(orderId: string) { + "use workflow"; + + const webhook = createWebhook({ + 
respondWith: "manual", + }); + + await emit({ type: "webhook_ready", token: webhook.token }); + + const ledger: { type: string; amount?: number; processedAt: string }[] = []; + + for await (const request of webhook) { + const entry = await processPaymentEvent(request); + ledger.push(entry); + if (entry.type === "refund.created" || entry.type === "order.completed") break; + if (ledger.length >= MAX_EVENTS) break; + } + + await emit({ type: "done", status: "settled", ledgerSize: ledger.length }); + + return { orderId, webhookUrl: webhook.url, ledger, status: "settled" as const }; +} + +/** + * Step: Emit a single event to the UI stream. + * Re-acquires the writer inside the step so it survives durable suspension. + */ +async function emit(event: T): Promise { + "use step"; + const writer = getWritable().getWriter(); + try { + await writer.write(event); + } finally { + writer.releaseLock(); + } +} + +async function processPaymentEvent( + request: RequestWithResponse +) { + "use step"; + + const writer = getWritable().getWriter(); + + try { + const body = await request.json().catch(() => ({})); + const type = body?.type ?? "unknown"; + const amount = typeof body?.amount === "number" ? 
body.amount : undefined; + + await writer.write({ type: "event_received", eventType: type, amount }); + + let action = "ignored"; + + if (type === "payment.created") { + action = "received"; + await request.respondWith(Response.json({ ack: true, action })); + } else if (type === "payment.requires_action") { + action = "awaiting customer"; + await request.respondWith(Response.json({ ack: true, action })); + } else if (type === "payment.succeeded") { + action = "captured"; + await request.respondWith(Response.json({ ack: true, action })); + } else if (type === "payment.failed") { + action = "flagged for review"; + await request.respondWith(Response.json({ ack: true, action })); + } else if (type === "refund.created") { + action = "refunded"; + await request.respondWith(Response.json({ ack: true, action })); + } else { + await request.respondWith(Response.json({ ack: true, action })); + } + + await writer.write({ type: "response_sent", eventType: type, action }); + + return { type, amount, processedAt: new Date().toISOString() }; + } finally { + writer.releaseLock(); + } +} +``` + +## Key APIs + +- [`"use workflow"`](/docs/foundations/workflows-and-steps) — marks the orchestrator function +- [`"use step"`](/docs/foundations/workflows-and-steps) — marks functions with full Node.js access +- [`createWebhook()`](/docs/api-reference/workflow/create-webhook) — creates an HTTP endpoint the workflow can await +- [`getWritable()`](/docs/api-reference/workflow/get-writable) — streams events to the client diff --git a/docs/content/docs/meta.json b/docs/content/docs/meta.json index eda30fd046..0c8ad884cd 100644 --- a/docs/content/docs/meta.json +++ b/docs/content/docs/meta.json @@ -4,6 +4,7 @@ "---", "getting-started", "foundations", + "cookbook", "how-it-works", "observability", "ai", diff --git a/docs/lib/cookbook-tree.ts b/docs/lib/cookbook-tree.ts new file mode 100644 index 0000000000..7644f9ad96 --- /dev/null +++ b/docs/lib/cookbook-tree.ts @@ -0,0 +1,757 @@ +export type 
Branch = { + label: string; + icon: string; + slugs?: string[]; + next?: TreeNode; +}; + +export type TreeNode = { + id: string; + question: string; + branches: Branch[]; +}; + +export type Recipe = { + slug: string; + title: string; + description: string; + whenToUse: string; + category: string; +}; + +/** Map from slug → category folder for URL construction */ +export const slugToCategory: Record = { + saga: 'payments', + choreography: 'payments', + 'process-manager': 'payments', + 'guaranteed-delivery': 'payments', + 'transactional-outbox': 'payments', + 'idempotent-receiver': 'payments', + + 'approval-gate': 'approvals', + 'cancellable-export': 'approvals', + 'approval-chain': 'approvals', + 'scheduler-agent-supervisor': 'approvals', + + 'retry-backoff': 'resilience', + 'retryable-rate-limit': 'resilience', + throttle: 'resilience', + 'circuit-breaker': 'resilience', + bulkhead: 'resilience', + 'hedge-request': 'resilience', + 'dead-letter-queue': 'resilience', + + 'fan-out': 'notifications', + 'publish-subscribe': 'notifications', + 'recipient-list': 'notifications', + 'onboarding-drip': 'notifications', + 'wakeable-reminder': 'notifications', + 'scheduled-digest': 'notifications', + + 'async-request-reply': 'webhooks', + 'request-reply': 'webhooks', + 'webhook-basics': 'webhooks', + 'claim-check': 'webhooks', + 'event-gateway': 'webhooks', + 'status-poller': 'webhooks', + + pipeline: 'data-processing', + 'batch-processor': 'data-processing', + 'map-reduce': 'data-processing', + 'scatter-gather': 'data-processing', + aggregator: 'data-processing', + splitter: 'data-processing', + resequencer: 'data-processing', + 'competing-consumers': 'data-processing', + 'priority-queue': 'data-processing', + + 'content-based-router': 'routing', + detour: 'routing', + 'routing-slip': 'routing', + 'message-translator': 'routing', + normalizer: 'routing', + 'content-enricher': 'routing', + 'message-filter': 'routing', + + 'wire-tap': 'observability', + 'message-history': 
'observability', + 'correlation-identifier': 'observability', + 'event-sourcing': 'observability', + 'namespaced-streams': 'observability', +}; + +/** All recipe metadata, keyed by slug */ +export const recipes: Record = { + saga: { + slug: 'saga', + title: 'Saga', + description: + 'Long-lived transaction across services using forward steps and compensations.', + whenToUse: + 'Upgrade a subscription (reserve seats, capture invoice, provision) with auto-rollback on failure.', + category: 'payments', + }, + choreography: { + slug: 'choreography', + title: 'Choreography', + description: + 'Peers react to events independently \u2014 no central orchestrator.', + whenToUse: + 'Order flow where inventory, payment, and shipping react to events with automatic compensation on failure.', + category: 'payments', + }, + 'process-manager': { + slug: 'process-manager', + title: 'Process Manager', + description: + 'Track a multi-step business process and react to events until it completes.', + whenToUse: + 'Orchestrate payment, inventory, backorder, shipping, and delivery with branching logic.', + category: 'payments', + }, + 'guaranteed-delivery': { + slug: 'guaranteed-delivery', + title: 'Guaranteed Delivery', + description: + 'Persist-and-retry semantics so work is not lost across crashes or restarts.', + whenToUse: + 'Ensure a payment confirmation is delivered even if the server restarts mid-send.', + category: 'payments', + }, + 'transactional-outbox': { + slug: 'transactional-outbox', + title: 'Transactional Outbox', + description: + 'Write business data and an outbox event in one transaction, then publish reliably.', + whenToUse: + 'Persist an order and relay it to a message broker in one transaction for at-least-once delivery.', + category: 'payments', + }, + 'idempotent-receiver': { + slug: 'idempotent-receiver', + title: 'Idempotent Receiver', + description: + 'Handle duplicate deliveries safely (same logical operation, same outcome).', + whenToUse: + 'Detect duplicate 
payment webhooks with an idempotency key and return the cached result.', + category: 'payments', + }, + 'approval-gate': { + slug: 'approval-gate', + title: 'Approval Gate', + description: + 'Pause the workflow until a human approves or rejects, then resume or fail.', + whenToUse: + 'Content moderation hold: pause publishing until a reviewer clicks approve or reject.', + category: 'approvals', + }, + 'cancellable-export': { + slug: 'cancellable-export', + title: 'Cancellable Export', + description: + 'Long-running job that the user can cancel while steps are in flight.', + whenToUse: + 'User starts a 100k-row data export and hits Cancel mid-flight without waiting for completion.', + category: 'approvals', + }, + 'approval-chain': { + slug: 'approval-chain', + title: 'Approval Chain', + description: + 'Route work through a sequence of approvers; advance only when each step signs off.', + whenToUse: + 'Purchase orders needing manager, director, VP sign-off with per-level escalation timeouts.', + category: 'approvals', + }, + 'scheduler-agent-supervisor': { + slug: 'scheduler-agent-supervisor', + title: 'Scheduler-Agent-Supervisor', + description: + 'Scheduled triggers plus supervised agent/worker style execution.', + whenToUse: + 'Dispatch content generation to agents in sequence, checking quality thresholds with escalation.', + category: 'approvals', + }, + 'retry-backoff': { + slug: 'retry-backoff', + title: 'Retry with Backoff', + description: + 'Retry failed steps with increasing delay to avoid hammering flaky dependencies.', + whenToUse: + 'Retry a flaky email API with 1s, 2s, 4s backoff instead of failing on the first hiccup.', + category: 'resilience', + }, + 'retryable-rate-limit': { + slug: 'retryable-rate-limit', + title: 'Retryable Rate Limit', + description: + 'On 429 / rate limits, back off and retry instead of failing immediately.', + whenToUse: + 'Sync contacts to an external CRM and auto-retry when the API returns 429 with retry-after.', + category: 
'resilience', + }, + throttle: { + slug: 'throttle', + title: 'Throttle', + description: + 'Limit how often work runs or how many concurrent operations are allowed.', + whenToUse: + 'Cap outbound API calls to 10/second so you do not blow your third-party rate limit.', + category: 'resilience', + }, + 'circuit-breaker': { + slug: 'circuit-breaker', + title: 'Circuit Breaker', + description: + 'Stop calling a failing dependency for a cooldown, then probe for recovery.', + whenToUse: + 'Stop hammering a down payment gateway after 3 failures, wait 30s, then test with one probe request.', + category: 'resilience', + }, + bulkhead: { + slug: 'bulkhead', + title: 'Bulkhead', + description: + 'Isolate capacity or failure domains so one overloaded path does not sink the whole system.', + whenToUse: + 'Partition order items into isolated groups so one bad SKU does not block the rest of the shipment.', + category: 'resilience', + }, + 'hedge-request': { + slug: 'hedge-request', + title: 'Hedge Request', + description: + 'Send duplicate requests; take the first successful response to cut tail latency.', + whenToUse: + 'Fire the same search query to two replicas and use whichever responds first.', + category: 'resilience', + }, + 'dead-letter-queue': { + slug: 'dead-letter-queue', + title: 'Dead Letter Queue', + description: + 'After repeated failure, move a message aside for inspection instead of infinite retry.', + whenToUse: + 'Route undeliverable messages to a dead-letter queue after 3 retries for ops review.', + category: 'resilience', + }, + 'fan-out': { + slug: 'fan-out', + title: 'Fan-Out', + description: + 'One trigger fans out to parallel branches (often paired with gather/aggregate).', + whenToUse: + 'Broadcast an incident alert to Slack, email, SMS, and PagerDuty in parallel.', + category: 'notifications', + }, + 'publish-subscribe': { + slug: 'publish-subscribe', + title: 'Publish-Subscribe', + description: + 'One publisher, many subscribers \u2014 broadcast-style 
distribution.', + whenToUse: + 'A product-update event triggers email, push notification, and analytics subscribers independently.', + category: 'notifications', + }, + 'recipient-list': { + slug: 'recipient-list', + title: 'Recipient List', + description: + 'Same logical message delivered to a list of recipients (static or dynamic).', + whenToUse: + 'Evaluate severity rules at runtime and alert matching channels (Slack, email, PagerDuty).', + category: 'notifications', + }, + 'onboarding-drip': { + slug: 'onboarding-drip', + title: 'Onboarding Drip', + description: + 'Time-delayed sequence (e.g. emails or nudges) with durable waits between steps.', + whenToUse: + 'Send a welcome email on signup, a tips email after 2 days, and a check-in after a week.', + category: 'notifications', + }, + 'wakeable-reminder': { + slug: 'wakeable-reminder', + title: 'Wakeable Reminder', + description: + 'Sleep until a deadline or wake early when an external event arrives.', + whenToUse: + 'Schedule a payment reminder for 3 days out, but let the user cancel, snooze, or pay early via webhook.', + category: 'notifications', + }, + 'scheduled-digest': { + slug: 'scheduled-digest', + title: 'Scheduled Digest', + description: + 'Accumulate activity and emit a summary on a schedule (e.g. 
daily digest).', + whenToUse: + 'Open a 1-hour collection window for events, then email a digest when the window closes.', + category: 'notifications', + }, + 'async-request-reply': { + slug: 'async-request-reply', + title: 'Async Request-Reply', + description: + 'Start work, wait off-thread, and continue when an async callback or signal arrives.', + whenToUse: + 'Submit a request to a vendor API and resume when the webhook callback arrives.', + category: 'webhooks', + }, + 'request-reply': { + slug: 'request-reply', + title: 'Request-Reply', + description: + 'Call/response style interaction modeled inside a durable workflow.', + whenToUse: + 'Send a request to a service, wait for a correlated reply with a deadline, and retry on timeout.', + category: 'webhooks', + }, + 'webhook-basics': { + slug: 'webhook-basics', + title: 'Webhook Basics', + description: + 'Ingest HTTP webhooks, validate, and drive workflow steps from external systems.', + whenToUse: + 'Accept Stripe or GitHub webhooks, validate signatures, and kick off internal workflow steps.', + category: 'webhooks', + }, + 'claim-check': { + slug: 'claim-check', + title: 'Claim Check', + description: + 'Pass a small reference through the workflow; store or fetch the heavy payload elsewhere.', + whenToUse: + 'Accept a lightweight token instead of passing a 50 MB file through every workflow step.', + category: 'webhooks', + }, + 'event-gateway': { + slug: 'event-gateway', + title: 'Event Gateway', + description: + 'Normalize many external event shapes into one internal representation.', + whenToUse: + 'Wait for payment, inventory, and fraud-check signals to all arrive before shipping an order.', + category: 'webhooks', + }, + 'status-poller': { + slug: 'status-poller', + title: 'Status Poller', + description: + 'Poll an external API or job until it reaches a terminal state, with backoff.', + whenToUse: + 'Poll a video transcoding job until it is ready, sleeping between checks with a max-poll safety valve.', + 
category: 'webhooks', + }, + pipeline: { + slug: 'pipeline', + title: 'Pipeline', + description: + 'Linear chain of stages \u2014 each step\u2019s output feeds the next.', + whenToUse: + 'Run a 4-stage ETL (extract, transform, validate, load) with live progress streaming.', + category: 'data-processing', + }, + 'batch-processor': { + slug: 'batch-processor', + title: 'Batch Processor', + description: + 'Collect items over time or up to a size, then process them as a single batch.', + whenToUse: + 'Process a large CSV import in batches, auto-resuming from the last completed batch after a crash.', + category: 'data-processing', + }, + 'map-reduce': { + slug: 'map-reduce', + title: 'Map-Reduce', + description: + 'Map work in parallel, then reduce partial results into a single answer.', + whenToUse: + 'Partition a large analytics dataset into chunks, process in parallel, and merge into one report.', + category: 'data-processing', + }, + 'scatter-gather': { + slug: 'scatter-gather', + title: 'Scatter-Gather', + description: + 'Fan out to many workers, then collect and merge their replies.', + whenToUse: + 'Query 4 shipping providers for quotes in parallel and pick the cheapest one that responds.', + category: 'data-processing', + }, + aggregator: { + slug: 'aggregator', + title: 'Aggregator', + description: + 'Merge many parallel outcomes into one combined result (pair with scatter-gather / fan-out).', + whenToUse: + 'Collect inventory from multiple warehouses with a timeout so stragglers do not block checkout.', + category: 'data-processing', + }, + splitter: { + slug: 'splitter', + title: 'Splitter', + description: + 'Break one compound message into many smaller messages for downstream steps.', + whenToUse: + 'Split a multi-item order into individual line items for independent validation and fulfillment.', + category: 'data-processing', + }, + resequencer: { + slug: 'resequencer', + title: 'Resequencer', + description: + 'Buffer and reorder out-of-order messages 
before the next stage.', + whenToUse: + 'Buffer out-of-order webhook fragments and release them in the correct sequence.', + category: 'data-processing', + }, + 'competing-consumers': { + slug: 'competing-consumers', + title: 'Competing Consumers', + description: + 'Multiple workers consume the same kind of work for throughput and scale-out.', + whenToUse: + 'Multiple workflow instances race to claim items from a shared queue \u2014 only one wins each item.', + category: 'data-processing', + }, + 'priority-queue': { + slug: 'priority-queue', + title: 'Priority Queue', + description: 'Prefer higher-priority work when multiple items are waiting.', + whenToUse: + 'Process enterprise-tier jobs before free-tier jobs when the queue is backed up.', + category: 'data-processing', + }, + 'content-based-router': { + slug: 'content-based-router', + title: 'Content-Based Router', + description: + 'Branch to different handlers based on fields inside the message or payload.', + whenToUse: + 'Classify a support ticket and route it to billing, technical, account, or feedback handlers.', + category: 'routing', + }, + detour: { + slug: 'detour', + title: 'Detour', + description: + 'Temporarily bypass or replace a step (e.g. 
maintenance, A/B, fallback path).', + whenToUse: + 'Toggle a QA review stage on/off in a deploy pipeline based on a runtime feature flag.', + category: 'routing', + }, + 'routing-slip': { + slug: 'routing-slip', + title: 'Routing Slip', + description: + 'Attach an itinerary to the message so each hop knows where to send it next.', + whenToUse: + 'Execute a flexible sequence of processing stages defined per-request in a routing slip.', + category: 'routing', + }, + 'message-translator': { + slug: 'message-translator', + title: 'Message Translator', + description: + 'Convert between external and internal message formats at the boundary.', + whenToUse: + 'Convert partner XML orders into your internal JSON schema at the API boundary.', + category: 'routing', + }, + normalizer: { + slug: 'normalizer', + title: 'Normalizer', + description: + 'Map heterogeneous inputs into one canonical shape before routing.', + whenToUse: + 'Accept orders as XML, CSV, or legacy JSON and transform them into a single canonical shape.', + category: 'routing', + }, + 'content-enricher': { + slug: 'content-enricher', + title: 'Content Enricher', + description: + 'Look up extra data and attach it before the next step sees the message.', + whenToUse: + 'Enrich a sales lead by querying CRM, social, and Clearbit in parallel before routing to sales.', + category: 'routing', + }, + 'message-filter': { + slug: 'message-filter', + title: 'Message Filter', + description: + 'Drop or accept messages based on rules before downstream processing.', + whenToUse: + 'Drop low-priority log events before they hit the expensive analytics pipeline.', + category: 'routing', + }, + 'wire-tap': { + slug: 'wire-tap', + title: 'Wire Tap', + description: + 'Observe or copy messages in flight for logging/debugging without changing the main path.', + whenToUse: + 'Mirror production order events to a debug logger without touching the main processing path.', + category: 'observability', + }, + 'message-history': { + slug: 
'message-history', + title: 'Message History', + description: + 'Keep an audit trail of what passed through the flow and in what order.', + whenToUse: + 'Track a support ticket through normalize, classify, route, dispatch with full history at each step.', + category: 'observability', + }, + 'correlation-identifier': { + slug: 'correlation-identifier', + title: 'Correlation Identifier', + description: + 'Tie outbound requests to the right workflow run when async replies arrive.', + whenToUse: + 'Tag outbound API calls with a correlation ID so async responses match back to the right order.', + category: 'observability', + }, + 'event-sourcing': { + slug: 'event-sourcing', + title: 'Event Sourcing', + description: + 'Drive behavior from an append-only event log; rebuild or audit state from history.', + whenToUse: + 'Append domain events to an immutable log and replay them to detect bugs or migrate projections.', + category: 'observability', + }, + 'namespaced-streams': { + slug: 'namespaced-streams', + title: 'Namespaced Streams', + description: + 'Separate streams (e.g. 
per tenant or topic) so clients only see relevant events.', + whenToUse: + 'Emit workflow events to separate UI and ops-telemetry streams simultaneously.', + category: 'observability', + }, +}; + +/** The decision tree */ +export const tree: TreeNode = { + id: 'root', + question: 'I want to\u2026', + branches: [ + { + label: 'Process payments & orders', + icon: '$', + next: { + id: 'payments', + question: 'What happens if a step fails?', + branches: [ + { + label: 'Roll back everything automatically', + icon: '\u21a9', + slugs: ['saga'], + }, + { + label: 'Let services react independently', + icon: '\u26a1', + slugs: ['choreography'], + }, + { + label: 'Orchestrate with branching logic', + icon: '\u25c8', + slugs: ['process-manager', 'pipeline'], + }, + { + label: 'Make sure nothing gets lost', + icon: '\u2713', + slugs: [ + 'guaranteed-delivery', + 'transactional-outbox', + 'idempotent-receiver', + ], + }, + ], + }, + }, + { + label: 'Approve or review something', + icon: '\u270b', + next: { + id: 'approve', + question: 'How many approvers?', + branches: [ + { + label: 'One person', + icon: '1', + slugs: ['approval-gate', 'cancellable-export'], + }, + { + label: 'A chain of approvers', + icon: '\u22ef', + slugs: ['approval-chain', 'scheduler-agent-supervisor'], + }, + ], + }, + }, + { + label: 'Handle flaky APIs', + icon: '\u21bb', + next: { + id: 'flaky', + question: "What's going wrong?", + branches: [ + { + label: 'Random failures or timeouts', + icon: '\u26a0', + slugs: ['retry-backoff'], + }, + { + label: 'Rate limited (429s)', + icon: '\u2298', + slugs: ['retryable-rate-limit', 'throttle'], + }, + { + label: 'Service is fully down', + icon: '\u2715', + slugs: ['circuit-breaker', 'bulkhead'], + }, + { + label: 'Too slow, need a faster fallback', + icon: '\u23f1', + slugs: ['hedge-request', 'dead-letter-queue'], + }, + ], + }, + }, + { + label: 'Send notifications & alerts', + icon: '\u2192', + next: { + id: 'notify', + question: 'How should they be sent?', + 
branches: [ + { + label: 'All at once, in parallel', + icon: '\u2ad8', + slugs: ['fan-out', 'publish-subscribe'], + }, + { + label: 'Only to matching recipients', + icon: '\u2442', + slugs: ['recipient-list'], + }, + { + label: 'Spread out over days or weeks', + icon: '\u25f4', + slugs: ['onboarding-drip', 'wakeable-reminder'], + }, + { + label: 'Batched into a digest', + icon: '\u25a4', + slugs: ['scheduled-digest'], + }, + ], + }, + }, + { + label: 'Wait for a webhook or callback', + icon: '\u2193', + next: { + id: 'wait', + question: 'What are you waiting for?', + branches: [ + { + label: 'An async API response', + icon: '\u21c4', + slugs: ['async-request-reply', 'request-reply'], + }, + { + label: 'An inbound webhook', + icon: '\u2193', + slugs: ['webhook-basics', 'claim-check'], + }, + { + label: 'Multiple signals to converge', + icon: '\u2295', + slugs: ['event-gateway'], + }, + { + label: 'A job to finish (polling)', + icon: '\u25f4', + slugs: ['status-poller'], + }, + ], + }, + }, + { + label: 'Process data in bulk', + icon: '\u25a4', + next: { + id: 'bulk', + question: "What's the shape of the work?", + branches: [ + { + label: 'Linear pipeline (A then B then C)', + icon: '\u25b8', + slugs: ['pipeline', 'batch-processor'], + }, + { + label: 'Parallel map, then merge results', + icon: '\u2295', + slugs: ['map-reduce', 'scatter-gather', 'aggregator'], + }, + { + label: 'Split one payload into many', + icon: '\u2ad8', + slugs: ['splitter', 'resequencer'], + }, + { + label: 'Many workers competing for items', + icon: '\u2299', + slugs: ['competing-consumers', 'priority-queue'], + }, + ], + }, + }, + { + label: 'Route to the right handler', + icon: '\u2442', + next: { + id: 'route', + question: "What's the main operation?", + branches: [ + { + label: 'Branch based on message content', + icon: '\u25c8', + slugs: ['content-based-router', 'detour'], + }, + { + label: 'Dynamic route list per request', + icon: '\u22ef', + slugs: ['routing-slip', 'recipient-list'], + 
}, + { + label: 'Transform or normalize the format', + icon: '\u21c4', + slugs: ['message-translator', 'normalizer', 'content-enricher'], + }, + { + label: 'Filter out noise before processing', + icon: '\u2715', + slugs: ['message-filter'], + }, + ], + }, + }, + { + label: 'Observe & audit the flow', + icon: '\u25ce', + slugs: [ + 'wire-tap', + 'message-history', + 'correlation-identifier', + 'event-sourcing', + 'namespaced-streams', + ], + }, + ], +}; From 1ece03ce8c4e1affc254d99ef6a1cabf173a2abc Mon Sep 17 00:00:00 2001 From: John Lindquist Date: Tue, 31 Mar 2026 00:55:43 -0600 Subject: [PATCH 02/17] ploop: iteration 1 checkpoint Automated checkpoint commit. Ploop-Iter: 1 --- .workflow-vitest/workflows.mjs.debug.json | 495 +++++++++++++++ .../app/[lang]/cookbooks/[[...slug]]/page.tsx | 115 ++++ docs/app/[lang]/cookbooks/layout.tsx | 13 + .../geistdocs/cookbook-explorer.tsx | 2 +- .../cookbook/approvals/approval-chain.mdx | 197 +----- .../docs/cookbook/approvals/approval-gate.mdx | 172 +----- .../cookbook/approvals/cancellable-export.mdx | 91 +-- .../approvals/scheduler-agent-supervisor.mdx | 294 +-------- .../cookbook/data-processing/aggregator.mdx | 189 +----- .../data-processing/batch-processor.mdx | 140 +---- .../data-processing/competing-consumers.mdx | 166 +---- .../cookbook/data-processing/map-reduce.mdx | 166 +---- .../cookbook/data-processing/pipeline.mdx | 129 +--- .../data-processing/priority-queue.mdx | 139 +---- .../cookbook/data-processing/resequencer.mdx | 162 +---- .../data-processing/scatter-gather.mdx | 215 +------ .../cookbook/data-processing/splitter.mdx | 209 +------ docs/content/docs/cookbook/meta.json | 8 - .../docs/cookbook/notifications/fan-out.mdx | 262 +------- .../notifications/onboarding-drip.mdx | 105 +--- .../notifications/publish-subscribe.mdx | 178 +----- .../cookbook/notifications/recipient-list.mdx | 225 +------ .../notifications/scheduled-digest.mdx | 184 +----- .../notifications/wakeable-reminder.mdx | 135 +--- 
.../observability/correlation-identifier.mdx | 235 +------ .../cookbook/observability/event-sourcing.mdx | 277 +-------- .../observability/message-history.mdx | 527 +--------------- .../observability/namespaced-streams.mdx | 115 +--- .../docs/cookbook/observability/wire-tap.mdx | 227 +------ .../docs/cookbook/payments/choreography.mdx | 578 +---------------- .../cookbook/payments/guaranteed-delivery.mdx | 156 +---- .../cookbook/payments/idempotent-receiver.mdx | 159 +---- .../cookbook/payments/process-manager.mdx | 579 +----------------- docs/content/docs/cookbook/payments/saga.mdx | 320 +--------- .../payments/transactional-outbox.mdx | 135 +--- .../docs/cookbook/resilience/bulkhead.mdx | 173 +----- .../cookbook/resilience/circuit-breaker.mdx | 180 +----- .../cookbook/resilience/dead-letter-queue.mdx | 170 +---- .../cookbook/resilience/hedge-request.mdx | 155 +---- .../cookbook/resilience/retry-backoff.mdx | 144 +---- .../resilience/retryable-rate-limit.mdx | 119 +--- .../docs/cookbook/resilience/throttle.mdx | 157 +---- .../cookbook/routing/content-based-router.mdx | 244 +------- .../cookbook/routing/content-enricher.mdx | 318 +--------- docs/content/docs/cookbook/routing/detour.mdx | 170 +---- .../docs/cookbook/routing/message-filter.mdx | 234 +------ .../cookbook/routing/message-translator.mdx | 226 +------ .../docs/cookbook/routing/normalizer.mdx | 284 +-------- .../docs/cookbook/routing/routing-slip.mdx | 121 +--- .../cookbook/webhooks/async-request-reply.mdx | 217 +------ .../docs/cookbook/webhooks/claim-check.mdx | 72 +-- .../docs/cookbook/webhooks/event-gateway.mdx | 104 +--- .../docs/cookbook/webhooks/request-reply.mdx | 174 +----- .../docs/cookbook/webhooks/status-poller.mdx | 180 +----- .../docs/cookbook/webhooks/webhook-basics.mdx | 102 +-- docs/content/docs/meta.json | 1 - docs/geistdocs.tsx | 4 + docs/lib/geistdocs/cookbook-source.ts | 53 ++ .../.workflow-vitest/steps.mjs | 123 ++++ .../.workflow-vitest/steps.mjs.debug.json | 10 + 
.../.workflow-vitest/workflows.mjs | 212 +++++++ .../.workflow-vitest/workflows.mjs.debug.json | 6 + .../.workflow-vitest/steps.mjs | 151 +++++ .../.workflow-vitest/steps.mjs.debug.json | 10 + .../.workflow-vitest/workflows.mjs | 215 +++++++ .../.workflow-vitest/workflows.mjs.debug.json | 6 + .../.workflow-vitest/steps.mjs | 164 +++++ .../.workflow-vitest/steps.mjs.debug.json | 10 + .../.workflow-vitest/workflows.mjs | 204 ++++++ .../.workflow-vitest/workflows.mjs.debug.json | 6 + 70 files changed, 2010 insertions(+), 10008 deletions(-) create mode 100644 .workflow-vitest/workflows.mjs.debug.json create mode 100644 docs/app/[lang]/cookbooks/[[...slug]]/page.tsx create mode 100644 docs/app/[lang]/cookbooks/layout.tsx create mode 100644 docs/lib/geistdocs/cookbook-source.ts create mode 100644 tests/fixtures/workflow-skills/approval-expiry-escalation/.workflow-vitest/steps.mjs create mode 100644 tests/fixtures/workflow-skills/approval-expiry-escalation/.workflow-vitest/steps.mjs.debug.json create mode 100644 tests/fixtures/workflow-skills/approval-expiry-escalation/.workflow-vitest/workflows.mjs create mode 100644 tests/fixtures/workflow-skills/approval-expiry-escalation/.workflow-vitest/workflows.mjs.debug.json create mode 100644 tests/fixtures/workflow-skills/compensation-saga/.workflow-vitest/steps.mjs create mode 100644 tests/fixtures/workflow-skills/compensation-saga/.workflow-vitest/steps.mjs.debug.json create mode 100644 tests/fixtures/workflow-skills/compensation-saga/.workflow-vitest/workflows.mjs create mode 100644 tests/fixtures/workflow-skills/compensation-saga/.workflow-vitest/workflows.mjs.debug.json create mode 100644 tests/fixtures/workflow-skills/duplicate-webhook-order/.workflow-vitest/steps.mjs create mode 100644 tests/fixtures/workflow-skills/duplicate-webhook-order/.workflow-vitest/steps.mjs.debug.json create mode 100644 tests/fixtures/workflow-skills/duplicate-webhook-order/.workflow-vitest/workflows.mjs create mode 100644 
tests/fixtures/workflow-skills/duplicate-webhook-order/.workflow-vitest/workflows.mjs.debug.json diff --git a/.workflow-vitest/workflows.mjs.debug.json b/.workflow-vitest/workflows.mjs.debug.json new file mode 100644 index 0000000000..7c8b2b1c3a --- /dev/null +++ b/.workflow-vitest/workflows.mjs.debug.json @@ -0,0 +1,495 @@ +{ + "workflowFiles": [ + "/Users/johnlindquist/dev/workflow/.claude/worktrees/api-audit-01/docs/app/[lang]/(home)/components/implementation.tsx", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/api-audit-01/docs/app/[lang]/(home)/components/intro/intro.tsx", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/api-audit-01/docs/app/[lang]/(home)/components/run-anywhere.tsx", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/api-audit-01/docs/app/[lang]/(home)/components/use-cases-server.tsx", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/api-audit-01/packages/core/e2e/build-errors.test.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/api-audit-01/packages/core/e2e/dev.test.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/api-audit-01/packages/swc-plugin-workflow/transform/tests/errors/conflicting-directives/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/api-audit-01/packages/swc-plugin-workflow/transform/tests/errors/conflicting-directives/output-step.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/api-audit-01/packages/swc-plugin-workflow/transform/tests/errors/conflicting-directives/output-workflow.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/api-audit-01/packages/swc-plugin-workflow/transform/tests/errors/instance-methods/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/api-audit-01/packages/swc-plugin-workflow/transform/tests/errors/instance-methods/output-client.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/api-audit-01/packages/swc-plugin-workflow/transform/tests/errors/instance-methods/output-step.js", + 
"/Users/johnlindquist/dev/workflow/.claude/worktrees/api-audit-01/packages/swc-plugin-workflow/transform/tests/errors/instance-methods/output-workflow.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/api-audit-01/packages/swc-plugin-workflow/transform/tests/errors/misplaced-function-directive/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/api-audit-01/packages/swc-plugin-workflow/transform/tests/errors/misplaced-function-directive/output-client.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/api-audit-01/packages/swc-plugin-workflow/transform/tests/errors/misplaced-function-directive/output-step.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/api-audit-01/packages/swc-plugin-workflow/transform/tests/errors/misplaced-function-directive/output-workflow.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/api-audit-01/packages/swc-plugin-workflow/transform/tests/errors/non-async-functions/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/api-audit-01/packages/swc-plugin-workflow/transform/tests/errors/non-async-functions/output-client.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/api-audit-01/packages/swc-plugin-workflow/transform/tests/errors/non-async-functions/output-step.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/api-audit-01/packages/swc-plugin-workflow/transform/tests/errors/non-async-functions/output-workflow.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/api-audit-01/packages/swc-plugin-workflow/transform/tests/fixture/anonymous-default-workflow/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/api-audit-01/packages/swc-plugin-workflow/transform/tests/fixture/default-arrow-workflow/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/api-audit-01/packages/swc-plugin-workflow/transform/tests/fixture/default-parameter-usage/input.js", + 
"/Users/johnlindquist/dev/workflow/.claude/worktrees/api-audit-01/packages/swc-plugin-workflow/transform/tests/fixture/default-workflow-collision/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/api-audit-01/packages/swc-plugin-workflow/transform/tests/fixture/expr-fn-default-workflow/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/api-audit-01/packages/swc-plugin-workflow/transform/tests/fixture/mixed-functions/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/api-audit-01/packages/swc-plugin-workflow/transform/tests/fixture/module-level-workflow/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/api-audit-01/packages/swc-plugin-workflow/transform/tests/fixture/nested-step-in-workflow/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/api-audit-01/packages/swc-plugin-workflow/transform/tests/fixture/nested-step-with-closure/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/api-audit-01/packages/swc-plugin-workflow/transform/tests/fixture/nested-steps-in-object-constructor/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/api-audit-01/packages/swc-plugin-workflow/transform/tests/fixture/separate-export-statement/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/api-audit-01/packages/swc-plugin-workflow/transform/tests/fixture/single-workflow/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/api-audit-01/packages/swc-plugin-workflow/transform/tests/fixture/static-method-workflow/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/api-audit-01/packages/swc-plugin-workflow/transform/tests/fixture/user-named-dunder-default/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/api-audit-01/packages/swc-plugin-workflow/transform/tests/fixture/using-declaration-workflow/input.js", + 
"/Users/johnlindquist/dev/workflow/.claude/worktrees/api-audit-01/packages/swc-plugin-workflow/transform/tests/fixture/workflow-arrow-function/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/api-audit-01/packages/swc-plugin-workflow/transform/tests/fixture/workflow-client-property/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/api-audit-01/packages/typescript-plugin/src/code-fixes.test.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/api-audit-01/packages/typescript-plugin/src/diagnostics.test.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/api-audit-01/packages/typescript-plugin/src/hover.test.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/api-audit-01/packages/typescript-plugin/src/utils.test.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/api-audit-01/packages/world-testing/workflows/addition.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/api-audit-01/packages/world-testing/workflows/hooks.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/api-audit-01/packages/world-testing/workflows/noop.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/api-audit-01/packages/world-testing/workflows/null-byte.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/api-audit-01/packages/world-testing/workflows/retriable-and-fatal.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/api-audit-01/workbench/example/workflows/0_workflow_only.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/api-audit-01/workbench/example/workflows/10_single_stmt_control_flow.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/api-audit-01/workbench/example/workflows/1_simple.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/api-audit-01/workbench/example/workflows/2_control_flow.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/api-audit-01/workbench/example/workflows/3_streams.ts", + 
"/Users/johnlindquist/dev/workflow/.claude/worktrees/api-audit-01/workbench/example/workflows/4_ai.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/api-audit-01/workbench/example/workflows/5_hooks.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/api-audit-01/workbench/example/workflows/6_batching.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/api-audit-01/workbench/example/workflows/7_full.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/api-audit-01/workbench/example/workflows/97_bench.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/api-audit-01/workbench/example/workflows/98_duplicate_case.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/api-audit-01/workbench/example/workflows/99_e2e.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/api-audit-01/workbench/nextjs-turbopack/app/.well-known/agent/v1/steps.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/api-audit-01/workbench/nextjs-turbopack/workflows/8_react_render.tsx", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/api-audit-01/workbench/nextjs-turbopack/workflows/96_many_steps.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/api-audit-01/workbench/nextjs-webpack/app/.well-known/agent/v1/steps.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/api-audit-01/workbench/nextjs-webpack/workflows/8_react_render.tsx", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/api-audit-01/workbench/nitro-v3/workflows/0_demo.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/api-audit-01/workbench/sveltekit/src/workflows/0_calc.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/api-audit-01/workbench/sveltekit/src/workflows/user-signup.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/api-audit-01/workbench/swc-playground/components/swc-playground.tsx", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/fix-skills-paths/docs/app/[lang]/(home)/components/implementation.tsx", + 
"/Users/johnlindquist/dev/workflow/.claude/worktrees/fix-skills-paths/docs/app/[lang]/(home)/components/intro/intro.tsx", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/fix-skills-paths/docs/app/[lang]/(home)/components/run-anywhere.tsx", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/fix-skills-paths/docs/app/[lang]/(home)/components/use-cases-server.tsx", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/fix-skills-paths/packages/core/e2e/build-errors.test.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/fix-skills-paths/packages/core/e2e/dev.test.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/fix-skills-paths/packages/swc-plugin-workflow/transform/tests/errors/conflicting-directives/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/fix-skills-paths/packages/swc-plugin-workflow/transform/tests/errors/conflicting-directives/output-step.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/fix-skills-paths/packages/swc-plugin-workflow/transform/tests/errors/conflicting-directives/output-workflow.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/fix-skills-paths/packages/swc-plugin-workflow/transform/tests/errors/instance-methods/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/fix-skills-paths/packages/swc-plugin-workflow/transform/tests/errors/instance-methods/output-client.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/fix-skills-paths/packages/swc-plugin-workflow/transform/tests/errors/instance-methods/output-step.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/fix-skills-paths/packages/swc-plugin-workflow/transform/tests/errors/instance-methods/output-workflow.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/fix-skills-paths/packages/swc-plugin-workflow/transform/tests/errors/misplaced-function-directive/input.js", + 
"/Users/johnlindquist/dev/workflow/.claude/worktrees/fix-skills-paths/packages/swc-plugin-workflow/transform/tests/errors/misplaced-function-directive/output-client.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/fix-skills-paths/packages/swc-plugin-workflow/transform/tests/errors/misplaced-function-directive/output-step.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/fix-skills-paths/packages/swc-plugin-workflow/transform/tests/errors/misplaced-function-directive/output-workflow.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/fix-skills-paths/packages/swc-plugin-workflow/transform/tests/errors/non-async-functions/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/fix-skills-paths/packages/swc-plugin-workflow/transform/tests/errors/non-async-functions/output-client.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/fix-skills-paths/packages/swc-plugin-workflow/transform/tests/errors/non-async-functions/output-step.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/fix-skills-paths/packages/swc-plugin-workflow/transform/tests/errors/non-async-functions/output-workflow.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/fix-skills-paths/packages/swc-plugin-workflow/transform/tests/fixture/anonymous-default-workflow/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/fix-skills-paths/packages/swc-plugin-workflow/transform/tests/fixture/default-arrow-workflow/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/fix-skills-paths/packages/swc-plugin-workflow/transform/tests/fixture/default-parameter-usage/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/fix-skills-paths/packages/swc-plugin-workflow/transform/tests/fixture/default-workflow-collision/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/fix-skills-paths/packages/swc-plugin-workflow/transform/tests/fixture/expr-fn-default-workflow/input.js", + 
"/Users/johnlindquist/dev/workflow/.claude/worktrees/fix-skills-paths/packages/swc-plugin-workflow/transform/tests/fixture/mixed-functions/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/fix-skills-paths/packages/swc-plugin-workflow/transform/tests/fixture/module-level-workflow/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/fix-skills-paths/packages/swc-plugin-workflow/transform/tests/fixture/nested-step-in-workflow/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/fix-skills-paths/packages/swc-plugin-workflow/transform/tests/fixture/nested-step-with-closure/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/fix-skills-paths/packages/swc-plugin-workflow/transform/tests/fixture/nested-steps-in-object-constructor/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/fix-skills-paths/packages/swc-plugin-workflow/transform/tests/fixture/separate-export-statement/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/fix-skills-paths/packages/swc-plugin-workflow/transform/tests/fixture/single-workflow/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/fix-skills-paths/packages/swc-plugin-workflow/transform/tests/fixture/static-method-workflow/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/fix-skills-paths/packages/swc-plugin-workflow/transform/tests/fixture/user-named-dunder-default/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/fix-skills-paths/packages/swc-plugin-workflow/transform/tests/fixture/using-declaration-workflow/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/fix-skills-paths/packages/swc-plugin-workflow/transform/tests/fixture/workflow-arrow-function/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/fix-skills-paths/packages/swc-plugin-workflow/transform/tests/fixture/workflow-client-property/input.js", + 
"/Users/johnlindquist/dev/workflow/.claude/worktrees/fix-skills-paths/packages/typescript-plugin/src/code-fixes.test.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/fix-skills-paths/packages/typescript-plugin/src/diagnostics.test.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/fix-skills-paths/packages/typescript-plugin/src/hover.test.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/fix-skills-paths/packages/typescript-plugin/src/utils.test.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/fix-skills-paths/packages/world-testing/workflows/addition.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/fix-skills-paths/packages/world-testing/workflows/hooks.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/fix-skills-paths/packages/world-testing/workflows/noop.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/fix-skills-paths/packages/world-testing/workflows/null-byte.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/fix-skills-paths/packages/world-testing/workflows/retriable-and-fatal.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/fix-skills-paths/workbench/example/workflows/0_workflow_only.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/fix-skills-paths/workbench/example/workflows/10_single_stmt_control_flow.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/fix-skills-paths/workbench/example/workflows/1_simple.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/fix-skills-paths/workbench/example/workflows/2_control_flow.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/fix-skills-paths/workbench/example/workflows/3_streams.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/fix-skills-paths/workbench/example/workflows/4_ai.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/fix-skills-paths/workbench/example/workflows/5_hooks.ts", + 
"/Users/johnlindquist/dev/workflow/.claude/worktrees/fix-skills-paths/workbench/example/workflows/6_batching.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/fix-skills-paths/workbench/example/workflows/7_full.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/fix-skills-paths/workbench/example/workflows/97_bench.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/fix-skills-paths/workbench/example/workflows/98_duplicate_case.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/fix-skills-paths/workbench/example/workflows/99_e2e.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/fix-skills-paths/workbench/nextjs-turbopack/workflows/8_react_render.tsx", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/fix-skills-paths/workbench/nextjs-turbopack/workflows/96_many_steps.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/fix-skills-paths/workbench/nextjs-webpack/workflows/8_react_render.tsx", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/fix-skills-paths/workbench/nitro-v3/workflows/0_demo.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/fix-skills-paths/workbench/sveltekit/src/workflows/0_calc.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/fix-skills-paths/workbench/sveltekit/src/workflows/user-signup.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/fix-skills-paths/workbench/swc-playground/components/swc-playground.tsx", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1108/docs/app/[lang]/(home)/components/implementation.tsx", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1108/docs/app/[lang]/(home)/components/intro/intro.tsx", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1108/docs/app/[lang]/(home)/components/run-anywhere.tsx", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1108/docs/app/[lang]/(home)/components/use-cases-server.tsx", + 
"/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1108/packages/core/e2e/build-errors.test.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1108/packages/core/e2e/dev.test.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1108/packages/swc-plugin-workflow/transform/tests/errors/conflicting-directives/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1108/packages/swc-plugin-workflow/transform/tests/errors/conflicting-directives/output-step.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1108/packages/swc-plugin-workflow/transform/tests/errors/conflicting-directives/output-workflow.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1108/packages/swc-plugin-workflow/transform/tests/errors/instance-methods/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1108/packages/swc-plugin-workflow/transform/tests/errors/instance-methods/output-client.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1108/packages/swc-plugin-workflow/transform/tests/errors/instance-methods/output-step.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1108/packages/swc-plugin-workflow/transform/tests/errors/instance-methods/output-workflow.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1108/packages/swc-plugin-workflow/transform/tests/errors/misplaced-function-directive/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1108/packages/swc-plugin-workflow/transform/tests/errors/misplaced-function-directive/output-client.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1108/packages/swc-plugin-workflow/transform/tests/errors/misplaced-function-directive/output-step.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1108/packages/swc-plugin-workflow/transform/tests/errors/misplaced-function-directive/output-workflow.js", + 
"/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1108/packages/swc-plugin-workflow/transform/tests/errors/non-async-functions/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1108/packages/swc-plugin-workflow/transform/tests/errors/non-async-functions/output-client.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1108/packages/swc-plugin-workflow/transform/tests/errors/non-async-functions/output-step.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1108/packages/swc-plugin-workflow/transform/tests/errors/non-async-functions/output-workflow.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1108/packages/swc-plugin-workflow/transform/tests/fixture/anonymous-default-workflow/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1108/packages/swc-plugin-workflow/transform/tests/fixture/default-arrow-workflow/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1108/packages/swc-plugin-workflow/transform/tests/fixture/default-parameter-usage/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1108/packages/swc-plugin-workflow/transform/tests/fixture/default-workflow-collision/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1108/packages/swc-plugin-workflow/transform/tests/fixture/expr-fn-default-workflow/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1108/packages/swc-plugin-workflow/transform/tests/fixture/mixed-functions/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1108/packages/swc-plugin-workflow/transform/tests/fixture/module-level-workflow/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1108/packages/swc-plugin-workflow/transform/tests/fixture/nested-step-in-workflow/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1108/packages/swc-plugin-workflow/transform/tests/fixture/nested-step-with-closure/input.js", + 
"/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1108/packages/swc-plugin-workflow/transform/tests/fixture/nested-steps-in-object-constructor/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1108/packages/swc-plugin-workflow/transform/tests/fixture/separate-export-statement/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1108/packages/swc-plugin-workflow/transform/tests/fixture/single-workflow/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1108/packages/swc-plugin-workflow/transform/tests/fixture/static-method-workflow/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1108/packages/swc-plugin-workflow/transform/tests/fixture/user-named-dunder-default/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1108/packages/swc-plugin-workflow/transform/tests/fixture/using-declaration-workflow/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1108/packages/swc-plugin-workflow/transform/tests/fixture/workflow-arrow-function/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1108/packages/swc-plugin-workflow/transform/tests/fixture/workflow-client-property/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1108/packages/typescript-plugin/src/code-fixes.test.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1108/packages/typescript-plugin/src/diagnostics.test.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1108/packages/typescript-plugin/src/hover.test.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1108/packages/typescript-plugin/src/utils.test.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1108/packages/world-testing/workflows/addition.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1108/packages/world-testing/workflows/hooks.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1108/packages/world-testing/workflows/noop.ts", + 
"/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1108/packages/world-testing/workflows/null-byte.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1108/packages/world-testing/workflows/retriable-and-fatal.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1108/workbench/example/workflows/0_workflow_only.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1108/workbench/example/workflows/10_single_stmt_control_flow.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1108/workbench/example/workflows/1_simple.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1108/workbench/example/workflows/2_control_flow.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1108/workbench/example/workflows/3_streams.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1108/workbench/example/workflows/4_ai.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1108/workbench/example/workflows/5_hooks.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1108/workbench/example/workflows/6_batching.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1108/workbench/example/workflows/7_full.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1108/workbench/example/workflows/97_bench.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1108/workbench/example/workflows/98_duplicate_case.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1108/workbench/example/workflows/99_e2e.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1108/workbench/nextjs-turbopack/workflows/8_react_render.tsx", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1108/workbench/nextjs-turbopack/workflows/96_many_steps.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1108/workbench/nextjs-webpack/workflows/8_react_render.tsx", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1108/workbench/nitro-v3/workflows/0_demo.ts", + 
"/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1108/workbench/sveltekit/src/workflows/0_calc.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1108/workbench/sveltekit/src/workflows/user-signup.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1108/workbench/swc-playground/components/swc-playground.tsx", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1111/docs/app/[lang]/(home)/components/implementation.tsx", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1111/docs/app/[lang]/(home)/components/intro/intro.tsx", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1111/docs/app/[lang]/(home)/components/run-anywhere.tsx", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1111/docs/app/[lang]/(home)/components/use-cases-server.tsx", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1111/packages/core/e2e/build-errors.test.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1111/packages/core/e2e/dev.test.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1111/packages/swc-plugin-workflow/transform/tests/errors/conflicting-directives/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1111/packages/swc-plugin-workflow/transform/tests/errors/conflicting-directives/output-step.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1111/packages/swc-plugin-workflow/transform/tests/errors/conflicting-directives/output-workflow.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1111/packages/swc-plugin-workflow/transform/tests/errors/instance-methods/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1111/packages/swc-plugin-workflow/transform/tests/errors/instance-methods/output-client.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1111/packages/swc-plugin-workflow/transform/tests/errors/instance-methods/output-step.js", + 
"/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1111/packages/swc-plugin-workflow/transform/tests/errors/instance-methods/output-workflow.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1111/packages/swc-plugin-workflow/transform/tests/errors/misplaced-function-directive/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1111/packages/swc-plugin-workflow/transform/tests/errors/misplaced-function-directive/output-client.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1111/packages/swc-plugin-workflow/transform/tests/errors/misplaced-function-directive/output-step.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1111/packages/swc-plugin-workflow/transform/tests/errors/misplaced-function-directive/output-workflow.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1111/packages/swc-plugin-workflow/transform/tests/errors/non-async-functions/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1111/packages/swc-plugin-workflow/transform/tests/errors/non-async-functions/output-client.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1111/packages/swc-plugin-workflow/transform/tests/errors/non-async-functions/output-step.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1111/packages/swc-plugin-workflow/transform/tests/errors/non-async-functions/output-workflow.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1111/packages/swc-plugin-workflow/transform/tests/fixture/anonymous-default-workflow/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1111/packages/swc-plugin-workflow/transform/tests/fixture/default-arrow-workflow/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1111/packages/swc-plugin-workflow/transform/tests/fixture/default-parameter-usage/input.js", + 
"/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1111/packages/swc-plugin-workflow/transform/tests/fixture/default-workflow-collision/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1111/packages/swc-plugin-workflow/transform/tests/fixture/expr-fn-default-workflow/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1111/packages/swc-plugin-workflow/transform/tests/fixture/mixed-functions/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1111/packages/swc-plugin-workflow/transform/tests/fixture/module-level-workflow/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1111/packages/swc-plugin-workflow/transform/tests/fixture/nested-step-in-workflow/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1111/packages/swc-plugin-workflow/transform/tests/fixture/nested-step-with-closure/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1111/packages/swc-plugin-workflow/transform/tests/fixture/nested-steps-in-object-constructor/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1111/packages/swc-plugin-workflow/transform/tests/fixture/separate-export-statement/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1111/packages/swc-plugin-workflow/transform/tests/fixture/single-workflow/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1111/packages/swc-plugin-workflow/transform/tests/fixture/static-method-workflow/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1111/packages/swc-plugin-workflow/transform/tests/fixture/user-named-dunder-default/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1111/packages/swc-plugin-workflow/transform/tests/fixture/using-declaration-workflow/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1111/packages/swc-plugin-workflow/transform/tests/fixture/workflow-arrow-function/input.js", + 
"/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1111/packages/swc-plugin-workflow/transform/tests/fixture/workflow-client-property/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1111/packages/typescript-plugin/src/code-fixes.test.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1111/packages/typescript-plugin/src/diagnostics.test.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1111/packages/typescript-plugin/src/hover.test.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1111/packages/typescript-plugin/src/utils.test.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1111/packages/world-testing/workflows/addition.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1111/packages/world-testing/workflows/hooks.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1111/packages/world-testing/workflows/noop.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1111/packages/world-testing/workflows/null-byte.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1111/packages/world-testing/workflows/retriable-and-fatal.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1111/workbench/example/workflows/0_workflow_only.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1111/workbench/example/workflows/10_single_stmt_control_flow.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1111/workbench/example/workflows/1_simple.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1111/workbench/example/workflows/2_control_flow.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1111/workbench/example/workflows/3_streams.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1111/workbench/example/workflows/4_ai.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1111/workbench/example/workflows/5_hooks.ts", + 
"/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1111/workbench/example/workflows/6_batching.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1111/workbench/example/workflows/7_full.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1111/workbench/example/workflows/97_bench.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1111/workbench/example/workflows/98_duplicate_case.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1111/workbench/example/workflows/99_e2e.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1111/workbench/nextjs-turbopack/workflows/8_react_render.tsx", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1111/workbench/nextjs-turbopack/workflows/96_many_steps.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1111/workbench/nextjs-webpack/workflows/8_react_render.tsx", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1111/workbench/nitro-v3/workflows/0_demo.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1111/workbench/sveltekit/src/workflows/0_calc.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1111/workbench/sveltekit/src/workflows/user-signup.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1111/workbench/swc-playground/components/swc-playground.tsx", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pricing-adjustment/docs/app/[lang]/(home)/components/implementation.tsx", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pricing-adjustment/docs/app/[lang]/(home)/components/intro/intro.tsx", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pricing-adjustment/docs/app/[lang]/(home)/components/run-anywhere.tsx", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pricing-adjustment/docs/app/[lang]/(home)/components/use-cases-server.tsx", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pricing-adjustment/packages/core/e2e/build-errors.test.ts", + 
"/Users/johnlindquist/dev/workflow/.claude/worktrees/pricing-adjustment/packages/core/e2e/dev.test.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pricing-adjustment/packages/swc-plugin-workflow/transform/tests/errors/conflicting-directives/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pricing-adjustment/packages/swc-plugin-workflow/transform/tests/errors/conflicting-directives/output-step.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pricing-adjustment/packages/swc-plugin-workflow/transform/tests/errors/conflicting-directives/output-workflow.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pricing-adjustment/packages/swc-plugin-workflow/transform/tests/errors/instance-methods/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pricing-adjustment/packages/swc-plugin-workflow/transform/tests/errors/instance-methods/output-client.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pricing-adjustment/packages/swc-plugin-workflow/transform/tests/errors/instance-methods/output-step.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pricing-adjustment/packages/swc-plugin-workflow/transform/tests/errors/instance-methods/output-workflow.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pricing-adjustment/packages/swc-plugin-workflow/transform/tests/errors/misplaced-function-directive/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pricing-adjustment/packages/swc-plugin-workflow/transform/tests/errors/misplaced-function-directive/output-client.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pricing-adjustment/packages/swc-plugin-workflow/transform/tests/errors/misplaced-function-directive/output-step.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pricing-adjustment/packages/swc-plugin-workflow/transform/tests/errors/misplaced-function-directive/output-workflow.js", + 
"/Users/johnlindquist/dev/workflow/.claude/worktrees/pricing-adjustment/packages/swc-plugin-workflow/transform/tests/errors/non-async-functions/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pricing-adjustment/packages/swc-plugin-workflow/transform/tests/errors/non-async-functions/output-client.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pricing-adjustment/packages/swc-plugin-workflow/transform/tests/errors/non-async-functions/output-step.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pricing-adjustment/packages/swc-plugin-workflow/transform/tests/errors/non-async-functions/output-workflow.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pricing-adjustment/packages/swc-plugin-workflow/transform/tests/fixture/anonymous-default-workflow/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pricing-adjustment/packages/swc-plugin-workflow/transform/tests/fixture/default-arrow-workflow/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pricing-adjustment/packages/swc-plugin-workflow/transform/tests/fixture/default-parameter-usage/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pricing-adjustment/packages/swc-plugin-workflow/transform/tests/fixture/default-workflow-collision/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pricing-adjustment/packages/swc-plugin-workflow/transform/tests/fixture/expr-fn-default-workflow/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pricing-adjustment/packages/swc-plugin-workflow/transform/tests/fixture/mixed-functions/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pricing-adjustment/packages/swc-plugin-workflow/transform/tests/fixture/module-level-workflow/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pricing-adjustment/packages/swc-plugin-workflow/transform/tests/fixture/nested-step-in-workflow/input.js", + 
"/Users/johnlindquist/dev/workflow/.claude/worktrees/pricing-adjustment/packages/swc-plugin-workflow/transform/tests/fixture/nested-step-with-closure/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pricing-adjustment/packages/swc-plugin-workflow/transform/tests/fixture/nested-steps-in-object-constructor/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pricing-adjustment/packages/swc-plugin-workflow/transform/tests/fixture/separate-export-statement/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pricing-adjustment/packages/swc-plugin-workflow/transform/tests/fixture/single-workflow/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pricing-adjustment/packages/swc-plugin-workflow/transform/tests/fixture/static-method-workflow/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pricing-adjustment/packages/swc-plugin-workflow/transform/tests/fixture/user-named-dunder-default/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pricing-adjustment/packages/swc-plugin-workflow/transform/tests/fixture/using-declaration-workflow/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pricing-adjustment/packages/swc-plugin-workflow/transform/tests/fixture/workflow-arrow-function/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pricing-adjustment/packages/swc-plugin-workflow/transform/tests/fixture/workflow-client-property/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pricing-adjustment/packages/typescript-plugin/src/code-fixes.test.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pricing-adjustment/packages/typescript-plugin/src/diagnostics.test.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pricing-adjustment/packages/typescript-plugin/src/hover.test.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pricing-adjustment/packages/typescript-plugin/src/utils.test.ts", + 
"/Users/johnlindquist/dev/workflow/.claude/worktrees/pricing-adjustment/packages/world-testing/workflows/addition.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pricing-adjustment/packages/world-testing/workflows/hooks.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pricing-adjustment/packages/world-testing/workflows/noop.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pricing-adjustment/packages/world-testing/workflows/null-byte.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pricing-adjustment/packages/world-testing/workflows/retriable-and-fatal.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pricing-adjustment/workbench/example/workflows/0_workflow_only.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pricing-adjustment/workbench/example/workflows/10_single_stmt_control_flow.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pricing-adjustment/workbench/example/workflows/1_simple.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pricing-adjustment/workbench/example/workflows/2_control_flow.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pricing-adjustment/workbench/example/workflows/3_streams.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pricing-adjustment/workbench/example/workflows/4_ai.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pricing-adjustment/workbench/example/workflows/5_hooks.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pricing-adjustment/workbench/example/workflows/6_batching.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pricing-adjustment/workbench/example/workflows/7_full.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pricing-adjustment/workbench/example/workflows/97_bench.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pricing-adjustment/workbench/example/workflows/98_duplicate_case.ts", + 
"/Users/johnlindquist/dev/workflow/.claude/worktrees/pricing-adjustment/workbench/example/workflows/99_e2e.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pricing-adjustment/workbench/nextjs-turbopack/workflows/8_react_render.tsx", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pricing-adjustment/workbench/nextjs-turbopack/workflows/96_many_steps.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pricing-adjustment/workbench/nextjs-webpack/workflows/8_react_render.tsx", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pricing-adjustment/workbench/nitro-v3/workflows/0_demo.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pricing-adjustment/workbench/sveltekit/src/workflows/0_calc.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pricing-adjustment/workbench/sveltekit/src/workflows/user-signup.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pricing-adjustment/workbench/swc-playground/components/swc-playground.tsx", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/swift-jumping-pizza/docs/app/[lang]/(home)/components/implementation.tsx", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/swift-jumping-pizza/docs/app/[lang]/(home)/components/intro/intro.tsx", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/swift-jumping-pizza/docs/app/[lang]/(home)/components/run-anywhere.tsx", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/swift-jumping-pizza/docs/app/[lang]/(home)/components/use-cases-server.tsx", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/swift-jumping-pizza/packages/core/e2e/build-errors.test.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/swift-jumping-pizza/packages/core/e2e/dev.test.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/swift-jumping-pizza/packages/swc-plugin-workflow/transform/tests/errors/conflicting-directives/input.js", + 
"/Users/johnlindquist/dev/workflow/.claude/worktrees/swift-jumping-pizza/packages/swc-plugin-workflow/transform/tests/errors/conflicting-directives/output-step.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/swift-jumping-pizza/packages/swc-plugin-workflow/transform/tests/errors/conflicting-directives/output-workflow.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/swift-jumping-pizza/packages/swc-plugin-workflow/transform/tests/errors/instance-methods/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/swift-jumping-pizza/packages/swc-plugin-workflow/transform/tests/errors/instance-methods/output-client.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/swift-jumping-pizza/packages/swc-plugin-workflow/transform/tests/errors/instance-methods/output-step.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/swift-jumping-pizza/packages/swc-plugin-workflow/transform/tests/errors/instance-methods/output-workflow.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/swift-jumping-pizza/packages/swc-plugin-workflow/transform/tests/errors/misplaced-function-directive/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/swift-jumping-pizza/packages/swc-plugin-workflow/transform/tests/errors/misplaced-function-directive/output-client.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/swift-jumping-pizza/packages/swc-plugin-workflow/transform/tests/errors/misplaced-function-directive/output-step.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/swift-jumping-pizza/packages/swc-plugin-workflow/transform/tests/errors/misplaced-function-directive/output-workflow.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/swift-jumping-pizza/packages/swc-plugin-workflow/transform/tests/errors/non-async-functions/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/swift-jumping-pizza/packages/swc-plugin-workflow/transform/tests/errors/non-async-functions/output-client.js", + 
"/Users/johnlindquist/dev/workflow/.claude/worktrees/swift-jumping-pizza/packages/swc-plugin-workflow/transform/tests/errors/non-async-functions/output-step.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/swift-jumping-pizza/packages/swc-plugin-workflow/transform/tests/errors/non-async-functions/output-workflow.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/swift-jumping-pizza/packages/swc-plugin-workflow/transform/tests/fixture/anonymous-default-workflow/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/swift-jumping-pizza/packages/swc-plugin-workflow/transform/tests/fixture/default-arrow-workflow/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/swift-jumping-pizza/packages/swc-plugin-workflow/transform/tests/fixture/default-parameter-usage/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/swift-jumping-pizza/packages/swc-plugin-workflow/transform/tests/fixture/default-workflow-collision/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/swift-jumping-pizza/packages/swc-plugin-workflow/transform/tests/fixture/expr-fn-default-workflow/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/swift-jumping-pizza/packages/swc-plugin-workflow/transform/tests/fixture/mixed-functions/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/swift-jumping-pizza/packages/swc-plugin-workflow/transform/tests/fixture/module-level-workflow/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/swift-jumping-pizza/packages/swc-plugin-workflow/transform/tests/fixture/nested-step-in-workflow/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/swift-jumping-pizza/packages/swc-plugin-workflow/transform/tests/fixture/nested-step-with-closure/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/swift-jumping-pizza/packages/swc-plugin-workflow/transform/tests/fixture/nested-steps-in-object-constructor/input.js", + 
"/Users/johnlindquist/dev/workflow/.claude/worktrees/swift-jumping-pizza/packages/swc-plugin-workflow/transform/tests/fixture/separate-export-statement/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/swift-jumping-pizza/packages/swc-plugin-workflow/transform/tests/fixture/single-workflow/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/swift-jumping-pizza/packages/swc-plugin-workflow/transform/tests/fixture/static-method-workflow/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/swift-jumping-pizza/packages/swc-plugin-workflow/transform/tests/fixture/user-named-dunder-default/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/swift-jumping-pizza/packages/swc-plugin-workflow/transform/tests/fixture/using-declaration-workflow/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/swift-jumping-pizza/packages/swc-plugin-workflow/transform/tests/fixture/workflow-arrow-function/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/swift-jumping-pizza/packages/swc-plugin-workflow/transform/tests/fixture/workflow-client-property/input.js", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/swift-jumping-pizza/packages/typescript-plugin/src/code-fixes.test.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/swift-jumping-pizza/packages/typescript-plugin/src/diagnostics.test.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/swift-jumping-pizza/packages/typescript-plugin/src/hover.test.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/swift-jumping-pizza/packages/typescript-plugin/src/utils.test.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/swift-jumping-pizza/packages/world-testing/workflows/addition.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/swift-jumping-pizza/packages/world-testing/workflows/hooks.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/swift-jumping-pizza/packages/world-testing/workflows/noop.ts", + 
"/Users/johnlindquist/dev/workflow/.claude/worktrees/swift-jumping-pizza/packages/world-testing/workflows/null-byte.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/swift-jumping-pizza/packages/world-testing/workflows/retriable-and-fatal.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/swift-jumping-pizza/workbench/example/workflows/0_workflow_only.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/swift-jumping-pizza/workbench/example/workflows/10_single_stmt_control_flow.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/swift-jumping-pizza/workbench/example/workflows/1_simple.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/swift-jumping-pizza/workbench/example/workflows/2_control_flow.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/swift-jumping-pizza/workbench/example/workflows/3_streams.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/swift-jumping-pizza/workbench/example/workflows/4_ai.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/swift-jumping-pizza/workbench/example/workflows/5_hooks.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/swift-jumping-pizza/workbench/example/workflows/6_batching.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/swift-jumping-pizza/workbench/example/workflows/7_full.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/swift-jumping-pizza/workbench/example/workflows/97_bench.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/swift-jumping-pizza/workbench/example/workflows/98_duplicate_case.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/swift-jumping-pizza/workbench/example/workflows/99_e2e.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/swift-jumping-pizza/workbench/nextjs-turbopack/workflows/8_react_render.tsx", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/swift-jumping-pizza/workbench/nextjs-turbopack/workflows/96_many_steps.ts", + 
"/Users/johnlindquist/dev/workflow/.claude/worktrees/swift-jumping-pizza/workbench/nextjs-webpack/workflows/8_react_render.tsx", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/swift-jumping-pizza/workbench/nitro-v3/workflows/0_demo.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/swift-jumping-pizza/workbench/sveltekit/src/workflows/0_calc.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/swift-jumping-pizza/workbench/sveltekit/src/workflows/user-signup.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/swift-jumping-pizza/workbench/swc-playground/components/swc-playground.tsx", + "/Users/johnlindquist/dev/workflow/docs/app/[lang]/(home)/components/implementation.tsx", + "/Users/johnlindquist/dev/workflow/docs/app/[lang]/(home)/components/intro/intro.tsx", + "/Users/johnlindquist/dev/workflow/docs/app/[lang]/(home)/components/run-anywhere.tsx", + "/Users/johnlindquist/dev/workflow/docs/app/[lang]/(home)/components/use-cases-server.tsx", + "/Users/johnlindquist/dev/workflow/packages/builders/dist/discover-entries-esbuild-plugin.test.js", + "/Users/johnlindquist/dev/workflow/packages/builders/dist/swc-esbuild-plugin.test.js", + "/Users/johnlindquist/dev/workflow/packages/builders/src/discover-entries-esbuild-plugin.test.ts", + "/Users/johnlindquist/dev/workflow/packages/builders/src/swc-esbuild-plugin.test.ts", + "/Users/johnlindquist/dev/workflow/packages/core/e2e/build-errors.test.ts", + "/Users/johnlindquist/dev/workflow/packages/core/e2e/dev.test.ts", + "/Users/johnlindquist/dev/workflow/packages/swc-plugin-workflow/transform/tests/errors/conflicting-directives/input.js", + "/Users/johnlindquist/dev/workflow/packages/swc-plugin-workflow/transform/tests/errors/conflicting-directives/output-step.js", + "/Users/johnlindquist/dev/workflow/packages/swc-plugin-workflow/transform/tests/errors/conflicting-directives/output-workflow.js", + 
"/Users/johnlindquist/dev/workflow/packages/swc-plugin-workflow/transform/tests/errors/instance-methods/input.js", + "/Users/johnlindquist/dev/workflow/packages/swc-plugin-workflow/transform/tests/errors/instance-methods/output-client.js", + "/Users/johnlindquist/dev/workflow/packages/swc-plugin-workflow/transform/tests/errors/instance-methods/output-step.js", + "/Users/johnlindquist/dev/workflow/packages/swc-plugin-workflow/transform/tests/errors/instance-methods/output-workflow.js", + "/Users/johnlindquist/dev/workflow/packages/swc-plugin-workflow/transform/tests/errors/misplaced-function-directive/input.js", + "/Users/johnlindquist/dev/workflow/packages/swc-plugin-workflow/transform/tests/errors/misplaced-function-directive/output-client.js", + "/Users/johnlindquist/dev/workflow/packages/swc-plugin-workflow/transform/tests/errors/misplaced-function-directive/output-step.js", + "/Users/johnlindquist/dev/workflow/packages/swc-plugin-workflow/transform/tests/errors/misplaced-function-directive/output-workflow.js", + "/Users/johnlindquist/dev/workflow/packages/swc-plugin-workflow/transform/tests/errors/non-async-functions/input.js", + "/Users/johnlindquist/dev/workflow/packages/swc-plugin-workflow/transform/tests/errors/non-async-functions/output-client.js", + "/Users/johnlindquist/dev/workflow/packages/swc-plugin-workflow/transform/tests/errors/non-async-functions/output-step.js", + "/Users/johnlindquist/dev/workflow/packages/swc-plugin-workflow/transform/tests/errors/non-async-functions/output-workflow.js", + "/Users/johnlindquist/dev/workflow/packages/swc-plugin-workflow/transform/tests/fixture/anonymous-default-workflow/input.js", + "/Users/johnlindquist/dev/workflow/packages/swc-plugin-workflow/transform/tests/fixture/default-arrow-workflow/input.js", + "/Users/johnlindquist/dev/workflow/packages/swc-plugin-workflow/transform/tests/fixture/default-parameter-usage/input.js", + 
"/Users/johnlindquist/dev/workflow/packages/swc-plugin-workflow/transform/tests/fixture/default-workflow-collision/input.js", + "/Users/johnlindquist/dev/workflow/packages/swc-plugin-workflow/transform/tests/fixture/expr-fn-default-workflow/input.js", + "/Users/johnlindquist/dev/workflow/packages/swc-plugin-workflow/transform/tests/fixture/mixed-functions/input.js", + "/Users/johnlindquist/dev/workflow/packages/swc-plugin-workflow/transform/tests/fixture/module-level-workflow/input.js", + "/Users/johnlindquist/dev/workflow/packages/swc-plugin-workflow/transform/tests/fixture/nested-step-in-workflow/input.js", + "/Users/johnlindquist/dev/workflow/packages/swc-plugin-workflow/transform/tests/fixture/nested-step-with-closure/input.js", + "/Users/johnlindquist/dev/workflow/packages/swc-plugin-workflow/transform/tests/fixture/nested-steps-in-object-constructor/input.js", + "/Users/johnlindquist/dev/workflow/packages/swc-plugin-workflow/transform/tests/fixture/separate-export-statement/input.js", + "/Users/johnlindquist/dev/workflow/packages/swc-plugin-workflow/transform/tests/fixture/single-workflow/input.js", + "/Users/johnlindquist/dev/workflow/packages/swc-plugin-workflow/transform/tests/fixture/static-method-workflow/input.js", + "/Users/johnlindquist/dev/workflow/packages/swc-plugin-workflow/transform/tests/fixture/user-named-dunder-default/input.js", + "/Users/johnlindquist/dev/workflow/packages/swc-plugin-workflow/transform/tests/fixture/using-declaration-workflow/input.js", + "/Users/johnlindquist/dev/workflow/packages/swc-plugin-workflow/transform/tests/fixture/workflow-arrow-function/input.js", + "/Users/johnlindquist/dev/workflow/packages/swc-plugin-workflow/transform/tests/fixture/workflow-client-property/input.js", + "/Users/johnlindquist/dev/workflow/packages/typescript-plugin/src/code-fixes.test.ts", + "/Users/johnlindquist/dev/workflow/packages/typescript-plugin/src/diagnostics.test.ts", + 
"/Users/johnlindquist/dev/workflow/packages/typescript-plugin/src/hover.test.ts", + "/Users/johnlindquist/dev/workflow/packages/typescript-plugin/src/utils.test.ts", + "/Users/johnlindquist/dev/workflow/packages/world-testing/dist/workflows/hooks.js", + "/Users/johnlindquist/dev/workflow/packages/world-testing/workflows/addition.ts", + "/Users/johnlindquist/dev/workflow/packages/world-testing/workflows/hooks.ts", + "/Users/johnlindquist/dev/workflow/packages/world-testing/workflows/noop.ts", + "/Users/johnlindquist/dev/workflow/packages/world-testing/workflows/null-byte.ts", + "/Users/johnlindquist/dev/workflow/packages/world-testing/workflows/retriable-and-fatal.ts", + "/Users/johnlindquist/dev/workflow/tests/fixtures/workflow-skills/approval-expiry-escalation/workflows/purchase-approval.ts", + "/Users/johnlindquist/dev/workflow/tests/fixtures/workflow-skills/compensation-saga/workflows/order-saga.ts", + "/Users/johnlindquist/dev/workflow/tests/fixtures/workflow-skills/duplicate-webhook-order/workflows/shopify-order.ts", + "/Users/johnlindquist/dev/workflow/workbench/example/workflows/0_workflow_only.ts", + "/Users/johnlindquist/dev/workflow/workbench/example/workflows/100_durable_agent_e2e.ts", + "/Users/johnlindquist/dev/workflow/workbench/example/workflows/10_single_stmt_control_flow.ts", + "/Users/johnlindquist/dev/workflow/workbench/example/workflows/1_simple.ts", + "/Users/johnlindquist/dev/workflow/workbench/example/workflows/2_control_flow.ts", + "/Users/johnlindquist/dev/workflow/workbench/example/workflows/3_streams.ts", + "/Users/johnlindquist/dev/workflow/workbench/example/workflows/4_ai.ts", + "/Users/johnlindquist/dev/workflow/workbench/example/workflows/5_hooks.ts", + "/Users/johnlindquist/dev/workflow/workbench/example/workflows/6_batching.ts", + "/Users/johnlindquist/dev/workflow/workbench/example/workflows/7_full.ts", + "/Users/johnlindquist/dev/workflow/workbench/example/workflows/97_bench.ts", + 
"/Users/johnlindquist/dev/workflow/workbench/example/workflows/98_duplicate_case.ts", + "/Users/johnlindquist/dev/workflow/workbench/example/workflows/99_e2e.ts", + "/Users/johnlindquist/dev/workflow/workbench/nextjs-turbopack/app/.well-known/agent/v1/steps.ts", + "/Users/johnlindquist/dev/workflow/workbench/nextjs-turbopack/workflows/8_react_render.tsx", + "/Users/johnlindquist/dev/workflow/workbench/nextjs-turbopack/workflows/96_many_steps.ts", + "/Users/johnlindquist/dev/workflow/workbench/nextjs-turbopack/workflows/agent_chat.ts", + "/Users/johnlindquist/dev/workflow/workbench/nextjs-webpack/app/.well-known/agent/v1/steps.ts", + "/Users/johnlindquist/dev/workflow/workbench/nextjs-webpack/workflows/8_react_render.tsx", + "/Users/johnlindquist/dev/workflow/workbench/nitro-v3/workflows/0_demo.ts", + "/Users/johnlindquist/dev/workflow/workbench/sveltekit/src/workflows/0_calc.ts", + "/Users/johnlindquist/dev/workflow/workbench/sveltekit/src/workflows/user-signup.ts", + "/Users/johnlindquist/dev/workflow/workbench/swc-playground/components/swc-playground.tsx", + "/Users/johnlindquist/dev/workflow/workbench/vitest/workflows/hooks.ts", + "/Users/johnlindquist/dev/workflow/workbench/vitest/workflows/simple.ts", + "/Users/johnlindquist/dev/workflow/workbench/vitest/workflows/sleeping.ts", + "/Users/johnlindquist/dev/workflow/workbench/vitest/workflows/third-party.ts", + "/Users/johnlindquist/dev/workflow/workbench/vitest/workflows/webhook.ts" + ], + "serdeOnlyFiles": [ + "/Users/johnlindquist/dev/workflow/.claude/worktrees/api-audit-01/workbench/example/workflows/serde-models.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/fix-skills-paths/workbench/example/workflows/serde-models.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1108/workbench/example/workflows/serde-models.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/pr-1111/workbench/example/workflows/serde-models.ts", + 
"/Users/johnlindquist/dev/workflow/.claude/worktrees/pricing-adjustment/workbench/example/workflows/serde-models.ts", + "/Users/johnlindquist/dev/workflow/.claude/worktrees/swift-jumping-pizza/workbench/example/workflows/serde-models.ts", + "/Users/johnlindquist/dev/workflow/packages/web/build/server/assets/server-build-TA2fif61.js", + "/Users/johnlindquist/dev/workflow/workbench/example/workflows/serde-models.ts" + ] +} diff --git a/docs/app/[lang]/cookbooks/[[...slug]]/page.tsx b/docs/app/[lang]/cookbooks/[[...slug]]/page.tsx new file mode 100644 index 0000000000..60eab998a6 --- /dev/null +++ b/docs/app/[lang]/cookbooks/[[...slug]]/page.tsx @@ -0,0 +1,115 @@ +import { Step, Steps } from 'fumadocs-ui/components/steps'; +import { Tab, Tabs } from 'fumadocs-ui/components/tabs'; +import { createRelativeLink } from 'fumadocs-ui/mdx'; +import type { Metadata } from 'next'; +import { notFound } from 'next/navigation'; +import { CookbookExplorer } from '@/components/geistdocs/cookbook-explorer'; +import { AskAI } from '@/components/geistdocs/ask-ai'; +import { CopyPage } from '@/components/geistdocs/copy-page'; +import { + DocsBody, + DocsDescription, + DocsPage, + DocsTitle, +} from '@/components/geistdocs/docs-page'; +import { EditSource } from '@/components/geistdocs/edit-source'; +import { Feedback } from '@/components/geistdocs/feedback'; +import { getMDXComponents } from '@/components/geistdocs/mdx-components'; +import { OpenInChat } from '@/components/geistdocs/open-in-chat'; +import { ScrollTop } from '@/components/geistdocs/scroll-top'; +import { Badge } from '@/components/ui/badge'; +import { Separator } from '@/components/ui/separator'; +import { getLLMText, getPageImage, source } from '@/lib/geistdocs/source'; + +const Page = async ({ params }: PageProps<'/[lang]/cookbooks/[[...slug]]'>) => { + const { slug, lang } = await params; + + // Prepend 'cookbook' to resolve from the docs source + const resolvedSlug = slug ? 
['cookbook', ...slug] : ['cookbook']; + const page = source.getPage(resolvedSlug, lang); + + if (!page) { + notFound(); + } + + const markdown = await getLLMText(page); + const MDX = page.data.body; + + return ( + + + + + + + + + + ), + }} + toc={page.data.toc} + > + {page.data.title} + {page.data.description} + + , + })} + /> + + + ); +}; + +export const generateStaticParams = () => { + // Generate params for all cookbook pages + const allParams = source.generateParams(); + return allParams + .filter((p) => Array.isArray(p.slug) && p.slug[0] === 'cookbook') + .map((p) => ({ + ...p, + slug: (p.slug as string[]).slice(1), // Remove 'cookbook' prefix + })); +}; + +export const generateMetadata = async ({ + params, +}: PageProps<'/[lang]/cookbooks/[[...slug]]'>) => { + const { slug, lang } = await params; + const resolvedSlug = slug ? ['cookbook', ...slug] : ['cookbook']; + const page = source.getPage(resolvedSlug, lang); + + if (!page) { + notFound(); + } + + const metadata: Metadata = { + title: page.data.title, + description: page.data.description, + openGraph: { + images: getPageImage(page).url, + }, + alternates: { + canonical: `/cookbooks${slug ? `/${slug.join('/')}` : ''}`, + types: { + 'text/markdown': `/cookbooks${slug ? 
`/${slug.join('/')}` : ''}.md`, + }, + }, + }; + + return metadata; +}; + +export default Page; diff --git a/docs/app/[lang]/cookbooks/layout.tsx b/docs/app/[lang]/cookbooks/layout.tsx new file mode 100644 index 0000000000..73cdf8e44a --- /dev/null +++ b/docs/app/[lang]/cookbooks/layout.tsx @@ -0,0 +1,13 @@ +import { DocsLayout } from '@/components/geistdocs/docs-layout'; +import { getCookbookTree } from '@/lib/geistdocs/cookbook-source'; + +const Layout = async ({ + children, + params, +}: LayoutProps<'/[lang]/cookbooks'>) => { + const { lang } = await params; + + return {children}; +}; + +export default Layout; diff --git a/docs/components/geistdocs/cookbook-explorer.tsx b/docs/components/geistdocs/cookbook-explorer.tsx index e039afa192..c894f5aa35 100644 --- a/docs/components/geistdocs/cookbook-explorer.tsx +++ b/docs/components/geistdocs/cookbook-explorer.tsx @@ -161,7 +161,7 @@ export function CookbookExplorer({ lang }: { lang: string }) { return (

diff --git a/docs/content/docs/cookbook/approvals/approval-chain.mdx b/docs/content/docs/cookbook/approvals/approval-chain.mdx index 1daf9013a8..97a84336ce 100644 --- a/docs/content/docs/cookbook/approvals/approval-chain.mdx +++ b/docs/content/docs/cookbook/approvals/approval-chain.mdx @@ -5,13 +5,11 @@ type: guide summary: Purchase orders needing manager, director, VP sign-off with per-level escalation timeouts. --- -Use the approval chain pattern when a request must pass through multiple approval levels in sequence. Each level has its own timeout, and the chain advances only when an approver signs off. If an approver rejects, the chain stops immediately. +Use the approval chain pattern when a request must pass through multiple approval levels in sequence, with per-level timeouts and escalation. ## Pattern -The workflow determines the required approval levels based on the request (e.g., amount thresholds), then iterates through each level. At each level, a hook is created for the approver and raced against a timeout. If the timeout fires, the chain escalates to the next level. If approved, the chain completes. If rejected, the workflow stops and returns the rejection. - -### Simplified +Determine approval levels from the request, then iterate through each. At each level, race a hook against a timeout — if the timeout fires, escalate to the next level. 
```typescript lineNumbers import { defineHook, sleep } from "workflow"; @@ -32,21 +30,17 @@ export async function approvalChain(expenseId: string, amount: number) { await notifyApprover(expenseId, level.role); const levelHook = defineHook<{ approved: boolean; comment?: string }>(); - const hook = levelHook.create({ - token: `approval:${expenseId}:${level.role}`, - }); + const hook = levelHook.create({ token: `approval:${expenseId}:${level.role}` }); const result = await Promise.race([ hook.then((p) => ({ type: "decision" as const, payload: p })), sleep(level.timeout).then(() => ({ type: "timeout" as const })), ]); - if (result.type === "timeout") continue; // Escalate to next level - + if (result.type === "timeout") continue; if (!result.payload.approved) { return { expenseId, status: "rejected", decidedBy: level.role }; } - return { expenseId, status: "approved", decidedBy: level.role }; } @@ -54,191 +48,8 @@ export async function approvalChain(expenseId: string, amount: number) { } ``` -### Full Implementation - -```typescript lineNumbers -import { defineHook, getWritable, sleep } from "workflow"; - -export type ApprovalRole = "manager" | "director" | "vp"; - -export type ApprovalSignal = { - approved: boolean; - comment?: string; - decidedBy?: string; -}; - -export type ChainEvent = - | { type: "submitted"; expenseId: string; amount: number; levels: ApprovalRole[] } - | { type: "level_waiting"; role: ApprovalRole; token: string; timeout: string } - | { type: "level_approved"; role: ApprovalRole; comment?: string } - | { type: "level_rejected"; role: ApprovalRole; comment?: string } - | { type: "level_timeout"; role: ApprovalRole } - | { type: "approved"; decidedBy: ApprovalRole; comment?: string } - | { type: "rejected"; decidedBy: ApprovalRole; comment?: string } - | { type: "expired" } - | { type: "done"; status: "approved" | "rejected" | "expired" }; - -type ApprovalLevel = { - role: ApprovalRole; - timeout: "10s" | "8s" | "6s"; -}; - -const LEVEL_CHAIN: 
readonly ApprovalLevel[] = [ - { role: "manager", timeout: "10s" }, - { role: "director", timeout: "8s" }, - { role: "vp", timeout: "6s" }, -] as const; - -export function getApprovalLevelsForAmount(amount: number): ApprovalLevel[] { - if (amount < 500) return [LEVEL_CHAIN[0]]; - if (amount < 5000) return [LEVEL_CHAIN[0], LEVEL_CHAIN[1]]; - return [...LEVEL_CHAIN]; -} - -export async function approvalChain(expenseId: string, amount: number) { - "use workflow"; - - const levels = getApprovalLevelsForAmount(amount); - - await submitExpense(expenseId, amount, levels.map((level) => level.role)); - - for (const level of levels) { - await notifyLevel(expenseId, level.role, level.timeout); - - const levelHook = defineHook(); - const hook = levelHook.create({ - token: `approval:${expenseId}:${level.role}`, - }); - - const result = await Promise.race([ - hook.then((payload) => ({ type: "decision" as const, payload })), - sleep(level.timeout).then(() => ({ type: "timeout" as const })), - ]); - - if (result.type === "timeout") { - await recordTimeout(expenseId, level.role); - continue; - } - - if (!result.payload.approved) { - await rejectExpense(expenseId, level.role, result.payload.comment); - await emitDone("rejected"); - return { - expenseId, - amount, - status: "rejected" as const, - decidedBy: level.role, - comment: result.payload.comment, - }; - } - - await approveExpense(expenseId, level.role, result.payload.comment); - await emitDone("approved"); - return { - expenseId, - amount, - status: "approved" as const, - decidedBy: level.role, - comment: result.payload.comment, - }; - } - - await expireExpense(expenseId); - await emitDone("expired"); - return { - expenseId, - amount, - status: "timed_out" as const, - }; -} - -async function submitExpense(expenseId: string, amount: number, levels: ApprovalRole[]) { - "use step"; - - const writer = getWritable().getWriter(); - try { - await writer.write({ type: "submitted", expenseId, amount, levels }); - } finally { - 
writer.releaseLock(); - } -} - -async function notifyLevel(expenseId: string, role: ApprovalRole, timeout: string) { - "use step"; - - const writer = getWritable().getWriter(); - try { - const token = `approval:${expenseId}:${role}`; - await writer.write({ type: "level_waiting", role, token, timeout }); - } finally { - writer.releaseLock(); - } -} - -async function recordTimeout(expenseId: string, role: ApprovalRole) { - "use step"; - - const writer = getWritable().getWriter(); - try { - await writer.write({ type: "level_timeout", role }); - } finally { - writer.releaseLock(); - } - console.info("[approval-chain] level_timeout", { expenseId, role }); -} - -async function approveExpense(expenseId: string, role: ApprovalRole, comment?: string) { - "use step"; - - const writer = getWritable().getWriter(); - try { - await writer.write({ type: "level_approved", role, comment }); - await writer.write({ type: "approved", decidedBy: role, comment }); - } finally { - writer.releaseLock(); - } -} - -async function rejectExpense(expenseId: string, role: ApprovalRole, comment?: string) { - "use step"; - - const writer = getWritable().getWriter(); - try { - await writer.write({ type: "level_rejected", role, comment }); - await writer.write({ type: "rejected", decidedBy: role, comment }); - } finally { - writer.releaseLock(); - } -} - -async function emitDone(status: "approved" | "rejected" | "expired") { - "use step"; - - const writer = getWritable().getWriter(); - try { - await writer.write({ type: "done", status }); - } finally { - writer.releaseLock(); - } -} - -async function expireExpense(expenseId: string) { - "use step"; - - const writer = getWritable().getWriter(); - try { - await writer.write({ type: "expired" }); - } finally { - writer.releaseLock(); - } - console.info("[approval-chain] expense_timed_out", { expenseId }); -} -``` - ## Key APIs - [`"use workflow"`](/docs/api-reference/workflow/use-workflow) — declares the orchestrator function -- [`"use 
step"`](/docs/api-reference/workflow/use-step) — declares step functions with full Node.js access - [`defineHook()`](/docs/api-reference/workflow/define-hook) — creates typed hooks for each approval level - [`sleep()`](/docs/api-reference/workflow/sleep) — per-level escalation timeout -- [`getWritable()`](/docs/api-reference/workflow/get-writable) — streams chain progress to the client diff --git a/docs/content/docs/cookbook/approvals/approval-gate.mdx b/docs/content/docs/cookbook/approvals/approval-gate.mdx index e323b4b0ef..24aec26b92 100644 --- a/docs/content/docs/cookbook/approvals/approval-gate.mdx +++ b/docs/content/docs/cookbook/approvals/approval-gate.mdx @@ -5,13 +5,11 @@ type: guide summary: Content moderation hold — pause publishing until a reviewer clicks approve or reject. --- -Use the approval gate pattern when a workflow must pause and wait for human input before continuing. The workflow creates a hook that external systems (email links, Slack buttons, admin UIs) can call to resume execution. +Use the approval gate pattern when a workflow must pause and wait for human input before continuing. External systems (email links, Slack buttons, admin UIs) call the hook to resume execution. ## Pattern -The workflow creates a deterministic hook token and races it against a `sleep()` timeout. If the hook fires before the timeout, the workflow inspects the approval payload and either fulfills or cancels. If the timeout wins, the workflow auto-cancels. This pattern survives server restarts because both the hook and the sleep are durably persisted. - -### Simplified +Create a deterministic hook token and race it against a `sleep()` timeout. If the hook fires, inspect the payload and fulfill or cancel. If the timeout wins, auto-cancel. 
```typescript lineNumbers import { defineHook, sleep } from "workflow"; @@ -29,7 +27,6 @@ export async function approvalGate(orderId: string) { const hook = approvalHook.create({ token: `order_approval:${orderId}` }); - // Race: human approval vs. timeout const result = await Promise.race([ hook.then((payload) => ({ type: "approval" as const, payload })), sleep("24h").then(() => ({ type: "timeout" as const })), @@ -50,173 +47,8 @@ export async function approvalGate(orderId: string) { } ``` -### Full Implementation - -```typescript lineNumbers -import { defineHook, getWritable, sleep } from "workflow"; -import type { StringValue } from "ms"; - -// Define the approval payload type -export interface ApprovalPayload { - approved: boolean; - comment?: string; - approvedBy?: string; -} - -// Define the hook for type-safe approval handling -export const orderApprovalHook = defineHook(); - -// Result type for the workflow -export interface ApprovalResult { - orderId: string; - status: "approved" | "rejected" | "timeout"; - comment?: string; - approvedBy?: string; -} - -// Typed events streamed to the UI via getWritable() -export type ApprovalEvent = - | { type: "request_sent"; orderId: string } - | { type: "waiting"; orderId: string; token: string; timeoutMs: number } - | { type: "approved"; orderId: string; approvedBy?: string; comment?: string } - | { type: "rejected"; orderId: string; approvedBy?: string; comment?: string } - | { type: "timeout"; orderId: string } - | { type: "fulfilling"; orderId: string } - | { type: "fulfilled"; orderId: string } - | { type: "cancelling"; orderId: string; reason: string } - | { type: "cancelled"; orderId: string; reason: string } - | { type: "done"; orderId: string; status: "approved" | "rejected" | "timeout" }; - -const TIMEOUT_MS: Record = { - "10s": 10_000, - "30s": 30_000, - "1m": 60_000, - "5m": 300_000, - "24h": 86_400_000, -}; - -/** - * Approval Gate Workflow - * - * Demonstrates the "Signal + timer" pattern: - * - Creates a 
deterministic hook token for external systems to resume - * - Uses Promise.race to implement timeout behavior - * - Waits for human approval or times out after specified duration - */ -export async function approvalGate( - orderId: string, - timeout: StringValue = "24h" -): Promise { - "use workflow"; - - // Request approval (e.g., send email, create ticket, notify Slack) - await requestApproval(orderId); - await emit({ type: "request_sent", orderId }); - - // Create hook with deterministic token based on orderId - const hook = orderApprovalHook.create({ - token: `order_approval:${orderId}`, - }); - - const timeoutMs = TIMEOUT_MS[timeout] ?? 30_000; - await emit({ - type: "waiting", - orderId, - token: hook.token, - timeoutMs, - }); - - // Race between approval hook and timeout - const result = await Promise.race([ - hook.then((payload) => ({ - type: "approval" as const, - payload, - })), - sleep(timeout).then(() => ({ - type: "timeout" as const, - payload: null, - })), - ]); - - if (result.type === "timeout") { - await emit({ type: "timeout", orderId }); - await emit({ type: "cancelling", orderId, reason: "Approval timed out" }); - await cancelOrder(orderId, "Approval timed out"); - await emit({ type: "cancelled", orderId, reason: "Approval timed out" }); - await emit({ type: "done", orderId, status: "timeout" }); - return { orderId, status: "timeout" }; - } - - const { approved, comment, approvedBy } = result.payload!; - - if (approved) { - await emit({ type: "approved", orderId, approvedBy, comment }); - await emit({ type: "fulfilling", orderId }); - await fulfillOrder(orderId); - await emit({ type: "fulfilled", orderId }); - await emit({ type: "done", orderId, status: "approved" }); - return { orderId, status: "approved", comment, approvedBy }; - } else { - await emit({ type: "rejected", orderId, approvedBy, comment }); - await emit({ type: "cancelling", orderId, reason: comment || "Rejected by approver" }); - await cancelOrder(orderId, comment || "Rejected by 
approver"); - await emit({ type: "cancelled", orderId, reason: comment || "Rejected by approver" }); - await emit({ type: "done", orderId, status: "rejected" }); - return { orderId, status: "rejected", comment, approvedBy }; - } -} - -/** - * Step: Emit a single event to the UI stream. - * Re-acquires the writer inside the step so it survives durable suspension. - */ -async function emit(event: T): Promise { - "use step"; - const writer = getWritable().getWriter(); - try { - await writer.write(event); - } finally { - writer.releaseLock(); - } -} - -/** - * Step: Request approval from external system - */ -async function requestApproval(orderId: string): Promise { - "use step"; - // Simulate sending approval request (email, ticket, Slack notification) - await delay(500); - console.log(`[Order ${orderId}] Approval request sent`); -} - -/** - * Step: Fulfill the order after approval - */ -async function fulfillOrder(orderId: string): Promise { - "use step"; - await delay(600); - console.log(`[Order ${orderId}] Order fulfilled successfully`); -} - -/** - * Step: Cancel the order (on rejection or timeout) - */ -async function cancelOrder(orderId: string, reason: string): Promise { - "use step"; - await delay(500); - console.log(`[Order ${orderId}] Order cancelled: ${reason}`); -} - -function delay(ms: number): Promise { - return new Promise((resolve) => setTimeout(resolve, ms)); -} -``` - ## Key APIs - [`"use workflow"`](/docs/api-reference/workflow/use-workflow) — declares the orchestrator function -- [`"use step"`](/docs/api-reference/workflow/use-step) — declares step functions with full Node.js access - [`defineHook()`](/docs/api-reference/workflow/define-hook) — creates a typed hook for external signals - [`sleep()`](/docs/api-reference/workflow/sleep) — durable timeout that survives restarts -- [`getWritable()`](/docs/api-reference/workflow/get-writable) — streams approval events to the client diff --git 
a/docs/content/docs/cookbook/approvals/cancellable-export.mdx b/docs/content/docs/cookbook/approvals/cancellable-export.mdx index 8c6d5f1a0b..e817a27cdb 100644 --- a/docs/content/docs/cookbook/approvals/cancellable-export.mdx +++ b/docs/content/docs/cookbook/approvals/cancellable-export.mdx @@ -5,13 +5,11 @@ type: guide summary: User starts a 100k-row data export and hits Cancel mid-flight without waiting for completion. --- -Use this pattern when a long-running job should be cancellable by the user at any point. Each step in the loop acts as a cancellation checkpoint -- when `run.cancel()` is called, the workflow stops between steps. +Use this pattern when a long-running job should be cancellable by the user at any point. Each step in the loop acts as a cancellation checkpoint — when `run.cancel()` is called, the workflow stops between steps. ## Pattern -The workflow iterates through sections of work, with each iteration awaiting a step. Between steps, the runtime checks for cancellation. If `run.cancel()` was called, the workflow terminates gracefully without executing further steps. No special cancellation logic is needed in the workflow code itself. - -### Simplified +Iterate through sections of work, awaiting a step each time. Between steps, the runtime checks for cancellation. No special cancellation logic is needed in the workflow code itself. 
```typescript lineNumbers declare function generateSection(index: number, title: string): Promise<{ title: string; status: string }>; // @setup @@ -38,90 +36,7 @@ export async function generateReport(accountId: string) { } ``` -### Full Implementation - -```typescript lineNumbers -import { getWritable } from "workflow"; - -export type SectionEvent = - | { type: "section_start"; index: number; title: string } - | { type: "section_done"; index: number; title: string } - | { type: "complete"; total: number } - | { type: "cancelled"; completedCount: number; total: number }; - -const SECTION_DELAY_MS = 500; - -function delay(ms: number): Promise { - return new Promise((resolve) => setTimeout(resolve, ms)); -} - -const SECTION_TITLES = [ - "Introduction", - "Market Analysis", - "Technical Architecture", - "Implementation Plan", - "Risk Assessment", - "Financial Projections", - "Timeline", - "Team Structure", - "Competitive Landscape", - "Conclusion", -]; - -export async function generateReport( - accountId: string, - systemPrompt: string -) { - "use workflow"; - - // Each await is a cancellation checkpoint — - // run.cancel() takes effect between steps. 
- const sections: { title: string; status: string }[] = []; - - for (let i = 0; i < SECTION_TITLES.length; i++) { - const title = SECTION_TITLES[i]; - const result = await generateSection(i, title, sections[i - 1]?.title); - sections.push(result); - } - - await emitDone(sections.length); - - return { accountId, sections, status: "completed" }; -} - -async function emitDone(total: number) { - "use step"; - const writer = getWritable().getWriter(); - try { - await writer.write({ type: "complete", total }); - } finally { - writer.releaseLock(); - } -} - -emitDone.maxRetries = 0; - -async function generateSection( - index: number, - title: string, - previousTitle?: string -) { - "use step"; - - const writer = getWritable().getWriter(); - try { - await writer.write({ type: "section_start", index, title }); - await delay(SECTION_DELAY_MS); - await writer.write({ type: "section_done", index, title }); - return { title, previous: previousTitle, status: "generated" }; - } finally { - writer.releaseLock(); - } -} -``` - ## Key APIs - [`"use workflow"`](/docs/api-reference/workflow/use-workflow) — declares the orchestrator function -- [`"use step"`](/docs/api-reference/workflow/use-step) — each step is a cancellation checkpoint -- [`getWritable()`](/docs/api-reference/workflow/get-writable) — streams section progress to the client +- [`run.cancel()`](/docs/api-reference/workflow-api/cancel) — cancels a running workflow between step boundaries diff --git a/docs/content/docs/cookbook/approvals/scheduler-agent-supervisor.mdx b/docs/content/docs/cookbook/approvals/scheduler-agent-supervisor.mdx index 6b49c38ebe..8bdb8eab2d 100644 --- a/docs/content/docs/cookbook/approvals/scheduler-agent-supervisor.mdx +++ b/docs/content/docs/cookbook/approvals/scheduler-agent-supervisor.mdx @@ -5,13 +5,11 @@ type: guide summary: Dispatch content generation to agents in sequence, checking quality thresholds with escalation. 
--- -Use the scheduler-agent-supervisor pattern when work should be dispatched to agents in a priority order, with a supervisor checking quality after each attempt. If an agent's output does not meet the threshold, the supervisor escalates to the next agent. +Use the scheduler-agent-supervisor pattern when work should be dispatched to agents in priority order, with a supervisor checking quality after each attempt and escalating on failure. ## Pattern -The workflow iterates through a list of agents (fast, thorough, premium). For each agent, it dispatches work and runs a quality check. If the quality passes, the content is published. If it fails, the workflow sleeps (cooldown) and escalates to the next agent. If all agents fail, the workflow reports failure. - -### Simplified +Iterate through agents (fast, thorough, premium). For each, dispatch work and run a quality check. If quality passes, publish. If it fails, sleep (cooldown) and escalate to the next agent. ```typescript lineNumbers import { sleep } from "workflow"; @@ -37,7 +35,6 @@ export async function schedulerAgentSupervisor( return { status: "published", publishedBy: agentId, publicationId }; } - // Cooldown before escalating to next agent await sleep("2s"); } @@ -45,294 +42,7 @@ export async function schedulerAgentSupervisor( } ``` -### Full Implementation - -```typescript lineNumbers -// getWritable is used here to stream demo UI events. -// A production workflow wouldn't need these unless it has its own streaming UI. 
-import { getWritable, sleep } from "workflow"; - -const SUPERVISOR_AGENTS = [ - { id: "fast-model", label: "Fast Model" }, - { id: "thorough-model", label: "Thorough Model" }, - { id: "premium-model", label: "Premium Model" }, -] as const; - -export type SupervisorAgentId = (typeof SUPERVISOR_AGENTS)[number]["id"]; -export type QualityThreshold = "low" | "medium" | "high"; - -const QUALITY_THRESHOLD_SCORE: Record = { - low: 65, - medium: 80, - high: 92, -}; - -const AGENT_QUALITY_SCORE: Record = { - "fast-model": 68, - "thorough-model": 82, - "premium-model": 89, -}; - -// Demo: simulate real-world latency so the UI can show progress. -const AGENT_GENERATION_MS: Record = { - "fast-model": 1400, - "thorough-model": 2100, - "premium-model": 2800, -}; - -const QUALITY_CHECK_MS = 600; -const PUBLISH_MS = 700; - -function delay(ms: number): Promise { - return new Promise((resolve) => setTimeout(resolve, ms)); -} - -export type SupervisorEvent = - | { type: "agent_dispatched"; agentId: SupervisorAgentId; agentIndex: number; label: string } - | { type: "agent_generating"; agentId: SupervisorAgentId; progressPct: number } - | { type: "agent_generated"; agentId: SupervisorAgentId } - | { type: "quality_check"; agentId: SupervisorAgentId } - | { type: "quality_result"; agentId: SupervisorAgentId; score: number; requiredScore: number; passed: boolean } - | { type: "cooldown"; fromAgentId: SupervisorAgentId; toAgentId: SupervisorAgentId; reason: string } - | { type: "publishing"; agentId: SupervisorAgentId } - | { type: "done"; publishedBy: SupervisorAgentId; publicationId: string; qualityScore: number } - | { type: "failed"; reason: string }; - -type DispatchResult = { - agentId: SupervisorAgentId; - topic: string; - draft: string; - estimatedScore: number; -}; - -type QualityGateResult = { - score: number; - requiredScore: number; - passed: boolean; - reason: string; -}; - -type PublishResult = { - publicationId: string; -}; - -type RerouteRecord = { - from: 
SupervisorAgentId; - to: SupervisorAgentId; - reason: string; - cooldown: "2s"; -}; - -export type SchedulerAgentSupervisorResult = - | { - status: "published"; - topic: string; - threshold: QualityThreshold; - requiredScore: number; - qualityScore: number; - publishedBy: SupervisorAgentId; - publicationId: string; - reroutes: RerouteRecord[]; - } - | { - status: "failed"; - topic: string; - threshold: QualityThreshold; - requiredScore: number; - reroutes: RerouteRecord[]; - }; - -export async function schedulerAgentSupervisor( - topic: string, - threshold: QualityThreshold = "medium" -): Promise { - "use workflow"; - - const normalizedTopic = topic.trim(); - const requiredScore = QUALITY_THRESHOLD_SCORE[threshold]; - const reroutes: RerouteRecord[] = []; - - for (let agentIndex = 0; agentIndex < SUPERVISOR_AGENTS.length; agentIndex += 1) { - const agent = SUPERVISOR_AGENTS[agentIndex]; - - const draft = await dispatchToAgent(agent.id, normalizedTopic, agentIndex); - const quality = await checkQuality(draft, requiredScore); - - if (quality.passed) { - const published = await publishContent(draft, quality); - - return { - status: "published", - topic: normalizedTopic, - threshold, - requiredScore, - qualityScore: quality.score, - publishedBy: agent.id, - publicationId: published.publicationId, - reroutes, - }; - } - - const nextAgent = SUPERVISOR_AGENTS[agentIndex + 1]; - if (!nextAgent) { - break; - } - - const rerouteReason = `${agent.id} score ${quality.score} below ${quality.requiredScore}`; - reroutes.push({ - from: agent.id, - to: nextAgent.id, - reason: rerouteReason, - cooldown: "2s", - }); - - await emitCooldown(agent.id, nextAgent.id, rerouteReason); - await sleep("2s"); - } - - await emitFailed("all_agents_failed_quality"); - - return { - status: "failed", - topic: normalizedTopic, - threshold, - requiredScore, - reroutes, - }; -} - -async function dispatchToAgent( - agentId: SupervisorAgentId, - topic: string, - agentIndex: number -): Promise { - "use 
step"; - - const writer = getWritable().getWriter(); - const estimatedScore = AGENT_QUALITY_SCORE[agentId]; - const agentDef = SUPERVISOR_AGENTS.find((a) => a.id === agentId)!; - - try { - await writer.write({ - type: "agent_dispatched", - agentId, - agentIndex, - label: agentDef.label, - }); - - // Demo: simulate generation with progress ticks - const genMs = AGENT_GENERATION_MS[agentId]; - const tickCount = 5; - const tickMs = genMs / tickCount; - for (let tick = 1; tick <= tickCount; tick++) { - await delay(tickMs); - await writer.write({ - type: "agent_generating", - agentId, - progressPct: Math.round((tick / tickCount) * 100), - }); - } - - await writer.write({ type: "agent_generated", agentId }); - - const draft = `Draft ${agentIndex + 1} for topic "${topic}" by ${agentId}`; - return { agentId, topic, draft, estimatedScore }; - } finally { - writer.releaseLock(); - } -} - -async function checkQuality( - draft: DispatchResult, - requiredScore: number -): Promise { - "use step"; - - const writer = getWritable().getWriter(); - - try { - await writer.write({ type: "quality_check", agentId: draft.agentId }); - await delay(QUALITY_CHECK_MS); - - const score = draft.estimatedScore; - const passed = score >= requiredScore; - const reason = passed - ? 
`Score ${score} passed threshold ${requiredScore}` - : `Score ${score} below threshold ${requiredScore}`; - - await writer.write({ - type: "quality_result", - agentId: draft.agentId, - score, - requiredScore, - passed, - }); - - return { score, requiredScore, passed, reason }; - } finally { - writer.releaseLock(); - } -} - -async function publishContent( - draft: DispatchResult, - quality: QualityGateResult -): Promise { - "use step"; - - const writer = getWritable().getWriter(); - - try { - await writer.write({ type: "publishing", agentId: draft.agentId }); - await delay(PUBLISH_MS); - - const publicationId = `pub_${draft.agentId}_${Date.now().toString(36)}`; - - await writer.write({ - type: "done", - publishedBy: draft.agentId, - publicationId, - qualityScore: quality.score, - }); - - return { publicationId }; - } finally { - writer.releaseLock(); - } -} - -async function emitCooldown( - fromAgentId: SupervisorAgentId, - toAgentId: SupervisorAgentId, - reason: string -): Promise { - "use step"; - const writer = getWritable().getWriter(); - try { - await writer.write({ type: "cooldown", fromAgentId, toAgentId, reason }); - } finally { - writer.releaseLock(); - } -} - -emitCooldown.maxRetries = 0; - -async function emitFailed(reason: string): Promise { - "use step"; - const writer = getWritable().getWriter(); - try { - await writer.write({ type: "failed", reason }); - } finally { - writer.releaseLock(); - } -} - -emitFailed.maxRetries = 0; -``` - ## Key APIs - [`"use workflow"`](/docs/api-reference/workflow/use-workflow) — declares the orchestrator function -- [`"use step"`](/docs/api-reference/workflow/use-step) — declares step functions with full Node.js access - [`sleep()`](/docs/api-reference/workflow/sleep) — durable cooldown between agent escalations -- [`getWritable()`](/docs/api-reference/workflow/get-writable) — streams agent progress to the client diff --git a/docs/content/docs/cookbook/data-processing/aggregator.mdx 
b/docs/content/docs/cookbook/data-processing/aggregator.mdx index 2caeeae6c6..5c39d6bf7a 100644 --- a/docs/content/docs/cookbook/data-processing/aggregator.mdx +++ b/docs/content/docs/cookbook/data-processing/aggregator.mdx @@ -5,20 +5,18 @@ type: guide summary: Collect inventory from multiple warehouses with a timeout so stragglers don't block checkout. --- -Collect inventory from multiple warehouses with a timeout so stragglers don't block checkout. Use this pattern when you need to gather signals from multiple sources and combine them, but cannot wait forever for all of them. +Use aggregator when you need to gather signals from multiple sources and combine them, but cannot wait forever for all of them. ## Pattern -Create one hook per expected source, then race `Promise.all` (all sources responded) against `sleep` (deadline). Whether all signals arrive or the timeout fires first, aggregate whatever data you have into a single result. - -### Simplified +Create one hook per expected source, then race `Promise.all` (all responded) against `sleep` (deadline). Aggregate whatever data arrived into a single result. 
```typescript lineNumbers import { defineHook, sleep } from "workflow"; export const aggregatorSignal = defineHook<{ source: string; value: number }>(); -declare function processBatch(batchId: string, received: Map): Promise<{ totalValue: number }>; // @setup +declare function processBatch(batchId: string, received: { source: string; value: number }[]): Promise<{ totalValue: number }>; // @setup const SOURCES = ["warehouse-a", "warehouse-b", "warehouse-c"] as const; @@ -26,7 +24,9 @@ export async function aggregator(batchId: string, timeoutMs: number = 8000) { "use workflow"; const hooks = SOURCES.map((source) => - aggregatorSignal.create({ token: `${source}:${batchId}` }).then((payload) => ({ source, payload })) + aggregatorSignal + .create({ token: `${source}:${batchId}` }) + .then((payload) => ({ source, payload })) ); const outcome = await Promise.race([ @@ -34,188 +34,15 @@ export async function aggregator(batchId: string, timeoutMs: number = 8000) { sleep(`${timeoutMs}ms`).then(() => ({ type: "timeout" as const, results: [] as { source: string; payload: { source: string; value: number } }[] })), ]); - const received = new Map(outcome.results.map(({ source, payload }) => [source, payload])); + const received = outcome.results.map(({ payload }) => payload); const summary = await processBatch(batchId, received); return { batchId, status: outcome.type === "ready" ? 
"aggregated" : "partial", summary }; } ``` -### Full Implementation - -```typescript lineNumbers -import { defineHook, getWritable, sleep } from "workflow"; - -// --------------------------------------------------------------------------- -// Typed events streamed to the UI via getWritable() -// --------------------------------------------------------------------------- -export type AggregatorEvent = - | { type: "collecting"; batchId: string; tokens: Record; expectedCount: number; timeoutMs: number } - | { type: "signal_received"; batchId: string; source: string; value: number; receivedCount: number; expectedCount: number } - | { type: "all_collected"; batchId: string } - | { type: "timeout"; batchId: string; missing: string[]; received: string[] } - | { type: "processing"; batchId: string } - | { type: "done"; batchId: string; status: "aggregated" | "partial"; summary: AggregatorSummary }; - -export type AggregatorSummary = { - totalSignals: number; - receivedSignals: number; - totalValue: number; - sources: string[]; -}; - -// --------------------------------------------------------------------------- -// Hook definition — each source sends { source, value } -// --------------------------------------------------------------------------- -export type SignalPayload = { source: string; value: number }; - -export const aggregatorSignal = defineHook(); - -const SOURCES = ["warehouse-a", "warehouse-b", "warehouse-c"] as const; -export type SourceId = (typeof SOURCES)[number]; - -// --------------------------------------------------------------------------- -// Workflow: collect N signals with a timeout, then aggregate -// --------------------------------------------------------------------------- -export async function aggregator( - batchId: string, - timeoutMs: number = 8000 -): Promise<{ batchId: string; status: "aggregated" | "partial"; summary: AggregatorSummary }> { - "use workflow"; - - // Create one hook per source with deterministic tokens - const tokens: 
Record = {}; - const hooks = SOURCES.map((source) => { - const token = `${source}:${batchId}`; - tokens[source] = token; - return { source, hook: aggregatorSignal.create({ token }), token }; - }); - - await emit({ - type: "collecting", - batchId, - tokens, - expectedCount: SOURCES.length, - timeoutMs, - }); - - // Track received signals - const received = new Map(); - - const signalPromises = hooks.map(({ source, hook }) => - hook.then((payload) => { - received.set(source, payload); - return { source, payload }; - }) - ); - - // Race: collect all signals OR timeout - const outcome = await Promise.race([ - Promise.all(signalPromises).then((results) => ({ - type: "ready" as const, - results, - })), - sleep(`${timeoutMs}ms`).then(() => ({ - type: "timeout" as const, - results: [] as { source: string; payload: SignalPayload }[], - })), - ]); - - // Snapshot received signals at the timeout boundary so late arrivals - // cannot mutate the summary or trigger additional events. - const receivedSnapshot = new Map(received); - - // Emit signal_received events for signals that arrived - for (const { source, payload } of outcome.results) { - await emit({ - type: "signal_received", - batchId, - source, - value: payload.value, - receivedCount: receivedSnapshot.size, - expectedCount: SOURCES.length, - }); - } - - if (outcome.type === "timeout") { - const receivedSources = [...receivedSnapshot.keys()]; - const missing = SOURCES.filter((s) => !receivedSnapshot.has(s)); - - // Emit signal_received for signals that arrived before timeout but - // were not part of the Promise.all resolution (partial arrivals). 
- for (const [source, payload] of receivedSnapshot) { - if (!outcome.results.some((r) => r.source === source)) { - await emit({ - type: "signal_received", - batchId, - source, - value: payload.value, - receivedCount: receivedSnapshot.size, - expectedCount: SOURCES.length, - }); - } - } - - await emit({ type: "timeout", batchId, missing, received: receivedSources }); - const summary = await processBatch(batchId, receivedSnapshot); - await emit({ type: "done", batchId, status: "partial", summary }); - return { batchId, status: "partial" as const, summary }; - } - - await emit({ type: "all_collected", batchId }); - const summary = await processBatch(batchId, receivedSnapshot); - await emit({ type: "done", batchId, status: "aggregated", summary }); - return { batchId, status: "aggregated" as const, summary }; -} - -// --------------------------------------------------------------------------- -// Step: emit a single event to the UI stream -// --------------------------------------------------------------------------- -async function emit(event: T): Promise { - "use step"; - const writer = getWritable().getWriter(); - try { - await writer.write(event); - } finally { - writer.releaseLock(); - } -} - -// --------------------------------------------------------------------------- -// Step: process collected signals into an aggregated result -// --------------------------------------------------------------------------- -async function processBatch( - batchId: string, - received: ReadonlyMap -): Promise { - "use step"; - - const writer = getWritable().getWriter(); - try { - await writer.write({ type: "processing", batchId }); - } finally { - writer.releaseLock(); - } - - // Simulate processing delay - await new Promise((resolve) => setTimeout(resolve, 600)); - - const sources = [...received.keys()]; - const totalValue = [...received.values()].reduce((sum, p) => sum + p.value, 0); - - return { - totalSignals: SOURCES.length, - receivedSignals: received.size, - totalValue, - 
sources, - }; -} -``` - ## Key APIs -- [`"use workflow"`](/docs/foundations/workflows-and-steps) — marks the orchestrator function -- [`"use step"`](/docs/foundations/workflows-and-steps) — marks functions with full Node.js access +- [`"use workflow"`](/docs/api-reference/workflow/use-workflow) — declares the orchestrator function - [`defineHook()`](/docs/api-reference/workflow/define-hook) — creates a named hook that suspends until signaled - [`sleep()`](/docs/api-reference/workflow/sleep) — durable timer that survives restarts -- [`getWritable()`](/docs/api-reference/workflow/get-writable) — streams events to the client diff --git a/docs/content/docs/cookbook/data-processing/batch-processor.mdx b/docs/content/docs/cookbook/data-processing/batch-processor.mdx index d4c656124e..a32305f607 100644 --- a/docs/content/docs/cookbook/data-processing/batch-processor.mdx +++ b/docs/content/docs/cookbook/data-processing/batch-processor.mdx @@ -5,13 +5,11 @@ type: guide summary: Process a large CSV import in batches, auto-resuming from the last completed batch after a crash. --- -Process a large CSV import in batches, auto-resuming from the last completed batch after a crash. Use this pattern for bulk operations where processing the entire dataset in one step would be too slow or risky. +Use batch processor for bulk operations where processing the entire dataset in one step would be too slow or risky. ## Pattern -Divide the total work into fixed-size batches and process each as its own step. Because each step is recorded in the event log, a crash mid-way automatically resumes from the last completed batch on replay. - -### Simplified +Divide work into fixed-size batches and process each as its own step. Because each step is recorded in the event log, a crash mid-way automatically resumes from the last completed batch on replay. 
```typescript lineNumbers declare function processBatch(batch: number, start: number, end: number): Promise; // @setup @@ -34,138 +32,6 @@ export async function batchProcessor( } ``` -### Full Implementation - -```typescript lineNumbers -// getWritable is used here to stream demo UI events. -// A production workflow wouldn't need it unless it has its own streaming UI. -import { getWritable } from "workflow"; - -export type BatchEvent = - | { type: "batch_start"; batch: number; start: number; end: number; label: string } - | { type: "batch_done"; batch: number; start: number; end: number; label: string } - | { type: "crash"; afterBatch: number; message: string } - | { type: "resume"; fromBatch: number } - | { type: "complete"; totalBatches: number; processedRecords: number } - | { type: "done"; status: "done"; totalBatches: number; processedRecords: number }; - -// Demo: >= 500ms per step (timing rules) -const BATCH_STEP_MS = 650; - -const numberFmt = new Intl.NumberFormat("en-US"); - -function formatNumber(n: number): string { - return numberFmt.format(n); -} - -function delay(ms: number): Promise { - return new Promise((resolve) => setTimeout(resolve, ms)); -} - -export async function batchProcessor( - total: number = 10_000, - batchSize: number = 1_000, - crashAfterBatches: number | null = null -) { - "use workflow"; - - const totalBatches = Math.ceil(total / batchSize); - - for (let batch = 1; batch <= totalBatches; batch++) { - const start = (batch - 1) * batchSize + 1; - const end = Math.min(total, batch * batchSize); - - // Crash simulation: after the specified batch, emit crash + pause + resume - if (crashAfterBatches !== null && batch === crashAfterBatches + 1) { - await emitCrashAndResume(crashAfterBatches, batch, batchSize); - } - - await processBatch(batch, start, end); - } - - await emitComplete(totalBatches, total); - await emitDone(totalBatches, total); - - return { total, batchSize, status: "done" as const }; -} - -async function processBatch( - 
batch: number, - start: number, - end: number -) { - "use step"; - - const writer = getWritable().getWriter(); - const label = `${formatNumber(start)}\u2013${formatNumber(end)}`; - - try { - await writer.write({ type: "batch_start", batch, start, end, label }); - - // Demo: simulate processing time for visualization - await delay(BATCH_STEP_MS); - - await writer.write({ type: "batch_done", batch, start, end, label }); - } finally { - writer.releaseLock(); - } -} - -async function emitCrashAndResume( - crashAfterBatch: number, - resumeFromBatch: number, - batchSize: number -) { - "use step"; - - const writer = getWritable().getWriter(); - - try { - const nextRecord = crashAfterBatch * batchSize + 1; - await writer.write({ - type: "crash", - afterBatch: crashAfterBatch, - message: `Simulated crash after batch ${crashAfterBatch}. Resume continues at record ${formatNumber(nextRecord)}.`, - }); - - // Demo: brief pause to simulate downtime - await delay(800); - - await writer.write({ - type: "resume", - fromBatch: resumeFromBatch, - }); - } finally { - writer.releaseLock(); - } -} - -async function emitDone(totalBatches: number, processedRecords: number) { - "use step"; - - const writer = getWritable().getWriter(); - try { - await writer.write({ type: "done", status: "done", totalBatches, processedRecords }); - } finally { - writer.releaseLock(); - } -} - -async function emitComplete(totalBatches: number, processedRecords: number) { - "use step"; - - const writer = getWritable().getWriter(); - - try { - await writer.write({ type: "complete", totalBatches, processedRecords }); - } finally { - writer.releaseLock(); - } -} -``` - ## Key APIs -- [`"use workflow"`](/docs/foundations/workflows-and-steps) — marks the orchestrator function -- [`"use step"`](/docs/foundations/workflows-and-steps) — marks functions with full Node.js access -- [`getWritable()`](/docs/api-reference/workflow/get-writable) — streams events to the client +- [`"use 
workflow"`](/docs/api-reference/workflow/use-workflow) — declares the orchestrator function diff --git a/docs/content/docs/cookbook/data-processing/competing-consumers.mdx b/docs/content/docs/cookbook/data-processing/competing-consumers.mdx index 4247e133b6..7af9d7e708 100644 --- a/docs/content/docs/cookbook/data-processing/competing-consumers.mdx +++ b/docs/content/docs/cookbook/data-processing/competing-consumers.mdx @@ -5,13 +5,11 @@ type: guide summary: Multiple workflow instances race to claim items from a shared queue — only one wins each item. --- -Multiple workflow instances race to claim items from a shared queue — only one wins each item. Use this pattern when you need exactly-once processing with multiple parallel consumers. +Use competing consumers when multiple workers should process items from a shared queue with exactly-once semantics. ## Pattern -For each item in the queue, multiple consumers attempt to claim it. The runtime's deterministic workflow IDs ensure that duplicate starts are no-ops — only the first consumer to claim an item processes it. - -### Simplified +For each item, multiple consumers attempt to claim it. The runtime's deterministic workflow IDs ensure that duplicate starts are no-ops — only the first consumer to claim an item processes it. ```typescript lineNumbers declare function processItem(itemId: string, consumers: string[]): Promise<{ itemId: string; claimedBy: string }>; // @setup @@ -31,164 +29,6 @@ export async function competingConsumers(items: string[], consumers: string[]) { } ``` -### Full Implementation - -```typescript lineNumbers -// getWritable + getStepMetadata are used here to stream demo UI events. -// A production workflow wouldn't need these unless it has its own streaming UI. 
-import { getStepMetadata, getWritable } from "workflow"; - -export type ItemId = string; -export type ConsumerId = string; - -export type CCEvent = - | { type: "claiming"; itemId: string; consumerId: string } - | { type: "claimed"; itemId: string; consumerId: string } - | { type: "duplicate"; itemId: string; consumerId: string; wonBy: string } - | { type: "processing"; itemId: string; consumerId: string } - | { type: "processed"; itemId: string; consumerId: string } - | { type: "done"; summary: { processed: number; duplicatesBlocked: number } }; - -type ItemResult = { - itemId: string; - claimedBy: string; - duplicateAttempts: number; - status: "processed"; -}; - -type QueueReport = { - status: "done"; - results: ItemResult[]; - summary: { - processed: number; - duplicatesBlocked: number; - }; -}; - -// Demo: simulated processing latency so the UI can show progress -const CLAIM_DELAY_MS = 400; -const PROCESS_DELAY_MS = 800; -const SUMMARY_DELAY_MS = 500; - -function delay(ms: number): Promise { - return new Promise((resolve) => setTimeout(resolve, ms)); -} - -// Each item is processed exactly once. Multiple consumers attempt to claim -// the same item, but deterministic workflow IDs mean duplicate starts are -// no-ops — the runtime deduplicates at the execution level. -export async function competingConsumers( - items: string[], - consumers: string[] -): Promise { - "use workflow"; - - const results: ItemResult[] = []; - - // Simulate consumers racing to claim each item. In production, - // start() with id: `process-item-${itemId}` deduplicates automatically. 
- for (const itemId of items) { - const result = await processItem(itemId, consumers); - results.push(result); - } - - return recordResults(results); -} - -async function processItem( - itemId: string, - consumers: string[] -): Promise { - "use step"; - - const writer = getWritable().getWriter(); - const { attempt } = getStepMetadata(); - - try { - // Simulate multiple consumers trying to claim the same item. - // The first consumer wins; the rest are blocked as duplicates. - const winnerIndex = Math.abs(hashCode(itemId)) % consumers.length; - const winner = consumers[winnerIndex]; - let duplicateAttempts = 0; - - for (let i = 0; i < consumers.length; i++) { - const consumerId = consumers[i]; - await writer.write({ type: "claiming", itemId, consumerId }); - await delay(CLAIM_DELAY_MS); - - if (i === winnerIndex) { - await writer.write({ type: "claimed", itemId, consumerId }); - } else { - duplicateAttempts++; - await writer.write({ - type: "duplicate", - itemId, - consumerId, - wonBy: winner, - }); - } - } - - // The winning consumer processes the item - await writer.write({ type: "processing", itemId, consumerId: winner }); - await delay(PROCESS_DELAY_MS); - await writer.write({ type: "processed", itemId, consumerId: winner }); - - return { - itemId, - claimedBy: winner, - duplicateAttempts, - status: "processed", - }; - } finally { - writer.releaseLock(); - } -} - -async function recordResults( - results: ItemResult[] -): Promise { - "use step"; - - const writer = getWritable().getWriter(); - - try { - await delay(SUMMARY_DELAY_MS); - - const processed = results.length; - const duplicatesBlocked = results.reduce( - (sum, r) => sum + r.duplicateAttempts, - 0 - ); - - const report: QueueReport = { - status: "done", - results, - summary: { processed, duplicatesBlocked }, - }; - - await writer.write({ type: "done", summary: report.summary }); - return report; - } finally { - writer.releaseLock(); - } -} - -// Simple deterministic hash for assigning items to consumers 
-function hashCode(str: string): number { - let hash = 0; - for (let i = 0; i < str.length; i++) { - const char = str.charCodeAt(i); - hash = (hash << 5) - hash + char; - hash |= 0; - } - return hash; -} -``` - ## Key APIs -- [`"use workflow"`](/docs/foundations/workflows-and-steps) — marks the orchestrator function -- [`"use step"`](/docs/foundations/workflows-and-steps) — marks functions with full Node.js access -- [`getStepMetadata()`](/docs/api-reference/workflow/get-step-metadata) — access step attempt number and metadata -- [`getWritable()`](/docs/api-reference/workflow/get-writable) — streams events to the client +- [`"use workflow"`](/docs/api-reference/workflow/use-workflow) — declares the orchestrator function diff --git a/docs/content/docs/cookbook/data-processing/map-reduce.mdx b/docs/content/docs/cookbook/data-processing/map-reduce.mdx index 74abbd5ee7..c721d15f3e 100644 --- a/docs/content/docs/cookbook/data-processing/map-reduce.mdx +++ b/docs/content/docs/cookbook/data-processing/map-reduce.mdx @@ -5,14 +5,12 @@ type: guide summary: Partition a large analytics dataset into chunks, process in parallel, and merge into one report. --- -Partition a large analytics dataset into chunks, process in parallel, and merge into one report. Use this pattern when you can split work into independent partitions that are combined at the end. +Use map-reduce when you can split work into independent partitions that are combined at the end. ## Pattern Partition the input, fan out with `Promise.all()` so each partition runs as a parallel step, then pass all partial results into a reduce step that merges them into a single output. 
-### Simplified - ```typescript lineNumbers declare function mapPartition(index: number, chunk: number[]): Promise<{ sum: number; count: number }>; // @setup declare function reduceResults(jobId: string, results: { sum: number; count: number }[]): Promise<{ totalSum: number; average: number }>; // @setup @@ -24,178 +22,20 @@ export async function mapReduce( ) { "use workflow"; - // Partition const partitions: number[][] = []; for (let i = 0; i < items.length; i += chunkSize) { partitions.push(items.slice(i, i + chunkSize)); } - // Map in parallel const results = await Promise.all( partitions.map((chunk, i) => mapPartition(i, chunk)) ); - // Reduce return reduceResults(jobId, results); } ``` -### Full Implementation - -```typescript lineNumbers -// getWritable + getStepMetadata are used here to stream demo UI events. -// A production workflow wouldn't need these unless it has its own streaming UI. -import { getWritable } from "workflow"; - -export type MapReduceEvent = - | { type: "partitioning"; totalItems: number; chunkCount: number } - | { type: "partition_created"; partitionIndex: number; itemCount: number } - | { type: "mapping"; partitionIndex: number } - | { type: "mapped"; partitionIndex: number; partialSum: number; partialCount: number } - | { type: "reducing" } - | { type: "done"; summary: { totalSum: number; totalCount: number; average: number } }; - -type PartitionResult = { - partitionIndex: number; - sum: number; - count: number; -}; - -type MapReduceReport = { - jobId: string; - status: "done"; - partitions: PartitionResult[]; - summary: { - totalSum: number; - totalCount: number; - average: number; - }; -}; - -// Demo: simulate processing latency so the UI can show progress. -// In production, these delays would be replaced by actual computation. 
-const PARTITION_DELAY_MS = 400; -const MAP_DELAY_MS = 600; -const REDUCE_DELAY_MS = 500; - -function delay(ms: number): Promise { - return new Promise((resolve) => setTimeout(resolve, ms)); -} - -const DEFAULT_CHUNK_SIZE = 3; - -export function partitionInput(items: number[], chunkSize: number = DEFAULT_CHUNK_SIZE): number[][] { - const chunks: number[][] = []; - for (let i = 0; i < items.length; i += chunkSize) { - chunks.push(items.slice(i, i + chunkSize)); - } - return chunks; -} - -// Demo entry point. Partitions input, maps partitions in parallel with -// Promise.all(), and reduces into a single aggregate result. -export async function mapReduce( - jobId: string, - items: number[] = [10, 20, 30, 40, 50, 60, 70, 80, 90], - chunkSize: number = DEFAULT_CHUNK_SIZE -): Promise { - "use workflow"; - - const writer = getWritable().getWriter(); - - try { - const partitions = partitionInput(items, chunkSize); - - await writer.write({ - type: "partitioning", - totalItems: items.length, - chunkCount: partitions.length, - }); - - await delay(PARTITION_DELAY_MS); - - for (let i = 0; i < partitions.length; i++) { - await writer.write({ - type: "partition_created", - partitionIndex: i, - itemCount: partitions[i].length, - }); - } - - // Map: process all partitions in parallel with Promise.all() - const partitionResults = await Promise.all( - partitions.map((chunk, index) => mapPartition(index, chunk)) - ); - - // Reduce: combine all partition results into a final aggregate - return reduceResults(jobId, partitionResults); - } finally { - writer.releaseLock(); - } -} - -async function mapPartition( - partitionIndex: number, - chunk: number[] -): Promise { - "use step"; - - const writer = getWritable().getWriter(); - - try { - await writer.write({ type: "mapping", partitionIndex }); - await delay(MAP_DELAY_MS); - - const sum = chunk.reduce((acc, val) => acc + val, 0); - const count = chunk.length; - - await writer.write({ - type: "mapped", - partitionIndex, - partialSum: sum, 
- partialCount: count, - }); - - return { partitionIndex, sum, count }; - } finally { - writer.releaseLock(); - } -} - -async function reduceResults( - jobId: string, - partitionResults: PartitionResult[] -): Promise { - "use step"; - - const writer = getWritable().getWriter(); - - try { - await writer.write({ type: "reducing" }); - await delay(REDUCE_DELAY_MS); - - const totalSum = partitionResults.reduce((acc, r) => acc + r.sum, 0); - const totalCount = partitionResults.reduce((acc, r) => acc + r.count, 0); - const average = totalCount > 0 ? totalSum / totalCount : 0; - - const report: MapReduceReport = { - jobId, - status: "done", - partitions: partitionResults, - summary: { totalSum, totalCount, average }, - }; - - await writer.write({ type: "done", summary: report.summary }); - - return report; - } finally { - writer.releaseLock(); - } -} -``` - ## Key APIs -- [`"use workflow"`](/docs/foundations/workflows-and-steps) — marks the orchestrator function -- [`"use step"`](/docs/foundations/workflows-and-steps) — marks functions with full Node.js access -- [`getWritable()`](/docs/api-reference/workflow/get-writable) — streams events to the client +- [`"use workflow"`](/docs/api-reference/workflow/use-workflow) — declares the orchestrator function +- [`Promise.all()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise/all) — runs partitions in parallel diff --git a/docs/content/docs/cookbook/data-processing/pipeline.mdx b/docs/content/docs/cookbook/data-processing/pipeline.mdx index d9a24c85ba..0e5932026f 100644 --- a/docs/content/docs/cookbook/data-processing/pipeline.mdx +++ b/docs/content/docs/cookbook/data-processing/pipeline.mdx @@ -5,14 +5,12 @@ type: guide summary: Run a 4-stage ETL (extract, transform, validate, load) with live progress streaming. --- -Run a 4-stage ETL (extract, transform, validate, load) with live progress streaming. 
Use this pattern when work must flow through a fixed sequence of stages where each stage depends on the previous one. +Use pipeline when work must flow through a fixed sequence of stages where each depends on the previous one. ## Pattern Define an ordered list of stages and loop through them sequentially. Each stage runs as its own step, so a failure at any point is retried independently without re-running earlier stages. -### Simplified - ```typescript lineNumbers declare function runPipelineStep(name: string, index: number, total: number): Promise; // @setup @@ -29,129 +27,6 @@ export async function pipeline(documentId: string) { } ``` -### Full Implementation - -```typescript lineNumbers -import { getWritable } from "workflow"; - -export type PipelineEvent = - | { type: "step_start"; step: string; index: number; total: number } - | { type: "step_progress"; step: string; percent: number; message: string } - | { - type: "step_done"; - step: string; - index: number; - total: number; - durationMs: number; - } - | { type: "pipeline_done"; totalMs: number }; - -export async function pipeline( - documentId: string -): Promise<{ status: "completed"; steps: number }> { - "use workflow"; - - void documentId; - const steps = ["Extract", "Transform", "Validate", "Load"]; - const startMs = Date.now(); - - for (let i = 0; i < steps.length; i++) { - await runPipelineStep(steps[i], i, steps.length); - } - - await emitPipelineDone(startMs); - - return { status: "completed", steps: steps.length }; -} - -async function emitPipelineDone(startMs: number): Promise { - "use step"; - - const writer = getWritable().getWriter(); - try { - await writer.write({ type: "pipeline_done", totalMs: Date.now() - startMs }); - } finally { - writer.releaseLock(); - } -} - -async function runPipelineStep( - name: string, - index: number, - total: number -): Promise { - "use step"; - - const writer = getWritable().getWriter(); - const startMs = Date.now(); - - try { - await writer.write({ type: 
"step_start", step: name, index, total }); - - // Simulate work with progress updates - for (let pct = 0; pct <= 100; pct += 20) { - await new Promise((r) => setTimeout(r, 150)); - await writer.write({ - type: "step_progress", - step: name, - percent: pct, - message: getProgressMessage(name, pct), - }); - } - - await writer.write({ - type: "step_done", - step: name, - index, - total, - durationMs: Date.now() - startMs, - }); - } finally { - writer.releaseLock(); - } -} - -function getProgressMessage(step: string, pct: number): string { - const messages: Record = { - Extract: [ - "Connecting to source...", - "Reading metadata...", - "Parsing fields...", - "Extracting content...", - "Buffering records...", - "Extract complete", - ], - Transform: [ - "Initializing rules...", - "Mapping schemas...", - "Converting types...", - "Normalizing values...", - "Applying transforms...", - "Transform complete", - ], - Validate: [ - "Loading constraints...", - "Checking required fields...", - "Validating types...", - "Running business rules...", - "Final validation...", - "Validation complete", - ], - Load: [ - "Connecting to target...", - "Preparing batch...", - "Writing records...", - "Updating indexes...", - "Flushing buffers...", - "Load complete", - ], - }; - return messages[step]?.[pct / 20] ?? 
"Processing..."; -} -``` - ## Key APIs -- [`"use workflow"`](/docs/foundations/workflows-and-steps) — marks the orchestrator function -- [`"use step"`](/docs/foundations/workflows-and-steps) — marks functions with full Node.js access -- [`getWritable()`](/docs/api-reference/workflow/get-writable) — streams events to the client +- [`"use workflow"`](/docs/api-reference/workflow/use-workflow) — declares the orchestrator function diff --git a/docs/content/docs/cookbook/data-processing/priority-queue.mdx b/docs/content/docs/cookbook/data-processing/priority-queue.mdx index 2787705bc2..0577275084 100644 --- a/docs/content/docs/cookbook/data-processing/priority-queue.mdx +++ b/docs/content/docs/cookbook/data-processing/priority-queue.mdx @@ -5,13 +5,11 @@ type: guide summary: Process enterprise-tier jobs before free-tier jobs when the queue is backed up. --- -Process enterprise-tier jobs before free-tier jobs when the queue is backed up. Use this pattern when work items have different urgency levels and higher-priority items should be processed first. +Use priority queue when work items have different urgency levels and higher-priority items should be processed first. ## Pattern -Accept a list of tasks with priority labels, sort them by priority in a step, then process each task sequentially in priority order. The sort step ensures the highest-priority items are always handled first. - -### Simplified +Sort tasks by priority in a step, then process each sequentially in priority order. The sort step ensures the highest-priority items are always handled first. 
```typescript lineNumbers type Priority = "urgent" | "high" | "medium" | "low"; @@ -33,137 +31,6 @@ export async function priorityQueueFlow(tasks: TaskItem[]) { } ``` -### Full Implementation - -```typescript lineNumbers -import { getWritable, sleep } from "workflow"; - -export type Priority = "urgent" | "high" | "medium" | "low"; - -export type QueueEvent = - | { type: "tasks_received"; count: number; priorities: Record } - | { type: "sorting"; strategy: string } - | { type: "sorted"; order: string[] } - | { type: "processing_task"; taskId: string; priority: Priority; position: number } - | { type: "task_complete"; taskId: string; priority: Priority; result: string } - | { type: "done"; processed: number; summary: Record }; - -export interface PriorityQueueResult { - processed: number; - summary: Record; -} - -export type TaskItem = { - id: string; - label: string; - priority: Priority; -}; - -const PRIORITY_ORDER: Record = { - urgent: 0, - high: 1, - medium: 2, - low: 3, -}; - -// Demo timing -const SORT_DELAY_MS = 600; -const PROCESS_DELAY_MS = 500; - -function delay(ms: number): Promise { - return new Promise((resolve) => setTimeout(resolve, ms)); -} - -function sortByPriority(tasks: TaskItem[]): TaskItem[] { - return [...tasks].sort( - (a, b) => PRIORITY_ORDER[a.priority] - PRIORITY_ORDER[b.priority] - ); -} - -export async function priorityQueueFlow( - tasks: TaskItem[] -): Promise { - "use workflow"; - - // Step 1: Receive tasks and tally priorities - const priorities: Record = { urgent: 0, high: 0, medium: 0, low: 0 }; - for (const task of tasks) { - priorities[task.priority] += 1; - } - await emitEvent({ type: "tasks_received", count: tasks.length, priorities }); - - // Step 2: Sort tasks by priority - const sorted = await sortTasks(tasks); - - // Step 3: Process each task in priority order - const summary: Record = { urgent: 0, high: 0, medium: 0, low: 0 }; - for (let i = 0; i < sorted.length; i++) { - await processTask(sorted[i], i + 1); - 
summary[sorted[i].priority] += 1; - } - - // Step 4: Emit completion - await emitEvent({ type: "done", processed: sorted.length, summary }); - - return { processed: sorted.length, summary }; -} - -async function sortTasks(tasks: TaskItem[]): Promise { - "use step"; - - const writer = getWritable().getWriter(); - try { - await writer.write({ type: "sorting", strategy: "priority-weighted" }); - await delay(SORT_DELAY_MS); - - const sorted = sortByPriority(tasks); - await writer.write({ type: "sorted", order: sorted.map((t) => t.id) }); - return sorted; - } finally { - writer.releaseLock(); - } -} - -async function processTask( - task: TaskItem, - position: number -): Promise { - "use step"; - - const writer = getWritable().getWriter(); - try { - await writer.write({ - type: "processing_task", - taskId: task.id, - priority: task.priority, - position, - }); - await delay(PROCESS_DELAY_MS); - - await writer.write({ - type: "task_complete", - taskId: task.id, - priority: task.priority, - result: `${task.label} completed`, - }); - } finally { - writer.releaseLock(); - } -} - -async function emitEvent(event: QueueEvent): Promise { - "use step"; - const writer = getWritable().getWriter(); - try { - await writer.write(event); - } finally { - writer.releaseLock(); - } -} -``` - ## Key APIs -- [`"use workflow"`](/docs/foundations/workflows-and-steps) — marks the orchestrator function -- [`"use step"`](/docs/foundations/workflows-and-steps) — marks functions with full Node.js access -- [`getWritable()`](/docs/api-reference/workflow/get-writable) — streams events to the client +- [`"use workflow"`](/docs/api-reference/workflow/use-workflow) — declares the orchestrator function diff --git a/docs/content/docs/cookbook/data-processing/resequencer.mdx b/docs/content/docs/cookbook/data-processing/resequencer.mdx index 8639a3f9c5..a8883f765c 100644 --- a/docs/content/docs/cookbook/data-processing/resequencer.mdx +++ b/docs/content/docs/cookbook/data-processing/resequencer.mdx @@ -5,16 
+5,16 @@ type: guide summary: Buffer out-of-order webhook fragments and release them in the correct sequence. --- -Buffer out-of-order webhook fragments and release them in the correct sequence. Use this pattern when messages arrive out of order but downstream processing requires them in sequence. +Use resequencer when messages arrive out of order but downstream processing requires them in sequence. ## Pattern -Create one hook per expected fragment. As fragments arrive (in any order), buffer them. When the next expected sequence number arrives, release it and drain any contiguous buffered fragments. This guarantees in-order delivery regardless of arrival order. - -### Simplified +Create one hook per expected fragment. As fragments arrive (in any order), buffer them. When the next expected sequence number arrives, release it and drain any contiguous buffered fragments. ```typescript lineNumbers -import { defineHook, FatalError } from "workflow"; +import { defineHook } from "workflow"; + +declare function processFragment(seq: number, payload: string): Promise; // @setup export const fragmentHook = defineHook<{ seq: number; payload: string }>(); @@ -23,7 +23,10 @@ export async function resequencer(batchId: string, expectedCount: number) { const hooks = []; for (let i = 1; i <= expectedCount; i++) { - hooks.push({ seq: i, hook: fragmentHook.create({ token: `resequencer:${batchId}:${i}` }) }); + hooks.push({ + seq: i, + hook: fragmentHook.create({ token: `resequencer:${batchId}:${i}` }), + }); } const buffer = new Map(); @@ -55,151 +58,8 @@ export async function resequencer(batchId: string, expectedCount: number) { } ``` -### Full Implementation - -```typescript lineNumbers -import { defineHook, getWritable, FatalError } from "workflow"; - -// Typed events streamed to the UI via getWritable() -export type ResequencerEvent = - | { type: "waiting"; batchId: string; expectedCount: number; tokens: string[] } - | { type: "fragment_received"; batchId: string; seq: number; 
payload: string } - | { type: "fragment_buffered"; batchId: string; seq: number; bufferSize: number } - | { type: "fragment_released"; batchId: string; seq: number; payload: string; nextExpected: number } - | { type: "error"; batchId: string; message: string } - | { type: "done"; batchId: string; ordered: string[] }; - -export type FragmentPayload = { - seq: number; - payload: string; -}; - -export const fragmentHook = defineHook(); - -export async function resequencer( - batchId: string, - expectedCount: number -) { - "use workflow"; - - // Create one hook per expected fragment - const tokens: string[] = []; - const hooks = []; - for (let i = 1; i <= expectedCount; i++) { - const token = `resequencer:${batchId}:${i}`; - tokens.push(token); - hooks.push({ seq: i, hook: fragmentHook.create({ token }), token }); - } - - await emit({ - type: "waiting", - batchId, - expectedCount, - tokens, - }); - - // Buffer for out-of-order fragments - const buffer = new Map(); - const ordered: string[] = []; - let nextExpected = 1; - - // Wait for all fragments — they can arrive in any order - const pending = new Map( - hooks.map(({ seq, hook }) => [seq, hook.then((data) => ({ seq, payload: data.payload }))]) - ); - - while (ordered.length < expectedCount) { - // Race all still-pending hooks - const result = await Promise.race([...pending.values()]); - pending.delete(result.seq); - - await emit({ - type: "fragment_received", - batchId, - seq: result.seq, - payload: result.payload, - }); - - // Guard: duplicate sequence (already released or buffered) - if (ordered[result.seq - 1] !== undefined || buffer.has(result.seq)) { - throw new FatalError( - `Duplicate sequence ${result.seq} in batch ${batchId}` - ); - } - - // Guard: sequence out of range - if (result.seq < 1 || result.seq > expectedCount) { - throw new FatalError( - `Sequence ${result.seq} out of range [1, ${expectedCount}] in batch ${batchId}` - ); - } - - if (result.seq === nextExpected) { - // Fragment is the one we need 
— release immediately - ordered.push(result.payload); - nextExpected++; - - await emit({ - type: "fragment_released", - batchId, - seq: result.seq, - payload: result.payload, - nextExpected, - }); - - // Drain any contiguous buffered fragments - while (buffer.has(nextExpected)) { - const bufferedPayload = buffer.get(nextExpected)!; - buffer.delete(nextExpected); - ordered.push(bufferedPayload); - - await emit({ - type: "fragment_released", - batchId, - seq: nextExpected, - payload: bufferedPayload, - nextExpected: nextExpected + 1, - }); - - nextExpected++; - } - } else { - // Out of order — buffer it - buffer.set(result.seq, result.payload); - - await emit({ - type: "fragment_buffered", - batchId, - seq: result.seq, - bufferSize: buffer.size, - }); - } - } - - await emit({ type: "done", batchId, ordered }); - - return { batchId, ordered, status: "complete" as const }; -} - -/** - * Step: Emit a single event to the UI stream. - * Re-acquires the writer inside the step so it survives durable suspension. 
- */ -async function emit(event: T): Promise { - "use step"; - const writer = getWritable().getWriter(); - try { - await writer.write(event); - } finally { - writer.releaseLock(); - } -} -``` - ## Key APIs -- [`"use workflow"`](/docs/foundations/workflows-and-steps) — marks the orchestrator function -- [`"use step"`](/docs/foundations/workflows-and-steps) — marks functions with full Node.js access +- [`"use workflow"`](/docs/api-reference/workflow/use-workflow) — declares the orchestrator function - [`defineHook()`](/docs/api-reference/workflow/define-hook) — creates a named hook that suspends until signaled -- [`FatalError`](/docs/api-reference/workflow/fatal-error) — prevents automatic retries on permanent failures -- [`getWritable()`](/docs/api-reference/workflow/get-writable) — streams events to the client +- [`Promise.race()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise/race) — waits for the next fragment to arrive diff --git a/docs/content/docs/cookbook/data-processing/scatter-gather.mdx b/docs/content/docs/cookbook/data-processing/scatter-gather.mdx index 7853e9b4ee..b1872aad07 100644 --- a/docs/content/docs/cookbook/data-processing/scatter-gather.mdx +++ b/docs/content/docs/cookbook/data-processing/scatter-gather.mdx @@ -5,14 +5,12 @@ type: guide summary: Query 4 shipping providers for quotes in parallel and pick the cheapest one that responds. --- -Query 4 shipping providers for quotes in parallel and pick the cheapest one that responds. Use this pattern when you need to fan out the same request to multiple providers and select the best result. +Use scatter-gather when you need to fan out the same request to multiple providers and select the best result. ## Pattern Launch one step per provider using `Promise.allSettled()` so failures don't cancel the others. A final gather step picks the winner from the successful results. 
-### Simplified - ```typescript lineNumbers declare function fetchFedExQuote(packageId: string): Promise<{ provider: string; price: number; days: number }>; // @setup declare function fetchUpsQuote(packageId: string): Promise<{ provider: string; price: number; days: number }>; // @setup @@ -41,214 +39,7 @@ export async function scatterGather(packageId: string) { } ``` -### Full Implementation - -```typescript lineNumbers -// getWritable is used here to stream demo UI events. -// A production workflow wouldn't need this unless it has its own streaming UI. -import { getWritable } from "workflow"; - -export type ProviderId = "fedex" | "ups" | "dhl" | "usps"; - -export type ProviderEvent = - | { type: "provider_querying"; provider: string } - | { type: "provider_quoted"; provider: string; price: number; days: number } - | { type: "provider_failed"; provider: string; error: string } - | { type: "gathering" } - | { type: "done"; winner: { provider: string; price: number; days: number } | null }; - -type ProviderQuote = { - provider: ProviderId; - price: number; - days: number; -}; - -type ProviderResult = { - provider: ProviderId; - status: "quoted" | "failed"; - price?: number; - days?: number; - error?: string; -}; - -type ScatterGatherResult = { - packageId: string; - status: "done"; - results: ProviderResult[]; - winner: ProviderQuote | null; -}; - -// Demo: simulate real-world network latency so the UI can show progress. 
-const PROVIDER_DELAY_MS: Record = { - fedex: 700, - ups: 900, - dhl: 1100, - usps: 1300, -}; - -const PROVIDER_QUOTES: Record = { - fedex: { price: 24.99, days: 2 }, - ups: { price: 19.50, days: 3 }, - dhl: { price: 31.00, days: 4 }, - usps: { price: 12.75, days: 5 }, -}; - -function delay(ms: number): Promise { - return new Promise((resolve) => setTimeout(resolve, ms)); -} - -export async function scatterGather( - packageId: string, - failProviders: ProviderId[] = [] -): Promise { - "use workflow"; - - const providers: Array<{ - provider: ProviderId; - fetch: () => Promise; - }> = [ - { provider: "fedex", fetch: () => fetchFedExQuote(packageId, failProviders) }, - { provider: "ups", fetch: () => fetchUpsQuote(packageId, failProviders) }, - { provider: "dhl", fetch: () => fetchDhlQuote(packageId, failProviders) }, - { provider: "usps", fetch: () => fetchUspsQuote(packageId, failProviders) }, - ]; - - const settled = await Promise.allSettled( - providers.map((p) => p.fetch()) - ); - - const results: ProviderResult[] = settled.map((result, index) => { - const provider = providers[index].provider; - - if (result.status === "fulfilled") { - return { - provider, - status: "quoted", - price: result.value.price, - days: result.value.days, - }; - } - - return { - provider, - status: "failed", - error: result.reason instanceof Error ? 
result.reason.message : "Unknown error", - }; - }); - - return gatherBestQuote(packageId, results); -} - -async function fetchProviderQuote( - provider: ProviderId, - packageId: string, - failProviders: ProviderId[] -): Promise { - // Demo: stream progress events to the UI via getWritable() - const writer = getWritable().getWriter(); - - try { - await writer.write({ type: "provider_querying", provider }); - await delay(PROVIDER_DELAY_MS[provider]); - - if (failProviders.includes(provider)) { - const error = `${provider.toUpperCase()} service unavailable`; - await writer.write({ type: "provider_failed", provider, error }); - throw new Error(error); - } - - const quote = PROVIDER_QUOTES[provider]; - await writer.write({ - type: "provider_quoted", - provider, - price: quote.price, - days: quote.days, - }); - - return { provider, price: quote.price, days: quote.days }; - } finally { - writer.releaseLock(); - } -} - -async function fetchFedExQuote( - packageId: string, - failProviders: ProviderId[] -): Promise { - "use step"; - return fetchProviderQuote("fedex", packageId, failProviders); -} - -async function fetchUpsQuote( - packageId: string, - failProviders: ProviderId[] -): Promise { - "use step"; - return fetchProviderQuote("ups", packageId, failProviders); -} - -async function fetchDhlQuote( - packageId: string, - failProviders: ProviderId[] -): Promise { - "use step"; - return fetchProviderQuote("dhl", packageId, failProviders); -} - -async function fetchUspsQuote( - packageId: string, - failProviders: ProviderId[] -): Promise { - "use step"; - return fetchProviderQuote("usps", packageId, failProviders); -} - -async function gatherBestQuote( - packageId: string, - results: ProviderResult[] -): Promise { - "use step"; - const writer = getWritable().getWriter(); - - try { - await writer.write({ type: "gathering" }); - await delay(500); - - const quotes = results.filter( - (r): r is ProviderResult & { price: number; days: number } => - r.status === "quoted" && 
r.price !== undefined && r.days !== undefined - ); - - const winner = - quotes.length > 0 - ? quotes.reduce((best, current) => - current.price < best.price ? current : best - ) - : null; - - await writer.write({ - type: "done", - winner: winner - ? { provider: winner.provider, price: winner.price, days: winner.days } - : null, - }); - - return { - packageId, - status: "done", - results, - winner: winner - ? { provider: winner.provider, price: winner.price, days: winner.days } - : null, - }; - } finally { - writer.releaseLock(); - } -} -``` - ## Key APIs -- [`"use workflow"`](/docs/foundations/workflows-and-steps) — marks the orchestrator function -- [`"use step"`](/docs/foundations/workflows-and-steps) — marks functions with full Node.js access -- [`getWritable()`](/docs/api-reference/workflow/get-writable) — streams events to the client +- [`"use workflow"`](/docs/api-reference/workflow/use-workflow) — declares the orchestrator function +- [`Promise.allSettled()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise/allSettled) — fans out to all providers, tolerating failures diff --git a/docs/content/docs/cookbook/data-processing/splitter.mdx b/docs/content/docs/cookbook/data-processing/splitter.mdx index ae3f102af6..b02d56a087 100644 --- a/docs/content/docs/cookbook/data-processing/splitter.mdx +++ b/docs/content/docs/cookbook/data-processing/splitter.mdx @@ -5,17 +5,13 @@ type: guide summary: Split a multi-item order into individual line items for independent validation and fulfillment. --- -Split a multi-item order into individual line items for independent validation and fulfillment. Use this pattern when a single input contains multiple items that should be processed independently. +Use splitter when a single input contains multiple items that should be processed independently. ## Pattern -Iterate over the items in the composite message, processing each as its own step. 
Each item goes through validation, reservation, and fulfillment independently. Failures in one item do not prevent the others from completing. - -### Simplified +Iterate over items in the composite message, processing each as its own step. Failures in one item do not prevent others from completing. ```typescript lineNumbers -import { FatalError } from "workflow"; - type LineItem = { sku: string; name: string; quantity: number; warehouse: string }; type Order = { orderId: string; items: LineItem[] }; @@ -36,205 +32,6 @@ export async function orderSplitter(order: Order) { } ``` -### Full Implementation - -```typescript lineNumbers -// getWritable + getStepMetadata are used here to stream demo UI events. -// A production workflow wouldn't need these unless it has its own streaming UI. -import { getWritable } from "workflow"; - -// Local FatalError — prevents the SDK's automatic retry for permanent failures. -// The workflow package does not export this class, so we define it here. -class FatalError extends Error { - constructor(message: string) { - super(message); - this.name = "FatalError"; - } -} - -export type LineItem = { - sku: string; - name: string; - quantity: number; - warehouse: string; -}; - -export type Order = { - orderId: string; - items: LineItem[]; -}; - -export type SplitterEvent = - | { type: "splitting"; orderId: string; itemCount: number } - | { type: "item_processing"; index: number; sku: string; name: string } - | { type: "item_validated"; index: number; sku: string } - | { type: "item_reserved"; index: number; sku: string; warehouse: string } - | { type: "item_fulfilled"; index: number; sku: string; hookToken: string } - | { type: "item_failed"; index: number; sku: string; error: string } - | { type: "aggregating" } - | { - type: "done"; - summary: { fulfilled: number; failed: number; total: number }; - }; - -type ItemResult = { - index: number; - sku: string; - status: "fulfilled" | "failed"; - hookToken?: string; - error?: string; -}; - -type 
SplitterReport = { - orderId: string; - status: "done"; - results: ItemResult[]; - summary: { fulfilled: number; failed: number; total: number }; -}; - -// Demo: configures which item indices should fail for the interactive UI. -export type DemoFailures = { - failIndices: number[]; -}; - -const NO_FAILURES: DemoFailures = { failIndices: [] }; - -const ITEM_DELAY_MS = 600; -const AGGREGATE_DELAY_MS = 400; - -function delay(ms: number): Promise { - return new Promise((resolve) => setTimeout(resolve, ms)); -} - -function emit(writer: WritableStreamDefaultWriter) { - return async (event: SplitterEvent) => { - await writer.write(event); - }; -} - -// The splitter pattern: receives a composite order, splits it into -// individual line items, and processes each one through validation, -// reservation, and fulfillment steps. -export async function orderSplitter( - order: Order, - failures: DemoFailures = NO_FAILURES -): Promise { - "use workflow"; - - const writer = getWritable().getWriter(); - const send = emit(writer); - - try { - await send({ - type: "splitting", - orderId: order.orderId, - itemCount: order.items.length, - }); - - // Split: process each line item as its own step sequence - const results: ItemResult[] = []; - for (let i = 0; i < order.items.length; i++) { - const item = order.items[i]; - const shouldFail = failures.failIndices.includes(i); - const result = await processLineItem( - order.orderId, - item, - i, - shouldFail - ); - results.push(result); - } - - // Aggregate results - await send({ type: "aggregating" }); - await delay(AGGREGATE_DELAY_MS); - - const fulfilled = results.filter((r) => r.status === "fulfilled").length; - const failed = results.length - fulfilled; - const summary = { fulfilled, failed, total: results.length }; - - await send({ type: "done", summary }); - - return { - orderId: order.orderId, - status: "done", - results, - summary, - }; - } finally { - writer.releaseLock(); - } -} - -async function processLineItem( - orderId: 
string, - item: LineItem, - index: number, - shouldFail: boolean -): Promise { - "use step"; - - const writer = getWritable().getWriter(); - const send = emit(writer); - - try { - await send({ - type: "item_processing", - index, - sku: item.sku, - name: item.name, - }); - await delay(ITEM_DELAY_MS); - - // Validate - await send({ type: "item_validated", index, sku: item.sku }); - await delay(ITEM_DELAY_MS / 2); - - // Simulate failure for demo - if (shouldFail) { - const error = `Insufficient stock for ${item.sku} at ${item.warehouse}`; - await send({ type: "item_failed", index, sku: item.sku, error }); - throw new FatalError(error); - } - - // Reserve inventory - await send({ - type: "item_reserved", - index, - sku: item.sku, - warehouse: item.warehouse, - }); - await delay(ITEM_DELAY_MS / 2); - - // Fulfill — deterministic hook token based on orderId + itemIndex - const hookToken = `${orderId}_item_${index}_${item.sku}`; - await send({ - type: "item_fulfilled", - index, - sku: item.sku, - hookToken, - }); - - return { index, sku: item.sku, status: "fulfilled", hookToken }; - } catch (err) { - if (err instanceof FatalError) { - return { - index, - sku: item.sku, - status: "failed", - error: err.message, - }; - } - throw err; - } finally { - writer.releaseLock(); - } -} -``` - ## Key APIs -- [`"use workflow"`](/docs/foundations/workflows-and-steps) — marks the orchestrator function -- [`"use step"`](/docs/foundations/workflows-and-steps) — marks functions with full Node.js access -- [`FatalError`](/docs/api-reference/workflow/fatal-error) — prevents automatic retries on permanent failures -- [`getWritable()`](/docs/api-reference/workflow/get-writable) — streams events to the client +- [`"use workflow"`](/docs/api-reference/workflow/use-workflow) — declares the orchestrator function diff --git a/docs/content/docs/cookbook/meta.json b/docs/content/docs/cookbook/meta.json index c640701cee..30665a9f25 100644 --- a/docs/content/docs/cookbook/meta.json +++ 
b/docs/content/docs/cookbook/meta.json @@ -2,21 +2,13 @@ "title": "Cookbook", "defaultOpen": true, "pages": [ - "---Payments & Orders---", "payments", - "---Approvals---", "approvals", - "---Resilience---", "resilience", - "---Notifications---", "notifications", - "---Webhooks & Callbacks---", "webhooks", - "---Data Processing---", "data-processing", - "---Routing---", "routing", - "---Observability---", "observability" ] } diff --git a/docs/content/docs/cookbook/notifications/fan-out.mdx b/docs/content/docs/cookbook/notifications/fan-out.mdx index bd375e48fb..12770202ea 100644 --- a/docs/content/docs/cookbook/notifications/fan-out.mdx +++ b/docs/content/docs/cookbook/notifications/fan-out.mdx @@ -5,13 +5,11 @@ type: guide summary: Broadcast an incident alert to Slack, email, SMS, and PagerDuty in parallel. --- -Broadcast an incident alert to Slack, email, SMS, and PagerDuty in parallel. +Broadcast an incident alert to multiple channels in parallel. Use this when a single event must trigger independent notifications and a failure in one channel should not block the others. ## Pattern -The workflow defines one step per notification channel and launches them all with `Promise.allSettled()`. Each channel runs independently — a failure in one does not block the others. Results are aggregated after all channels settle. - -### Simplified +Define one step per notification channel and launch them all with `Promise.allSettled()`. Each channel runs independently. Results are aggregated after all channels settle. ```typescript lineNumbers declare function sendSlackAlert(incidentId: string, message: string): Promise<{ providerId: string }>; // @setup @@ -38,261 +36,7 @@ export async function incidentFanOut(incidentId: string, message: string) { } ``` -### Full Implementation - -```typescript lineNumbers -// getWritable + getStepMetadata are used here to stream demo UI events. -// A production workflow wouldn't need these unless it has its own streaming UI. 
-import { getStepMetadata, getWritable } from "workflow"; - -// Local FatalError — prevents the SDK's automatic retry for permanent failures. -// The workflow package does not export this class, so we define it here. -class FatalError extends Error { - constructor(message: string) { - super(message); - this.name = "FatalError"; - } -} - -export type NotificationChannel = "slack" | "email" | "sms" | "pagerduty"; - -// Demo-only: configures which channels should fail (and how) in the -// interactive UI. In a real workflow you'd remove this entirely — your -// steps would call real APIs and failures would be organic. -export type DemoFailures = { - transient: NotificationChannel[]; - permanent: NotificationChannel[]; -}; - -export type ChannelEvent = - | { type: "channel_sending"; channel: string } - | { type: "channel_sent"; channel: string; providerId: string } - | { type: "channel_failed"; channel: string; error: string; attempt: number } - | { type: "channel_retrying"; channel: string; attempt: number } - | { type: "aggregating" } - | { type: "done"; summary: { ok: number; failed: number } }; - -type ChannelResult = { - channel: NotificationChannel; - status: "sent" | "failed"; - providerId?: string; - error?: string; -}; - -type IncidentReport = { - incidentId: string; - message: string; - status: "done"; - deliveries: ChannelResult[]; - summary: { - ok: number; - failed: number; - }; -}; - -const CHANNEL_ERROR_MESSAGES: Record = { - slack: "Slack API rate limit exceeded", - email: "Email provider returned 503", - sms: "SMS delivery failed: invalid number", - pagerduty: "PagerDuty integration is not configured", -}; - -// Demo: simulate real-world network latency so the UI can show progress. -// In production, these delays would be replaced by actual API calls. 
-const CHANNEL_DELAY_MS: Record = { - slack: 650, - pagerduty: 750, - email: 900, - sms: 1150, -}; - -const AGGREGATE_DELAY_MS = 500; - -// setTimeout is available here because delay() is only called from -// "use step" functions, which have full Node.js runtime access. -function delay(ms: number): Promise { - return new Promise((resolve) => setTimeout(resolve, ms)); -} - -const NO_FAILURES: DemoFailures = { transient: [], permanent: [] }; - -// Demo entry point. The `failures` parameter is only used by the interactive -// UI to let users toggle simulated failures — strip it out when adapting -// this workflow for production use. -export async function incidentFanOut( - incidentId: string, - message: string, - failures: DemoFailures = NO_FAILURES -): Promise { - "use workflow"; - - const fanOutTargets = [ - { - channel: "slack" as const, - send: () => sendSlackAlert(incidentId, message, failures), - }, - { - channel: "email" as const, - send: () => sendEmailAlert(incidentId, message, failures), - }, - { - channel: "sms" as const, - send: () => sendSmsAlert(incidentId, message, failures), - }, - { - channel: "pagerduty" as const, - send: () => sendPagerDutyAlert(incidentId, message, failures), - }, - ]; - - const settled = await Promise.allSettled( - fanOutTargets.map((target) => target.send()) - ); - - const deliveries: ChannelResult[] = settled.map((result, index) => { - const channel = fanOutTargets[index].channel; - - if (result.status === "fulfilled") { - return { - channel, - status: "sent", - providerId: result.value.providerId, - }; - } - - return { - channel, - status: "failed", - error: `${channel}: ${errorMessage(result.reason)}`, - }; - }); - - return aggregateResults(incidentId, message, deliveries); -} - -function errorMessage(reason: unknown): string { - if (reason instanceof Error) return reason.message; - if (typeof reason === "string") return reason; - return "Unknown delivery failure"; -} - -// Demo: shared implementation for all channel steps. 
In production you'd -// replace the delay + simulated failures with a real API call per channel. -// The getWritable() streaming and getStepMetadata() calls are also demo-only -// — they power the live execution log in the UI. -async function sendChannelAlert( - channel: NotificationChannel, - incidentId: string, - message: string, - failures: DemoFailures -): Promise<{ providerId: string }> { - const writer = getWritable().getWriter(); - const { attempt } = getStepMetadata(); - - try { - if (attempt > 1) { - await writer.write({ type: "channel_retrying", channel, attempt }); - } - - await writer.write({ type: "channel_sending", channel }); - await delay(CHANNEL_DELAY_MS[channel]); - - // Permanent failure — FatalError prevents the SDK's automatic retry, - // so the channel stays failed in Promise.allSettled(). - if (failures.permanent.includes(channel)) { - const error = CHANNEL_ERROR_MESSAGES[channel]; - await writer.write({ type: "channel_failed", channel, error, attempt }); - throw new FatalError(error); - } - - // Transient failure — throws a regular Error on attempt 1 so the SDK - // auto-retries. The retry will succeed, showing the recovery path. 
- if (attempt === 1 && failures.transient.includes(channel)) { - throw new Error(CHANNEL_ERROR_MESSAGES[channel]); - } - - const providerId = `${channel}_${incidentId}_${message.length}_${attempt}`; - await writer.write({ type: "channel_sent", channel, providerId }); - - return { providerId }; - } finally { - writer.releaseLock(); - } -} - -async function sendSlackAlert( - incidentId: string, - message: string, - failures: DemoFailures -): Promise<{ providerId: string }> { - "use step"; - return sendChannelAlert("slack", incidentId, message, failures); -} - -async function sendEmailAlert( - incidentId: string, - message: string, - failures: DemoFailures -): Promise<{ providerId: string }> { - "use step"; - return sendChannelAlert("email", incidentId, message, failures); -} - -async function sendSmsAlert( - incidentId: string, - message: string, - failures: DemoFailures -): Promise<{ providerId: string }> { - "use step"; - return sendChannelAlert("sms", incidentId, message, failures); -} - -async function sendPagerDutyAlert( - incidentId: string, - message: string, - failures: DemoFailures -): Promise<{ providerId: string }> { - "use step"; - return sendChannelAlert("pagerduty", incidentId, message, failures); -} - -async function aggregateResults( - incidentId: string, - message: string, - deliveries: ChannelResult[] -): Promise { - "use step"; - // Demo: stream aggregation progress to the UI - const writer = getWritable().getWriter(); - - try { - await writer.write({ type: "aggregating" }); - await delay(AGGREGATE_DELAY_MS); - - const ok = deliveries.filter((delivery) => delivery.status === "sent").length; - const failed = deliveries.length - ok; - const report: IncidentReport = { - incidentId, - message, - status: "done", - deliveries, - summary: { ok, failed }, - }; - - await writer.write({ type: "done", summary: report.summary }); - - return report; - } finally { - writer.releaseLock(); - } -} -``` - ## Key APIs -- [`"use 
workflow"`](/docs/foundations/workflows-and-steps) — marks the orchestrator function -- [`"use step"`](/docs/foundations/workflows-and-steps) — marks functions that run with full Node.js access +- [`"use workflow"`](/docs/api-reference/workflow/use-workflow) — declares the orchestrator function - [`Promise.allSettled()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise/allSettled) — fans out to all channels, isolating failures -- [`FatalError`](/docs/api-reference/workflow/fatal-error) — prevents automatic retry for permanent failures -- [`getStepMetadata()`](/docs/api-reference/step/get-step-metadata) — provides the current attempt number -- [`getWritable()`](/docs/api-reference/step/get-writable) — streams events to the caller diff --git a/docs/content/docs/cookbook/notifications/onboarding-drip.mdx b/docs/content/docs/cookbook/notifications/onboarding-drip.mdx index 1ea32ac489..44502cf134 100644 --- a/docs/content/docs/cookbook/notifications/onboarding-drip.mdx +++ b/docs/content/docs/cookbook/notifications/onboarding-drip.mdx @@ -5,13 +5,11 @@ type: guide summary: Send a welcome email on signup, a tips email after 2 days, and a check-in after a week. --- -Send a welcome email on signup, a tips email after 2 days, and a check-in after a week. +Send a welcome email on signup, a tips email after 2 days, and a check-in after a week. Because `sleep()` is durable, the workflow survives cold starts and restarts — even across days or weeks of waiting. ## Pattern -The workflow sends emails at scheduled intervals using `sleep()` between each step. Because `sleep()` is durable, the workflow survives cold starts and restarts — even across days or weeks of waiting. - -### Simplified +Send emails at scheduled intervals using `sleep()` between each step. The durable sleep means the workflow resumes exactly where it left off after any restart. 
```typescript lineNumbers import { sleep } from "workflow"; @@ -36,104 +34,7 @@ export async function onboardingDrip(email: string) { } ``` -### Full Implementation - -```typescript lineNumbers -import { getWritable, sleep } from "workflow"; - -export type DripEvent = - | { type: "email_sending"; day: number; label: string } - | { type: "email_sent"; day: number; label: string } - | { type: "sleeping"; duration: string; fromDay: number; toDay: number } - | { type: "done" }; - -const SEND_DELAY_MS = 600; - -function delay(ms: number): Promise { - return new Promise((resolve) => setTimeout(resolve, ms)); -} - -export async function runOnboardingDrip(email: string) { - "use workflow"; - - // Day 0: Welcome email - await sendWelcomeEmail(email); - - // Day 1: Getting started tips - await sleep("1d"); - await sendGettingStartedEmail(email); - - // Day 3: Feature highlights - await sleep("2d"); - await sendFeatureHighlightsEmail(email); - - // Day 7: Follow-up - await sleep("4d"); - await sendFollowUpEmail(email); - - return { email, status: "completed", totalDays: 7 }; -} - -async function sendWelcomeEmail(email: string) { - "use step"; - const writer = getWritable().getWriter(); - try { - await writer.write({ type: "email_sending", day: 0, label: "Welcome Email" }); - await delay(SEND_DELAY_MS); - await writer.write({ type: "email_sent", day: 0, label: "Welcome Email" }); - await writer.write({ type: "sleeping", duration: "1d", fromDay: 0, toDay: 1 }); - return { sent: true, day: 0 }; - } finally { - writer.releaseLock(); - } -} - -async function sendGettingStartedEmail(email: string) { - "use step"; - const writer = getWritable().getWriter(); - try { - await writer.write({ type: "email_sending", day: 1, label: "Getting Started Tips" }); - await delay(SEND_DELAY_MS); - await writer.write({ type: "email_sent", day: 1, label: "Getting Started Tips" }); - await writer.write({ type: "sleeping", duration: "2d", fromDay: 1, toDay: 3 }); - return { sent: true, day: 1 }; - } 
finally { - writer.releaseLock(); - } -} - -async function sendFeatureHighlightsEmail(email: string) { - "use step"; - const writer = getWritable().getWriter(); - try { - await writer.write({ type: "email_sending", day: 3, label: "Feature Highlights" }); - await delay(SEND_DELAY_MS); - await writer.write({ type: "email_sent", day: 3, label: "Feature Highlights" }); - await writer.write({ type: "sleeping", duration: "4d", fromDay: 3, toDay: 7 }); - return { sent: true, day: 3 }; - } finally { - writer.releaseLock(); - } -} - -async function sendFollowUpEmail(email: string) { - "use step"; - const writer = getWritable().getWriter(); - try { - await writer.write({ type: "email_sending", day: 7, label: "Follow-up & Feedback" }); - await delay(SEND_DELAY_MS); - await writer.write({ type: "email_sent", day: 7, label: "Follow-up & Feedback" }); - await writer.write({ type: "done" }); - return { sent: true, day: 7 }; - } finally { - writer.releaseLock(); - } -} -``` - ## Key APIs -- [`"use workflow"`](/docs/foundations/workflows-and-steps) — marks the orchestrator function -- [`"use step"`](/docs/foundations/workflows-and-steps) — marks functions that run with full Node.js access +- [`"use workflow"`](/docs/api-reference/workflow/use-workflow) — declares the orchestrator function - [`sleep()`](/docs/api-reference/workflow/sleep) — durable wait between drip emails (days/weeks) -- [`getWritable()`](/docs/api-reference/step/get-writable) — streams events to the caller diff --git a/docs/content/docs/cookbook/notifications/publish-subscribe.mdx b/docs/content/docs/cookbook/notifications/publish-subscribe.mdx index 81ba433c2e..82673a8b87 100644 --- a/docs/content/docs/cookbook/notifications/publish-subscribe.mdx +++ b/docs/content/docs/cookbook/notifications/publish-subscribe.mdx @@ -5,13 +5,11 @@ type: guide summary: A product-update event triggers email, push notification, and analytics subscribers independently. 
--- -A product-update event triggers email, push notification, and analytics subscribers independently. +A product-update event triggers email, push notification, and analytics subscribers independently. This decouples the publisher from the subscriber list — adding a new subscriber is a registry change, not a code change. ## Pattern -The workflow looks up subscribers from a registry, filters by topic, then delivers the message to each matching subscriber. Non-matching subscribers are skipped. This decouples the publisher from the subscriber list — adding a new subscriber is a registry change, not a code change. - -### Simplified +Look up subscribers from a registry, filter by topic, then deliver the message to each match. Non-matching subscribers are skipped. ```typescript lineNumbers type Subscriber = { id: string; name: string; topics: string[] }; @@ -37,175 +35,7 @@ export async function publishSubscribe(topic: string, payload: string) { } ``` -### Full Implementation - -```typescript lineNumbers -import { getWritable, sleep } from "workflow"; - -export type Topic = "orders" | "inventory" | "shipping" | "analytics"; - -export type Subscriber = { - id: string; - name: string; - topics: Topic[]; -}; - -export type PubSubEvent = - | { type: "subscribers_registered"; subscribers: Subscriber[] } - | { type: "message_published"; topic: Topic; payload: string } - | { type: "filtering"; topic: Topic; total: number; matched: number } - | { type: "delivering"; subscriberId: string; subscriberName: string; topic: Topic } - | { type: "delivered"; subscriberId: string; subscriberName: string; topic: Topic } - | { type: "subscriber_skipped"; subscriberId: string; subscriberName: string; topic: Topic } - | { type: "done"; topic: Topic; delivered: number; skipped: number }; - -export interface PubSubResult { - topic: Topic; - delivered: number; - skipped: number; -} - -// Simulated subscriber registry — each subscriber listens to specific topics. 
-// In production this would come from a database or configuration service. -const SUBSCRIBER_REGISTRY: Subscriber[] = [ - { id: "sub-1", name: "Order Service", topics: ["orders", "inventory"] }, - { id: "sub-2", name: "Warehouse API", topics: ["inventory", "shipping"] }, - { id: "sub-3", name: "Email Notifier", topics: ["orders", "shipping"] }, - { id: "sub-4", name: "Analytics Pipeline", topics: ["orders", "inventory", "shipping", "analytics"] }, - { id: "sub-5", name: "Billing Service", topics: ["orders"] }, -]; - -// Demo timing -const REGISTER_DELAY_MS = 400; -const FILTER_DELAY_MS = 500; -const DELIVER_DELAY_MS = 600; - -function delay(ms: number): Promise { - return new Promise((resolve) => setTimeout(resolve, ms)); -} - -export async function publishSubscribeFlow( - topic: Topic, - payload: string -): Promise { - "use workflow"; - - // Step 1: Register subscribers from the registry - const subscribers = await registerSubscribers(); - - // Step 2: Publish message and filter by topic subscription - const matched = await filterSubscribers(topic, payload, subscribers); - - // Step 3: Deliver to each matching subscriber - const delivered = await deliverToSubscribers(topic, matched); - - // Step 4: Summarize results - return summarizeDelivery(topic, delivered, subscribers.length - matched.length); -} - -async function registerSubscribers(): Promise { - "use step"; - const writer = getWritable().getWriter(); - try { - await delay(REGISTER_DELAY_MS); - await writer.write({ - type: "subscribers_registered", - subscribers: SUBSCRIBER_REGISTRY, - }); - return SUBSCRIBER_REGISTRY; - } finally { - writer.releaseLock(); - } -} - -async function filterSubscribers( - topic: Topic, - payload: string, - subscribers: Subscriber[] -): Promise { - "use step"; - const writer = getWritable().getWriter(); - try { - await writer.write({ type: "message_published", topic, payload }); - await delay(FILTER_DELAY_MS); - - const matched = subscribers.filter((sub) => 
sub.topics.includes(topic)); - - await writer.write({ - type: "filtering", - topic, - total: subscribers.length, - matched: matched.length, - }); - - // Emit skip events for non-matching subscribers - for (const sub of subscribers) { - if (!sub.topics.includes(topic)) { - await writer.write({ - type: "subscriber_skipped", - subscriberId: sub.id, - subscriberName: sub.name, - topic, - }); - } - } - - return matched; - } finally { - writer.releaseLock(); - } -} - -async function deliverToSubscribers( - topic: Topic, - subscribers: Subscriber[] -): Promise { - "use step"; - const writer = getWritable().getWriter(); - try { - let delivered = 0; - - for (const sub of subscribers) { - await writer.write({ - type: "delivering", - subscriberId: sub.id, - subscriberName: sub.name, - topic, - }); - await delay(DELIVER_DELAY_MS); - await writer.write({ - type: "delivered", - subscriberId: sub.id, - subscriberName: sub.name, - topic, - }); - delivered += 1; - } - - return delivered; - } finally { - writer.releaseLock(); - } -} - -async function summarizeDelivery( - topic: Topic, - delivered: number, - skipped: number -): Promise { - "use step"; - const writer = getWritable().getWriter(); - try { - await writer.write({ type: "done", topic, delivered, skipped }); - return { topic, delivered, skipped }; - } finally { - writer.releaseLock(); - } -} -``` - ## Key APIs -- [`"use workflow"`](/docs/foundations/workflows-and-steps) — marks the orchestrator function -- [`"use step"`](/docs/foundations/workflows-and-steps) — marks functions that run with full Node.js access -- [`getWritable()`](/docs/api-reference/step/get-writable) — streams events to the caller +- [`"use workflow"`](/docs/api-reference/workflow/use-workflow) — declares the orchestrator function +- [Workflows & Steps](/docs/foundations/workflows-and-steps) — how workflow and step functions work diff --git a/docs/content/docs/cookbook/notifications/recipient-list.mdx 
b/docs/content/docs/cookbook/notifications/recipient-list.mdx index f335df187f..253cb8ec6b 100644 --- a/docs/content/docs/cookbook/notifications/recipient-list.mdx +++ b/docs/content/docs/cookbook/notifications/recipient-list.mdx @@ -5,13 +5,11 @@ type: guide summary: Evaluate severity rules at runtime and alert matching channels (Slack, email, PagerDuty). --- -Evaluate severity rules at runtime and alert matching channels (Slack, email, PagerDuty). +Evaluate severity rules at runtime and alert matching channels. This lets you add or change routing rules without modifying the delivery logic. ## Pattern -The workflow evaluates routing rules against the alert severity to build a dynamic recipient list. Matched channels receive the alert in parallel via `Promise.allSettled()`, while unmatched channels are skipped. This lets you add or change routing rules without modifying the delivery logic. - -### Simplified +Evaluate routing rules against the alert severity to build a dynamic recipient list. Matched channels receive the alert in parallel via `Promise.allSettled()`, while unmatched channels are skipped. ```typescript lineNumbers type Severity = "info" | "warning" | "critical"; @@ -41,224 +39,7 @@ export async function recipientList( } ``` -### Full Implementation - -```typescript lineNumbers -// getWritable + getStepMetadata are used here to stream demo UI events. -// A production workflow wouldn't need these unless it has its own streaming UI. -import { getStepMetadata, getWritable } from "workflow"; - -// Local FatalError — prevents the SDK's automatic retry for permanent failures. -// The workflow package does not export this class, so we define it here. 
-class FatalError extends Error { - constructor(message: string) { - super(message); - this.name = "FatalError"; - } -} - -export type RecipientChannel = "slack" | "email" | "pagerduty" | "webhook"; -export type Severity = "info" | "warning" | "critical"; - -export type RoutingRule = { - channel: RecipientChannel; - match: (severity: Severity) => boolean; -}; - -// Demo-only: configures which channels should fail (and how) in the -// interactive UI. In a real workflow you'd remove this entirely — your -// steps would call real APIs and failures would be organic. -export type DemoFailures = { - transient: RecipientChannel[]; - permanent: RecipientChannel[]; -}; - -export type RecipientEvent = - | { type: "rules_evaluated"; matched: string[]; skipped: string[] } - | { type: "delivering"; channel: string } - | { type: "delivered"; channel: string; durationMs: number } - | { type: "delivery_failed"; channel: string; error: string; attempt: number } - | { type: "delivery_retrying"; channel: string; attempt: number } - | { type: "done"; summary: { delivered: number; failed: number; skipped: number } }; - -// ── Routing rules (evaluated at runtime) ─────────────────────────────── -export const RULES: RoutingRule[] = [ - { channel: "slack", match: () => true }, - { channel: "email", match: (s) => s === "warning" || s === "critical" }, - { channel: "pagerduty", match: (s) => s === "critical" }, - { channel: "webhook", match: (s) => s !== "info" }, -]; - -const CHANNEL_ERROR_MESSAGES: Record = { - slack: "Slack API rate limit exceeded", - email: "Email provider returned 503", - pagerduty: "PagerDuty integration is not configured", - webhook: "Webhook endpoint timed out", -}; - -// Demo: simulate real-world network latency so the UI can show progress. -// In production, these delays would be replaced by actual API calls. 
-const CHANNEL_DELAY_MS: Record = { - slack: 650, - email: 900, - pagerduty: 750, - webhook: 1100, -}; - -// setTimeout is available here because delay() is only called from -// "use step" functions, which have full Node.js runtime access. -function delay(ms: number): Promise { - return new Promise((resolve) => setTimeout(resolve, ms)); -} - -type DeliveryResult = { - channel: RecipientChannel; - status: "delivered" | "failed"; - durationMs?: number; - error?: string; -}; - -export type RecipientListReport = { - alertId: string; - message: string; - severity: Severity; - status: "done"; - matched: RecipientChannel[]; - skipped: RecipientChannel[]; - deliveries: DeliveryResult[]; - summary: { delivered: number; failed: number; skipped: number }; -}; - -const NO_FAILURES: DemoFailures = { transient: [], permanent: [] }; - -// Demo entry point. The `failures` parameter is only used by the interactive -// UI to let users toggle simulated failures — strip it out when adapting -// this workflow for production use. 
-export async function recipientList( - alertId: string, - message: string, - severity: Severity = "warning", - failures: DemoFailures = NO_FAILURES -): Promise { - "use workflow"; - - const matched = RULES.filter((r) => r.match(severity)).map((r) => r.channel); - const skipped = RULES.filter((r) => !r.match(severity)).map((r) => r.channel); - - const writer = getWritable().getWriter(); - try { - await writer.write({ type: "rules_evaluated", matched, skipped }); - } finally { - writer.releaseLock(); - } - - const settled = await Promise.allSettled( - matched.map((channel) => deliverToRecipient(channel, alertId, message, failures)) - ); - - const deliveries: DeliveryResult[] = settled.map((result, index) => { - const channel = matched[index]; - if (result.status === "fulfilled") { - return { channel, status: "delivered", durationMs: result.value.durationMs }; - } - return { channel, status: "failed", error: `${channel}: ${errorMessage(result.reason)}` }; - }); - - return aggregateResults(alertId, message, severity, matched, skipped, deliveries); -} - -function errorMessage(reason: unknown): string { - if (reason instanceof Error) return reason.message; - if (typeof reason === "string") return reason; - return "Unknown delivery failure"; -} - -// Demo: shared delivery implementation for all channel steps. In production -// you'd replace the delay + simulated failures with a real API call per channel. -// The getWritable() streaming and getStepMetadata() calls are also demo-only -// — they power the live execution log in the UI. 
-async function deliverToRecipient( - channel: RecipientChannel, - alertId: string, - message: string, - failures: DemoFailures -): Promise<{ durationMs: number }> { - "use step"; - - const writer = getWritable().getWriter(); - const { attempt } = getStepMetadata(); - const start = Date.now(); - - try { - if (attempt > 1) { - await writer.write({ type: "delivery_retrying", channel, attempt }); - } - - await writer.write({ type: "delivering", channel }); - await delay(CHANNEL_DELAY_MS[channel]); - - // Permanent failure — FatalError prevents the SDK's automatic retry, - // so the channel stays failed in Promise.allSettled(). - if (failures.permanent.includes(channel)) { - const error = CHANNEL_ERROR_MESSAGES[channel]; - await writer.write({ type: "delivery_failed", channel, error, attempt }); - throw new FatalError(error); - } - - // Transient failure — throws a regular Error on attempt 1 so the SDK - // auto-retries. The retry will succeed, showing the recovery path. - if (attempt === 1 && failures.transient.includes(channel)) { - throw new Error(CHANNEL_ERROR_MESSAGES[channel]); - } - - const durationMs = Date.now() - start; - await writer.write({ type: "delivered", channel, durationMs }); - return { durationMs }; - } finally { - writer.releaseLock(); - } -} - -async function aggregateResults( - alertId: string, - message: string, - severity: Severity, - matched: RecipientChannel[], - skipped: RecipientChannel[], - deliveries: DeliveryResult[] -): Promise { - "use step"; - - const writer = getWritable().getWriter(); - - try { - const delivered = deliveries.filter((d) => d.status === "delivered").length; - const failed = deliveries.length - delivered; - const summary = { delivered, failed, skipped: skipped.length }; - - await writer.write({ type: "done", summary }); - - return { - alertId, - message, - severity, - status: "done", - matched, - skipped, - deliveries, - summary, - }; - } finally { - writer.releaseLock(); - } -} -``` - ## Key APIs -- [`"use 
workflow"`](/docs/foundations/workflows-and-steps) — marks the orchestrator function -- [`"use step"`](/docs/foundations/workflows-and-steps) — marks functions that run with full Node.js access +- [`"use workflow"`](/docs/api-reference/workflow/use-workflow) — declares the orchestrator function - [`Promise.allSettled()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise/allSettled) — delivers to all matched recipients in parallel -- [`FatalError`](/docs/api-reference/workflow/fatal-error) — prevents automatic retry for permanent failures -- [`getStepMetadata()`](/docs/api-reference/step/get-step-metadata) — provides the current attempt number -- [`getWritable()`](/docs/api-reference/step/get-writable) — streams events to the caller diff --git a/docs/content/docs/cookbook/notifications/scheduled-digest.mdx b/docs/content/docs/cookbook/notifications/scheduled-digest.mdx index 08376aa1ce..f7379462d6 100644 --- a/docs/content/docs/cookbook/notifications/scheduled-digest.mdx +++ b/docs/content/docs/cookbook/notifications/scheduled-digest.mdx @@ -5,13 +5,11 @@ type: guide summary: Open a 1-hour collection window for events, then email a digest when the window closes. --- -Open a 1-hour collection window for events, then email a digest when the window closes. +Open a time window for events, then email a digest when the window closes. The hook can receive multiple events during the window because it is awaited in a loop. ## Pattern -The workflow opens a time window using `sleep()` and races it against incoming events from a `defineHook`. Events accumulate in an array until the window closes. After the window, if any events were collected, a digest email is sent. The hook can receive multiple events during the window because it is awaited in a loop. - -### Simplified +Race incoming events from a `defineHook` against a `sleep()` window. Events accumulate in an array until the window closes, then send a digest if any were collected. 
```typescript lineNumbers import { sleep, defineHook } from "workflow"; @@ -53,182 +51,8 @@ export async function collectAndSendDigest( } ``` -### Full Implementation - -```typescript lineNumbers -import { sleep, defineHook, getWritable } from "workflow"; - -export type DigestEventPayload = { - type: string; - message: string; -}; - -export const digestEvent = defineHook(); - -export type DigestStreamEvent = - | { type: "window_open"; token: string; windowMs: number } - | { type: "event_received"; event: DigestEventPayload; eventCount: number } - | { type: "sleep_tick" } - | { type: "window_closed"; eventCount: number } - | { type: "sending_digest"; eventCount: number } - | { type: "digest_sent"; eventCount: number } - | { type: "digest_empty" } - | { type: "done"; status: "sent" | "empty"; eventCount: number }; - -export interface DigestResult { - digestId: string; - userId: string; - token: string; - status: "sent" | "empty"; - eventCount: number; - windowMs: number; -} - -const DEMO_WINDOW_MS = 6_000; - -export async function collectAndSendDigest( - digestId: string, - userId: string, - windowMs: number = DEMO_WINDOW_MS -): Promise { - "use workflow"; - - const token = `digest:${digestId}`; - const hook = digestEvent.create({ token }); - const windowClosed = sleep(`${windowMs}ms`).then(() => ({ - kind: "window_closed" as const, - })); - const events: DigestEventPayload[] = []; - - await emitWindowOpen(token, windowMs); - - while (true) { - const outcome = await Promise.race([ - hook.then((payload) => ({ - kind: "event" as const, - payload, - })), - windowClosed, - ]); - - if (outcome.kind === "window_closed") { - await emitWindowClosed(events.length); - break; - } - - events.push(outcome.payload); - await emitEventReceived(outcome.payload, events.length); - } - - if (events.length === 0) { - await emitDigestEmpty(); - return { - digestId, - userId, - token, - status: "empty", - eventCount: 0, - windowMs, - }; - } - - await sendDigestEmail(userId, events); - - 
await emitDone("sent", events.length); - - return { - digestId, - userId, - token, - status: "sent", - eventCount: events.length, - windowMs, - }; -} - -async function emitWindowOpen(token: string, windowMs: number) { - "use step"; - const writer = getWritable().getWriter(); - try { - await writer.write({ type: "window_open", token, windowMs }); - } finally { - writer.releaseLock(); - } -} - -async function emitEventReceived(event: DigestEventPayload, eventCount: number) { - "use step"; - const writer = getWritable().getWriter(); - try { - await writer.write({ type: "event_received", event, eventCount }); - } finally { - writer.releaseLock(); - } -} - -async function emitWindowClosed(eventCount: number) { - "use step"; - const writer = getWritable().getWriter(); - try { - await writer.write({ type: "window_closed", eventCount }); - } finally { - writer.releaseLock(); - } -} - -async function emitDigestEmpty() { - "use step"; - const writer = getWritable().getWriter(); - try { - await writer.write({ type: "digest_empty" }); - await writer.write({ type: "done", status: "empty", eventCount: 0 }); - } finally { - writer.releaseLock(); - } -} - -async function emitDone(status: "sent" | "empty", eventCount: number) { - "use step"; - const writer = getWritable().getWriter(); - try { - await writer.write({ type: "done", status, eventCount }); - } finally { - writer.releaseLock(); - } -} - -function delay(ms: number): Promise { - return new Promise((resolve) => setTimeout(resolve, ms)); -} - -async function sendDigestEmail( - userId: string, - events: DigestEventPayload[] -): Promise { - "use step"; - - const writer = getWritable().getWriter(); - try { - await writer.write({ type: "sending_digest", eventCount: events.length }); - await delay(500); - await writer.write({ type: "digest_sent", eventCount: events.length }); - } finally { - writer.releaseLock(); - } - - console.info("[scheduled-digest] send_digest", { - userId, - eventCount: events.length, - types: 
events.map((e) => e.type), - }); -} -``` - ## Key APIs -- [`"use workflow"`](/docs/foundations/workflows-and-steps) — marks the orchestrator function -- [`"use step"`](/docs/foundations/workflows-and-steps) — marks functions that run with full Node.js access +- [`"use workflow"`](/docs/api-reference/workflow/use-workflow) — declares the orchestrator function - [`sleep()`](/docs/api-reference/workflow/sleep) — durable timer for the collection window -- [`defineHook`](/docs/api-reference/workflow/define-hook) — receives events from external systems during the window -- [`Promise.race()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise/race) — races incoming events against the window closing -- [`getWritable()`](/docs/api-reference/step/get-writable) — streams events to the caller +- [`defineHook()`](/docs/api-reference/workflow/define-hook) — receives events from external systems during the window diff --git a/docs/content/docs/cookbook/notifications/wakeable-reminder.mdx b/docs/content/docs/cookbook/notifications/wakeable-reminder.mdx index 7e1eae2d54..714157e354 100644 --- a/docs/content/docs/cookbook/notifications/wakeable-reminder.mdx +++ b/docs/content/docs/cookbook/notifications/wakeable-reminder.mdx @@ -5,13 +5,11 @@ type: guide summary: Schedule a payment reminder for 3 days out, but let the user cancel, snooze, or pay early via webhook. --- -Schedule a payment reminder for 3 days out, but let the user cancel, snooze, or pay early via webhook. +Schedule a payment reminder that can be cancelled, snoozed, or sent early via an external signal. Use this when long-running waits should be interruptible without polling. ## Pattern -The workflow races a durable `sleep()` against a `defineHook` that listens for external events. If the hook fires before the sleep completes, the workflow handles the action (cancel, snooze, or send now). This lets external systems interrupt a long-running wait without polling. 
- -### Simplified +Race a durable `sleep()` against a `defineHook` that listens for external events. If the hook fires before the sleep completes, handle the action (cancel, snooze, or send now). ```typescript lineNumbers import { defineHook, sleep } from "workflow"; @@ -51,133 +49,8 @@ export async function scheduleReminder(userId: string, delayMs: number) { } ``` -### Full Implementation - -```typescript lineNumbers -import { defineHook, getWritable, sleep } from "workflow"; - -export type ReminderAction = - | { type: "cancel" } - | { type: "send_now" } - | { type: "snooze"; seconds: number }; - -export type ReminderResult = { - userId: string; - status: "sent" | "cancelled"; - sentAt?: string; - token: string; -}; - -export type ReminderEvent = - | { type: "scheduled"; userId: string; sendAtMs: number; token: string; metadata: { userId: string; initialSendAt: string; channel: string } } - | { type: "sleeping"; sendAtMs: number } - | { type: "action_received"; action: ReminderAction } - | { type: "snoozed"; sendAtMs: number } - | { type: "woke" } - | { type: "sending" } - | { type: "sent" } - | { type: "cancelled" } - | { type: "done"; status: "sent" | "cancelled" }; - -export const reminderActionHook = defineHook(); - -export async function scheduleReminder( - userId: string, - delayMs: number -): Promise { - "use workflow"; - - let sendAt = new Date(Date.now() + delayMs); - - const action = reminderActionHook.create({ - token: `reminder:${userId}`, - metadata: { - userId, - initialSendAt: sendAt.toISOString(), - channel: "email", - }, - }); - - await emit({ - type: "scheduled", - userId, - sendAtMs: sendAt.getTime(), - token: action.token, - metadata: { - userId, - initialSendAt: sendAt.toISOString(), - channel: "email", - }, - }); - await emit({ type: "sleeping", sendAtMs: sendAt.getTime() }); - - const outcome = await Promise.race([ - sleep(sendAt).then(() => ({ kind: "time" as const })), - action.then((payload) => ({ kind: "action" as const, payload })), - 
]); - - if (outcome.kind === "action") { - await emit({ type: "action_received", action: outcome.payload }); - - if (outcome.payload.type === "cancel") { - await emit({ type: "cancelled" }); - await emit({ type: "done", status: "cancelled" }); - return { userId, status: "cancelled", token: action.token }; - } - - if (outcome.payload.type === "snooze") { - sendAt = new Date(Date.now() + outcome.payload.seconds * 1000); - await emit({ type: "snoozed", sendAtMs: sendAt.getTime() }); - await sleep(sendAt); - } - - if (outcome.payload.type === "send_now") { - await emit({ type: "woke" }); - } - } - - await emit({ type: "sending" }); - await sendReminderEmail(userId, sendAt); - await emit({ type: "sent" }); - await emit({ type: "done", status: "sent" }); - - return { - userId, - status: "sent", - sentAt: sendAt.toISOString(), - token: action.token, - }; -} - -/** - * Step: Emit a single event to the UI stream. - * Re-acquires the writer inside the step so it survives durable suspension. - */ -async function emit(event: T): Promise { - "use step"; - const writer = getWritable().getWriter(); - try { - await writer.write(event); - } finally { - writer.releaseLock(); - } -} - -async function sendReminderEmail(userId: string, sendAt: Date) { - "use step"; - await new Promise((resolve) => setTimeout(resolve, 500)); - console.info("[wakeable-reminder] send_email", { - userId, - scheduledFor: sendAt.toISOString(), - }); -} -``` - ## Key APIs -- [`"use workflow"`](/docs/foundations/workflows-and-steps) — marks the orchestrator function -- [`"use step"`](/docs/foundations/workflows-and-steps) — marks functions that run with full Node.js access +- [`"use workflow"`](/docs/api-reference/workflow/use-workflow) — declares the orchestrator function - [`sleep()`](/docs/api-reference/workflow/sleep) — durable wait until a deadline -- [`defineHook`](/docs/api-reference/workflow/define-hook) — creates a hook that external systems can trigger -- 
[`Promise.race()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise/race) — races the sleep against the hook -- [`getWritable()`](/docs/api-reference/step/get-writable) — streams events to the caller +- [`defineHook()`](/docs/api-reference/workflow/define-hook) — creates a hook that external systems can trigger diff --git a/docs/content/docs/cookbook/observability/correlation-identifier.mdx b/docs/content/docs/cookbook/observability/correlation-identifier.mdx index 5a45ec370a..819a32b2d5 100644 --- a/docs/content/docs/cookbook/observability/correlation-identifier.mdx +++ b/docs/content/docs/cookbook/observability/correlation-identifier.mdx @@ -5,13 +5,11 @@ type: guide summary: Tag outbound API calls with a correlation ID so async responses match back to the right order. --- -When your workflow sends requests to external services that respond asynchronously, you need a way to match responses back to the originating request. A correlation identifier tags each outbound call with a unique ID that the response carries back. +Use a correlation identifier when your workflow sends requests to external services that respond asynchronously and you need to match responses back to the originating request. ## Pattern -The workflow generates a unique correlation ID, attaches it to the outbound request, then durably waits for the async response using `sleep()`. When the response arrives, the correlation ID is verified to match the original request before delivering the result. - -### Simplified +The workflow generates a unique correlation ID, attaches it to the outbound request, then durably waits for the async response. When the response arrives, the correlation ID verifies it matches the original request. 
```typescript lineNumbers import { sleep } from "workflow"; @@ -41,234 +39,7 @@ export async function correlationIdentifierFlow( } ``` -### Full Implementation - -```typescript lineNumbers -import { getWritable, sleep } from "workflow"; - -export type RequestStatus = - | "pending" - | "sent" - | "awaiting_response" - | "matched" - | "delivered" - | "timeout"; - -export type CorrelationEvent = - | { type: "correlation_id_generated"; requestId: string; correlationId: string } - | { type: "request_sent"; requestId: string; correlationId: string; service: string } - | { type: "awaiting_response"; requestId: string; correlationId: string; timeoutMs: number } - | { type: "response_received"; requestId: string; correlationId: string; responseService: string; latencyMs: number } - | { type: "correlation_matched"; requestId: string; correlationId: string; requestPayloadHash: string; responsePayloadHash: string } - | { type: "delivery_complete"; requestId: string; correlationId: string; destination: string } - | { type: "timeout_expired"; requestId: string; correlationId: string } - | { type: "done"; requestId: string; correlationId: string; status: RequestStatus; totalSteps: number }; - -export interface CorrelationIdentifierResult { - requestId: string; - correlationId: string; - status: RequestStatus; - totalSteps: number; -} - -// Simulated external services -const SERVICES = ["payment-api", "inventory-api", "shipping-api", "notification-api"] as const; -export type ServiceName = (typeof SERVICES)[number]; - -// Demo timing -const GENERATE_DELAY_MS = 400; -const SEND_DELAY_MS = 600; -const RESPONSE_MIN_MS = 500; -const RESPONSE_MAX_MS = 1500; -const MATCH_DELAY_MS = 300; -const DELIVER_DELAY_MS = 400; -const RESPONSE_TIMEOUT_MS = 5000; - -function delay(ms: number): Promise { - return new Promise((resolve) => setTimeout(resolve, ms)); -} - -function generateId(): string { - return `corr-${Date.now().toString(36)}-${Math.random().toString(36).slice(2, 8)}`; -} - -function 
hashPayload(payload: string): string { - let hash = 0; - for (let i = 0; i < payload.length; i++) { - hash = ((hash << 5) - hash + payload.charCodeAt(i)) | 0; - } - return Math.abs(hash).toString(16).padStart(8, "0"); -} - -export async function correlationIdentifierFlow( - requestId: string, - service: ServiceName, - payload: string -): Promise { - "use workflow"; - - // Step 1: Generate a unique correlation ID for this request - const correlationId = await generateCorrelationId(requestId); - - // Step 2: Send the request with correlation ID attached - await sendRequest(requestId, correlationId, service, payload); - - // Step 3: Await and match the async response using correlation ID - const matched = await awaitResponse(requestId, correlationId, service, payload); - - if (!matched) { - await emitEvent({ - type: "done", - requestId, - correlationId, - status: "timeout", - totalSteps: 3, - }); - return { requestId, correlationId, status: "timeout", totalSteps: 3 }; - } - - // Step 4: Deliver the matched response to the caller - await matchAndDeliver(requestId, correlationId, service, payload); - - await emitEvent({ - type: "done", - requestId, - correlationId, - status: "delivered", - totalSteps: 4, - }); - - return { requestId, correlationId, status: "delivered", totalSteps: 4 }; -} - -async function generateCorrelationId(requestId: string): Promise { - "use step"; - - const writer = getWritable().getWriter(); - try { - await delay(GENERATE_DELAY_MS); - const correlationId = generateId(); - await writer.write({ - type: "correlation_id_generated", - requestId, - correlationId, - }); - return correlationId; - } finally { - writer.releaseLock(); - } -} - -async function sendRequest( - requestId: string, - correlationId: string, - service: ServiceName, - _payload: string -): Promise { - "use step"; - - const writer = getWritable().getWriter(); - try { - await delay(SEND_DELAY_MS); - await writer.write({ - type: "request_sent", - requestId, - correlationId, - service, 
- }); - } finally { - writer.releaseLock(); - } -} - -async function awaitResponse( - requestId: string, - correlationId: string, - _service: ServiceName, - _payload: string -): Promise { - "use step"; - - const writer = getWritable().getWriter(); - try { - await writer.write({ - type: "awaiting_response", - requestId, - correlationId, - timeoutMs: RESPONSE_TIMEOUT_MS, - }); - - // Simulate waiting for async response with durable sleep - const responseLatency = - RESPONSE_MIN_MS + Math.random() * (RESPONSE_MAX_MS - RESPONSE_MIN_MS); - - await sleep(`${Math.round(responseLatency)}ms`); - - // Simulate: response arrives before timeout (demo always succeeds) - await writer.write({ - type: "response_received", - requestId, - correlationId, - responseService: _service, - latencyMs: Math.round(responseLatency), - }); - - return true; - } finally { - writer.releaseLock(); - } -} - -async function matchAndDeliver( - requestId: string, - correlationId: string, - service: ServiceName, - payload: string -): Promise { - "use step"; - - const writer = getWritable().getWriter(); - try { - // Match: verify correlation ID on response matches the original request - const requestHash = hashPayload(payload); - const responseHash = hashPayload(`response-for-${payload}`); - - await delay(MATCH_DELAY_MS); - await writer.write({ - type: "correlation_matched", - requestId, - correlationId, - requestPayloadHash: requestHash, - responsePayloadHash: responseHash, - }); - - // Deliver the matched response to the caller - await delay(DELIVER_DELAY_MS); - await writer.write({ - type: "delivery_complete", - requestId, - correlationId, - destination: `${service}-callback`, - }); - } finally { - writer.releaseLock(); - } -} - -async function emitEvent(event: CorrelationEvent): Promise { - "use step"; - const writer = getWritable().getWriter(); - try { - await writer.write(event); - } finally { - writer.releaseLock(); - } -} -``` - ## Key APIs -- [`"use 
workflow"`](/docs/api-reference/directives/use-workflow) — marks the orchestrator function -- [`"use step"`](/docs/api-reference/directives/use-step) — marks each phase as a durable step +- [`"use workflow"`](/docs/api-reference/workflow/use-workflow) — declares the orchestrator function - [`sleep()`](/docs/api-reference/workflow/sleep) — durably waits for the async response -- [`getWritable()`](/docs/api-reference/step/get-writable) — streams correlation events to the client diff --git a/docs/content/docs/cookbook/observability/event-sourcing.mdx b/docs/content/docs/cookbook/observability/event-sourcing.mdx index d3ddcec749..6b8d07d512 100644 --- a/docs/content/docs/cookbook/observability/event-sourcing.mdx +++ b/docs/content/docs/cookbook/observability/event-sourcing.mdx @@ -5,13 +5,11 @@ type: guide summary: Append domain events to an immutable log and replay them to detect bugs or migrate projections. --- -When you need a complete, auditable record of every state change -- and the ability to rebuild state by replaying history -- use event sourcing. Commands are validated against the current projection, domain events are appended to an immutable log, and the projection is rebuilt from scratch to verify consistency. +Use event sourcing when you need a complete, auditable record of every state change and the ability to rebuild state by replaying history. ## Pattern -The workflow accepts a sequence of commands. A step function validates each command against the current projection, converts valid commands to domain events, and appends them to the log. A second step replays the entire event log from scratch to rebuild the projection and verify it matches. - -### Simplified +Commands are validated against the current projection and converted to domain events appended to an immutable log. A second step replays the entire log from scratch to rebuild the projection and verify consistency. 
```typescript lineNumbers type CommandType = "CreateOrder" | "AuthorizePayment" | "ReserveInventory" | "ShipOrder"; @@ -34,274 +32,7 @@ export async function eventSourcing( } ``` -### Full Implementation - -```typescript lineNumbers -// getWritable is used here to stream demo UI events. -// A production workflow wouldn't need these unless it has its own streaming UI. -import { getWritable } from "workflow"; - -export type CommandType = - | "CreateOrder" - | "AuthorizePayment" - | "ReserveInventory" - | "ShipOrder" - | "CancelOrder"; - -export type DomainEvent = - | { kind: "OrderCreated"; orderId: string; timestamp: number } - | { kind: "PaymentAuthorized"; orderId: string; amount: number; timestamp: number } - | { kind: "InventoryReserved"; orderId: string; sku: string; timestamp: number } - | { kind: "OrderShipped"; orderId: string; trackingId: string; timestamp: number } - | { kind: "OrderCancelled"; orderId: string; reason: string; timestamp: number }; - -export type Projection = { - orderId: string; - status: "none" | "created" | "authorized" | "reserved" | "shipped" | "cancelled"; - paymentAuthorized: boolean; - inventoryReserved: boolean; - trackingId: string | null; -}; - -export type ESEvent = - | { type: "command_endpoint_ready"; aggregateId: string } - | { type: "command_received"; command: CommandType; aggregateId: string } - | { type: "event_appended"; event: DomainEvent; index: number } - | { type: "projection_updated"; projection: Projection } - | { type: "invalid_command"; command: CommandType; reason: string } - | { type: "replay_started"; eventCount: number } - | { type: "replay_progress"; index: number; event: DomainEvent; projection: Projection } - | { type: "replay_completed"; projection: Projection } - | { type: "done"; eventLog: DomainEvent[]; projection: Projection }; - -type AggregateReport = { - status: "done"; - aggregateId: string; - eventLog: DomainEvent[]; - projection: Projection; -}; - -// Demo: simulated processing latency so the UI 
can show progress -const COMMAND_DELAY_MS = 300; -const REPLAY_STEP_DELAY_MS = 400; - -function delay(ms: number): Promise { - return new Promise((resolve) => setTimeout(resolve, ms)); -} - -function emptyProjection(orderId: string): Projection { - return { - orderId, - status: "none", - paymentAuthorized: false, - inventoryReserved: false, - trackingId: null, - }; -} - -export function applyDomainEvent(projection: Projection, event: DomainEvent): Projection { - switch (event.kind) { - case "OrderCreated": - return { ...projection, status: "created" }; - case "PaymentAuthorized": - return { ...projection, status: "authorized", paymentAuthorized: true }; - case "InventoryReserved": - return { ...projection, status: "reserved", inventoryReserved: true }; - case "OrderShipped": - return { ...projection, status: "shipped", trackingId: event.trackingId }; - case "OrderCancelled": - return { ...projection, status: "cancelled" }; - default: - return projection; - } -} - -export function validateCommand( - command: CommandType, - projection: Projection -): { valid: true } | { valid: false; reason: string } { - switch (command) { - case "CreateOrder": - if (projection.status !== "none") - return { valid: false, reason: "Order already exists" }; - return { valid: true }; - case "AuthorizePayment": - if (projection.status !== "created") - return { valid: false, reason: "Order must be created first" }; - return { valid: true }; - case "ReserveInventory": - if (projection.status !== "authorized") - return { valid: false, reason: "Payment must be authorized first" }; - return { valid: true }; - case "ShipOrder": - if (projection.status !== "reserved") - return { valid: false, reason: "Inventory must be reserved first" }; - return { valid: true }; - case "CancelOrder": - if (projection.status === "none") - return { valid: false, reason: "No order to cancel" }; - if (projection.status === "shipped") - return { valid: false, reason: "Cannot cancel a shipped order" }; - if 
(projection.status === "cancelled") - return { valid: false, reason: "Order already cancelled" }; - return { valid: true }; - default: - return { valid: false, reason: `Unknown command: ${command}` }; - } -} - -function commandToEvent(command: CommandType, orderId: string): DomainEvent { - const timestamp = Date.now(); - switch (command) { - case "CreateOrder": - return { kind: "OrderCreated", orderId, timestamp }; - case "AuthorizePayment": - return { kind: "PaymentAuthorized", orderId, amount: 99.99, timestamp }; - case "ReserveInventory": - return { kind: "InventoryReserved", orderId, sku: "SKU-001", timestamp }; - case "ShipOrder": - return { kind: "OrderShipped", orderId, trackingId: `TRK-${Date.now()}`, timestamp }; - case "CancelOrder": - return { kind: "OrderCancelled", orderId, reason: "Customer requested", timestamp }; - } -} - -// The workflow accepts a sequence of commands and processes them against -// an append-only event log with projection rebuild. -export async function eventSourcing( - aggregateId: string, - commands: CommandType[] -): Promise { - "use workflow"; - - const eventLog: DomainEvent[] = []; - let projection = emptyProjection(aggregateId); - - // Process each command against the current projection - const processResult = await processCommands( - aggregateId, - commands, - eventLog, - projection - ); - projection = processResult.projection; - - // Replay: rebuild projection from the event log to verify consistency - const replayResult = await replayEventLog(aggregateId, processResult.eventLog); - - return finalizeAggregate(aggregateId, processResult.eventLog, replayResult.projection); -} - -async function processCommands( - aggregateId: string, - commands: CommandType[], - eventLog: DomainEvent[], - projection: Projection -): Promise<{ eventLog: DomainEvent[]; projection: Projection }> { - "use step"; - - const writer = getWritable().getWriter(); - - try { - await writer.write({ type: "command_endpoint_ready", aggregateId }); - - for 
(const command of commands) { - await writer.write({ type: "command_received", command, aggregateId }); - await delay(COMMAND_DELAY_MS); - - const validation = validateCommand(command, projection); - - if (!validation.valid) { - await writer.write({ - type: "invalid_command", - command, - reason: validation.reason, - }); - continue; - } - - const domainEvent = commandToEvent(command, aggregateId); - eventLog.push(domainEvent); - - await writer.write({ - type: "event_appended", - event: domainEvent, - index: eventLog.length - 1, - }); - - projection = applyDomainEvent(projection, domainEvent); - - await writer.write({ - type: "projection_updated", - projection, - }); - } - - return { eventLog: [...eventLog], projection }; - } finally { - writer.releaseLock(); - } -} - -async function replayEventLog( - aggregateId: string, - eventLog: DomainEvent[] -): Promise<{ projection: Projection }> { - "use step"; - - const writer = getWritable().getWriter(); - - try { - await writer.write({ - type: "replay_started", - eventCount: eventLog.length, - }); - - let projection = emptyProjection(aggregateId); - - for (let i = 0; i < eventLog.length; i++) { - await delay(REPLAY_STEP_DELAY_MS); - projection = applyDomainEvent(projection, eventLog[i]); - await writer.write({ - type: "replay_progress", - index: i, - event: eventLog[i], - projection, - }); - } - - await writer.write({ - type: "replay_completed", - projection, - }); - - return { projection }; - } finally { - writer.releaseLock(); - } -} - -async function finalizeAggregate( - aggregateId: string, - eventLog: DomainEvent[], - projection: Projection -): Promise { - "use step"; - - const writer = getWritable().getWriter(); - - try { - await delay(200); - await writer.write({ type: "done", eventLog, projection }); - return { status: "done", aggregateId, eventLog, projection }; - } finally { - writer.releaseLock(); - } -} -``` - ## Key APIs -- [`"use workflow"`](/docs/api-reference/directives/use-workflow) — marks the 
orchestrator function -- [`"use step"`](/docs/api-reference/directives/use-step) — marks command processing, replay, and finalization as durable steps -- [`getWritable()`](/docs/api-reference/step/get-writable) — streams command and replay progress to the client +- [`"use workflow"`](/docs/api-reference/workflow/use-workflow) — declares the orchestrator function +- [`"use step"`](/docs/api-reference/workflow/use-step) — command processing and replay are durable steps diff --git a/docs/content/docs/cookbook/observability/message-history.mdx b/docs/content/docs/cookbook/observability/message-history.mdx index 8637811543..27df16877b 100644 --- a/docs/content/docs/cookbook/observability/message-history.mdx +++ b/docs/content/docs/cookbook/observability/message-history.mdx @@ -5,13 +5,11 @@ type: guide summary: Track a support ticket through normalize, classify, route, dispatch with full history at each step. --- -When you need a complete audit trail showing every transformation a message underwent and in what order, use message history. Each step appends an entry to a history array carried alongside the payload, so the final result contains the full processing record. +Use message history when you need a complete audit trail showing every transformation a message underwent and in what order. ## Pattern -The workflow passes an envelope containing both the payload and a history array through each step. Every step appends its action, timestamp, and outcome to the history before returning the updated envelope. On failure, the error is recorded in the history before the workflow completes with a failed status. - -### Simplified +The workflow passes an envelope containing the payload and a history array through each step. Every step appends its action and outcome to the history before returning the updated envelope. 
```typescript lineNumbers type HistoryEntry = { step: string; action: string; timestamp: string; correlationId: string }; @@ -44,524 +42,7 @@ export async function messageHistory( } ``` -### Full Implementation - -```typescript lineNumbers -// getWritable is used here to stream demo UI events. -// A production workflow wouldn't need this unless it has its own streaming UI. -import { getWritable } from "workflow"; - -// ── Types ─────────────────────────────────────────────────────────────── - -export type Severity = "low" | "medium" | "high" | "critical"; -export type Route = "self-service" | "tier-1" | "tier-2" | "escalation"; - -export type HistoryEntry = { - step: string; - action: "started" | "succeeded" | "failed" | "decision"; - timestamp: string; - attempt: number; - correlationId: string; - detail?: Record; - error?: ApiError; -}; - -export type ApiError = { - code: string; - message: string; -}; - -export type TicketEnvelope = { - payload: { - correlationId: string; - subject: string; - body: string; - severity: Severity | null; - route: Route | null; - dispatchedTo: string | null; - }; - history: HistoryEntry[]; - status: "processing" | "completed" | "failed"; -}; - -export type HistoryEvent = - | { type: "step_started"; step: string; message: string } - | { type: "step_succeeded"; step: string; message: string } - | { - type: "step_failed"; - step: string; - message: string; - error: ApiError; - } - | { - type: "decision"; - step: string; - message: string; - detail: Record; - } - | { type: "done"; envelope: TicketEnvelope }; - -// ── Helpers ───────────────────────────────────────────────────────────── - -function delay(ms: number): Promise { - return new Promise((resolve) => setTimeout(resolve, ms)); -} - -function appendHistory( - envelope: TicketEnvelope, - entry: Omit -): TicketEnvelope { - return { - ...envelope, - history: [ - ...envelope.history, - { ...entry, timestamp: new Date().toISOString() }, - ], - }; -} - -// ── Demo configuration 
────────────────────────────────────────────────── - -const NORMALIZE_DELAY_MS = 400; -const CLASSIFY_DELAY_MS = 600; -const ROUTE_DELAY_MS = 300; -const DISPATCH_DELAY_MS = 700; -const FINALIZE_DELAY_MS = 200; - -const SEVERITY_KEYWORDS: Record = { - crash: "critical", - down: "critical", - outage: "critical", - urgent: "high", - broken: "high", - error: "medium", - bug: "medium", - slow: "low", - question: "low", -}; - -const ROUTE_MAP: Record = { - low: "self-service", - medium: "tier-1", - high: "tier-2", - critical: "escalation", -}; - -const DISPATCH_TARGETS: Record = { - "self-service": "Knowledge Base Bot", - "tier-1": "Support Agent Pool", - "tier-2": "Senior Engineer On-Call", - escalation: "Incident Commander", -}; - -// ── Workflow ───────────────────────────────────────────────────────────── - -export async function messageHistory( - correlationId: string, - subject: string, - body: string, - failAtStep: string | null = null -): Promise { - "use workflow"; - - let envelope: TicketEnvelope = { - payload: { - correlationId, - subject, - body, - severity: null, - route: null, - dispatchedTo: null, - }, - history: [], - status: "processing", - }; - - try { - envelope = await createEnvelope(envelope); - envelope = await normalizeTicket(envelope, failAtStep); - envelope = await classifySeverity(envelope, failAtStep); - envelope = await chooseRoute(envelope, failAtStep); - envelope = await dispatchTicket(envelope, failAtStep); - envelope = await finalizeSuccess(envelope); - } catch (err) { - const apiError: ApiError = - err instanceof Error - ? 
{ code: "WORKFLOW_FAILED", message: err.message } - : { code: "UNKNOWN", message: "Unknown error" }; - envelope = await finalizeFailure(envelope, apiError); - } - - return envelope; -} - -// ── Steps ──────────────────────────────────────────────────────────────── - -async function createEnvelope( - envelope: TicketEnvelope -): Promise { - "use step"; - const writer = getWritable().getWriter(); - - try { - await writer.write({ - type: "step_started", - step: "createEnvelope", - message: `Creating envelope for ticket ${envelope.payload.correlationId}`, - }); - - const result = appendHistory(envelope, { - step: "createEnvelope", - action: "succeeded", - attempt: 1, - correlationId: envelope.payload.correlationId, - detail: { correlationId: envelope.payload.correlationId }, - }); - - await writer.write({ - type: "step_succeeded", - step: "createEnvelope", - message: `Envelope created with correlationId ${envelope.payload.correlationId}`, - }); - - return result; - } finally { - writer.releaseLock(); - } -} - -async function normalizeTicket( - envelope: TicketEnvelope, - failAtStep: string | null -): Promise { - "use step"; - const writer = getWritable().getWriter(); - - try { - await writer.write({ - type: "step_started", - step: "normalizeTicket", - message: "Normalizing ticket text", - }); - - await delay(NORMALIZE_DELAY_MS); - - if (failAtStep === "normalizeTicket") { - const apiError: ApiError = { - code: "SERVICE_UNAVAILABLE", - message: "Normalization service unavailable", - }; - const failed = appendHistory(envelope, { - step: "normalizeTicket", - action: "failed", - attempt: 1, - correlationId: envelope.payload.correlationId, - error: apiError, - }); - await writer.write({ - type: "step_failed", - step: "normalizeTicket", - message: "Normalization service unavailable", - error: apiError, - }); - throw new Error(apiError.message); - } - - const normalizedSubject = envelope.payload.subject.trim().toLowerCase(); - const normalizedBody = 
envelope.payload.body.trim().toLowerCase(); - - const result = appendHistory( - { - ...envelope, - payload: { - ...envelope.payload, - subject: normalizedSubject, - body: normalizedBody, - }, - }, - { - step: "normalizeTicket", - action: "succeeded", - attempt: 1, - correlationId: envelope.payload.correlationId, - detail: { normalizedSubject }, - } - ); - - await writer.write({ - type: "step_succeeded", - step: "normalizeTicket", - message: `Normalized: "${normalizedSubject}"`, - }); - - return result; - } finally { - writer.releaseLock(); - } -} - -async function classifySeverity( - envelope: TicketEnvelope, - failAtStep: string | null -): Promise { - "use step"; - const writer = getWritable().getWriter(); - - try { - await writer.write({ - type: "step_started", - step: "classifySeverity", - message: "Classifying ticket severity", - }); - - await delay(CLASSIFY_DELAY_MS); - - if (failAtStep === "classifySeverity") { - const apiError: ApiError = { - code: "MODEL_TIMEOUT", - message: "Classification model timeout", - }; - appendHistory(envelope, { - step: "classifySeverity", - action: "failed", - attempt: 1, - correlationId: envelope.payload.correlationId, - error: apiError, - }); - await writer.write({ - type: "step_failed", - step: "classifySeverity", - message: "Classification model timeout", - error: apiError, - }); - throw new Error(apiError.message); - } - - const combined = `${envelope.payload.subject} ${envelope.payload.body}`; - let severity: Severity = "low"; - for (const [keyword, level] of Object.entries(SEVERITY_KEYWORDS)) { - if (combined.includes(keyword)) { - severity = level; - break; - } - } - - const result = appendHistory( - { ...envelope, payload: { ...envelope.payload, severity } }, - { - step: "classifySeverity", - action: "decision", - attempt: 1, - correlationId: envelope.payload.correlationId, - detail: { severity, matchedText: combined.slice(0, 80) }, - } - ); - - await writer.write({ - type: "decision", - step: "classifySeverity", - 
message: `Classified as ${severity}`, - detail: { severity }, - }); - - return result; - } finally { - writer.releaseLock(); - } -} - -async function chooseRoute( - envelope: TicketEnvelope, - failAtStep: string | null -): Promise { - "use step"; - const writer = getWritable().getWriter(); - - try { - await writer.write({ - type: "step_started", - step: "chooseRoute", - message: "Choosing routing destination", - }); - - await delay(ROUTE_DELAY_MS); - - if (failAtStep === "chooseRoute") { - const apiError: ApiError = { - code: "ROUTING_UNAVAILABLE", - message: "Routing table unavailable", - }; - appendHistory(envelope, { - step: "chooseRoute", - action: "failed", - attempt: 1, - correlationId: envelope.payload.correlationId, - error: apiError, - }); - await writer.write({ - type: "step_failed", - step: "chooseRoute", - message: "Routing table unavailable", - error: apiError, - }); - throw new Error(apiError.message); - } - - const route = ROUTE_MAP[envelope.payload.severity ?? "low"]; - - const result = appendHistory( - { ...envelope, payload: { ...envelope.payload, route } }, - { - step: "chooseRoute", - action: "decision", - attempt: 1, - correlationId: envelope.payload.correlationId, - detail: { route, basedOnSeverity: envelope.payload.severity }, - } - ); - - await writer.write({ - type: "decision", - step: "chooseRoute", - message: `Routed to ${route}`, - detail: { route, basedOnSeverity: envelope.payload.severity }, - }); - - return result; - } finally { - writer.releaseLock(); - } -} - -async function dispatchTicket( - envelope: TicketEnvelope, - failAtStep: string | null -): Promise { - "use step"; - const writer = getWritable().getWriter(); - - try { - const target = - DISPATCH_TARGETS[envelope.payload.route ?? 
"self-service"]; - - await writer.write({ - type: "step_started", - step: "dispatchTicket", - message: `Dispatching to ${target}`, - }); - - await delay(DISPATCH_DELAY_MS); - - if (failAtStep === "dispatchTicket") { - const apiError: ApiError = { - code: "DISPATCH_FAILED", - message: `Failed to dispatch to ${target}`, - }; - appendHistory(envelope, { - step: "dispatchTicket", - action: "failed", - attempt: 1, - correlationId: envelope.payload.correlationId, - error: apiError, - }); - await writer.write({ - type: "step_failed", - step: "dispatchTicket", - message: `Failed to dispatch to ${target}`, - error: apiError, - }); - throw new Error(apiError.message); - } - - const result = appendHistory( - { ...envelope, payload: { ...envelope.payload, dispatchedTo: target } }, - { - step: "dispatchTicket", - action: "succeeded", - attempt: 1, - correlationId: envelope.payload.correlationId, - detail: { target, route: envelope.payload.route }, - } - ); - - await writer.write({ - type: "step_succeeded", - step: "dispatchTicket", - message: `Dispatched to ${target}`, - }); - - return result; - } finally { - writer.releaseLock(); - } -} - -async function finalizeSuccess( - envelope: TicketEnvelope -): Promise { - "use step"; - const writer = getWritable().getWriter(); - - try { - await delay(FINALIZE_DELAY_MS); - - const result = appendHistory( - { ...envelope, status: "completed" as const }, - { - step: "finalizeSuccess", - action: "succeeded", - attempt: 1, - correlationId: envelope.payload.correlationId, - detail: { totalSteps: envelope.history.length + 1 }, - } - ); - - await writer.write({ - type: "done", - envelope: result, - }); - - return result; - } finally { - writer.releaseLock(); - } -} - -async function finalizeFailure( - envelope: TicketEnvelope, - apiError: ApiError -): Promise { - "use step"; - const writer = getWritable().getWriter(); - - try { - await delay(FINALIZE_DELAY_MS); - - const result = appendHistory( - { ...envelope, status: "failed" as const }, - { 
- step: "finalizeFailure", - action: "failed", - attempt: 1, - correlationId: envelope.payload.correlationId, - error: apiError, - } - ); - - await writer.write({ - type: "done", - envelope: result, - }); - - return result; - } finally { - writer.releaseLock(); - } -} -``` - ## Key APIs -- [`"use workflow"`](/docs/api-reference/directives/use-workflow) — marks the orchestrator function -- [`"use step"`](/docs/api-reference/directives/use-step) — marks each processing stage as a durable step -- [`getWritable()`](/docs/api-reference/step/get-writable) — streams step-level history events to the client +- [`"use workflow"`](/docs/api-reference/workflow/use-workflow) — declares the orchestrator function +- [`"use step"`](/docs/api-reference/workflow/use-step) — each processing stage is a durable step diff --git a/docs/content/docs/cookbook/observability/namespaced-streams.mdx b/docs/content/docs/cookbook/observability/namespaced-streams.mdx index deefc0f5fd..9a27855257 100644 --- a/docs/content/docs/cookbook/observability/namespaced-streams.mdx +++ b/docs/content/docs/cookbook/observability/namespaced-streams.mdx @@ -5,121 +5,38 @@ type: guide summary: Emit workflow events to separate UI and ops-telemetry streams simultaneously. --- -When different consumers need different views of workflow progress -- a UI showing content drafts while an ops dashboard tracks token usage and latency -- use namespaced streams. Each namespace is an independent writable stream that clients can subscribe to separately. +Use namespaced streams when different consumers need different views of workflow progress — a UI showing content drafts while an ops dashboard tracks latency. ## Pattern -The workflow opens multiple `getWritable()` streams with different `namespace` values. Each step writes to the appropriate stream based on the event type. Clients subscribe only to the namespaces they care about. 
- -### Simplified +The workflow delegates all work to step functions, then publishes results to namespace-specific helper steps. Each namespace is independent, so clients subscribe only to the events they care about. ```typescript lineNumbers -import { getWritable, getWorkflowMetadata } from "workflow"; - -type DraftEvent = { type: "chunk"; text: string; idx: number }; -type TelemetryEvent = { type: "start" | "tokens" | "done"; [key: string]: unknown }; - declare function buildOutline(topic: string): Promise; // @setup declare function writeSections(topic: string, outline: string): Promise; // @setup +declare function publishDraftChunk(runId: string, text: string): Promise; // @setup +declare function recordTelemetry(runId: string, event: string, value?: number): Promise; // @setup -export async function generatePost(topic: string) { - "use workflow"; - - const { workflowRunId } = getWorkflowMetadata(); - const draft = getWritable({ namespace: "draft" }).getWriter(); - const telemetry = getWritable({ namespace: "telemetry" }).getWriter(); - - try { - await telemetry.write({ type: "start", runId: workflowRunId }); - - const outline = await buildOutline(topic); - await draft.write({ type: "chunk", idx: 0, text: outline }); - - const sections = await writeSections(topic, outline); - for (let i = 0; i < sections.length; i++) { - await draft.write({ type: "chunk", idx: i + 1, text: sections[i] }); - } - - await telemetry.write({ type: "done", totalTokens: 945 }); - return { status: "completed", workflowRunId, sectionCount: sections.length + 1 }; - } finally { - draft.releaseLock(); - telemetry.releaseLock(); - } -} -``` - -### Full Implementation - -```typescript lineNumbers -import { getWritable, getWorkflowMetadata } from "workflow"; - -export type DraftEvent = { type: "chunk"; text: string; idx: number }; - -export type TelemetryEvent = - | { type: "start"; runId: string; name: string } - | { type: "tokens"; input: number; output: number } - | { type: "done"; 
totalMs: number; totalTokens: number }; - -export type GenerateResult = { - status: "completed"; - workflowRunId: string; - sectionCount: number; -}; - -export async function generatePost(topic: string): Promise { +export async function namespacedStreams(runId: string, topic: string) { "use workflow"; - const { workflowRunId } = getWorkflowMetadata(); - - const draft = getWritable({ namespace: "draft" }).getWriter(); - const telemetry = getWritable({ namespace: "telemetry" }).getWriter(); - - const startedAt = Date.now(); + await recordTelemetry(runId, "start"); - try { - await telemetry.write({ type: "start", runId: workflowRunId, name: "generatePost" }); + const outline = await buildOutline(topic); + await publishDraftChunk(runId, outline); - const outline = await buildOutline(topic); - await draft.write({ type: "chunk", idx: 0, text: outline }); - await telemetry.write({ type: "tokens", input: 45, output: 120 }); - - const sections = await writeSections(topic, outline); - for (let i = 0; i < sections.length; i++) { - await draft.write({ type: "chunk", idx: i + 1, text: sections[i] }); - await telemetry.write({ type: "tokens", input: 80 + i * 10, output: 150 + i * 30 }); - } - - const totalMs = Date.now() - startedAt; - await telemetry.write({ type: "done", totalMs, totalTokens: 945 }); - - return { status: "completed", workflowRunId, sectionCount: sections.length + 1 }; - } finally { - draft.releaseLock(); - telemetry.releaseLock(); - } -} - -async function buildOutline(topic: string): Promise { - "use step"; - return `# ${topic}\n\n## Outline\n1. Introduction\n2. Key Concepts\n3. Implementation\n4. 
Best Practices`; -} + const sections = await writeSections(topic, outline); + await Promise.all( + sections.map((section) => publishDraftChunk(runId, section)), + ); -async function writeSections(topic: string, outline: string): Promise { - "use step"; - void outline; - return [ - `## Introduction\nAn overview of ${topic} and why it matters for modern applications...`, - `## Key Concepts\nThe fundamental building blocks: durable execution, deterministic replay, and step boundaries...`, - `## Implementation\nHere's how to build it step by step with proper error handling and idempotency...`, - `## Best Practices\nTesting strategies, monitoring, and production deployment patterns...`, - ]; + await recordTelemetry(runId, "done", sections.length + 1); + return { runId, sectionCount: sections.length + 1 }; } ``` ## Key APIs -- [`"use workflow"`](/docs/api-reference/directives/use-workflow) — marks the orchestrator function -- [`"use step"`](/docs/api-reference/directives/use-step) — marks content generation as durable steps -- [`getWritable()`](/docs/api-reference/step/get-writable) — creates namespaced streams with `{ namespace: "..." }` +- [`"use workflow"`](/docs/api-reference/workflow/use-workflow) — declares the orchestrator function +- [`getWritable()`](/docs/api-reference/workflow/get-writable) — creates namespaced streams with `{ namespace: "..." }` - [`getWorkflowMetadata()`](/docs/api-reference/workflow/get-workflow-metadata) — retrieves the workflow run ID for telemetry correlation diff --git a/docs/content/docs/cookbook/observability/wire-tap.mdx b/docs/content/docs/cookbook/observability/wire-tap.mdx index 6ec8081874..1101895b94 100644 --- a/docs/content/docs/cookbook/observability/wire-tap.mdx +++ b/docs/content/docs/cookbook/observability/wire-tap.mdx @@ -5,13 +5,11 @@ type: guide summary: Mirror production order events to a debug logger without touching the main processing path. 
--- -When you need to inspect messages flowing through a pipeline for debugging or auditing without altering the main processing path, use a wire tap. Each step captures a snapshot of the message state and appends it to an audit trail alongside normal processing. +Use a wire tap when you need to inspect messages flowing through a pipeline for debugging or auditing without altering the main processing path. ## Pattern -Each step function processes the message normally, then copies a snapshot to an audit trail array. The tap is non-invasive -- it never mutates the message it captures, only reads it. The audit trail accumulates across all steps and is returned with the final result. - -### Simplified +Each step processes the message normally, then copies a snapshot to an audit trail array. The tap is non-invasive — it never mutates the message, only reads it. ```typescript lineNumbers type OrderPayload = { orderId: string; item: string; quantity: number; [key: string]: unknown }; @@ -27,7 +25,6 @@ export async function wireTap(orderId: string, item: string, quantity: number) { const auditTrail: unknown[] = []; let message: OrderPayload = { orderId, item, quantity }; - // Each step processes AND taps a snapshot to the audit trail message = await validateOrder(message, auditTrail); message = await enrichOrder(message, auditTrail); message = await transformOrder(message, auditTrail); @@ -37,223 +34,7 @@ export async function wireTap(orderId: string, item: string, quantity: number) { } ``` -### Full Implementation - -```typescript lineNumbers -// getWritable is used here to stream demo UI events. -// A production workflow wouldn't need this unless it has its own streaming UI. -import { getWritable } from "workflow"; - -// Local FatalError — prevents the SDK's automatic retry for permanent failures. -// The workflow package does not export this class, so we define it here. 
-class FatalError extends Error { - constructor(message: string) { - super(message); - this.name = "FatalError"; - } -} - -export type StageName = "validate" | "enrich" | "transform" | "deliver"; - -export type WireTapEvent = - | { type: "stage_start"; stage: string } - | { type: "tap_captured"; stage: string; snapshot: Record } - | { type: "stage_done"; stage: string; durationMs: number } - | { type: "done"; auditCount: number; totalMs: number }; - -type OrderPayload = { - orderId: string; - item: string; - quantity: number; - validated?: boolean; - price?: number; - total?: number; - format?: string; - deliveredTo?: string; -}; - -type ProcessingResult = { - orderId: string; - status: "completed"; - auditTrail: Array<{ stage: string; snapshot: Record }>; - totalMs: number; -}; - -// Demo: simulate network latency so the UI can show each stage. -const STAGE_DELAY_MS: Record = { - validate: 600, - enrich: 800, - transform: 500, - deliver: 700, -}; - -function delay(ms: number): Promise { - return new Promise((resolve) => setTimeout(resolve, ms)); -} - -// Wire Tap pattern: each processing stage is tapped — a copy of the message -// is sent to an audit channel without altering the main flow. Think of it -// as a network packet sniffer for your workflow messages. 
-export async function wireTap( - orderId: string, - item: string, - quantity: number -): Promise { - "use workflow"; - - const startMs = Date.now(); - const auditTrail: Array<{ stage: string; snapshot: Record }> = []; - - let message: OrderPayload = { orderId, item, quantity }; - - message = await validateOrder(message, auditTrail); - message = await enrichOrder(message, auditTrail); - message = await transformOrder(message, auditTrail); - message = await deliverOrder(message, auditTrail); - - await emitDone(auditTrail.length, startMs); - - return { - orderId, - status: "completed", - auditTrail, - totalMs: Date.now() - startMs, - }; -} - -// Each step processes the message AND taps a snapshot to the audit trail. -// The tap is non-invasive — it copies, never mutates the message it captures. - -async function validateOrder( - message: OrderPayload, - auditTrail: Array<{ stage: string; snapshot: Record }> -): Promise { - "use step"; - - const writer = getWritable().getWriter(); - try { - await writer.write({ type: "stage_start", stage: "validate" }); - await delay(STAGE_DELAY_MS.validate); - - if (message.quantity <= 0) { - throw new FatalError("Invalid quantity: must be greater than 0"); - } - - const result: OrderPayload = { ...message, validated: true }; - - // Wire tap: capture a snapshot without altering the flow - const snapshot = { ...result } as unknown as Record; - auditTrail.push({ stage: "validate", snapshot }); - await writer.write({ type: "tap_captured", stage: "validate", snapshot }); - await writer.write({ type: "stage_done", stage: "validate", durationMs: STAGE_DELAY_MS.validate }); - - return result; - } finally { - writer.releaseLock(); - } -} - -async function enrichOrder( - message: OrderPayload, - auditTrail: Array<{ stage: string; snapshot: Record }> -): Promise { - "use step"; - - const writer = getWritable().getWriter(); - try { - await writer.write({ type: "stage_start", stage: "enrich" }); - await delay(STAGE_DELAY_MS.enrich); - - // 
Simulate price lookup - const unitPrice = 29.99; - const result: OrderPayload = { - ...message, - price: unitPrice, - total: unitPrice * message.quantity, - }; - - // Wire tap: capture enriched state - const snapshot = { ...result } as unknown as Record; - auditTrail.push({ stage: "enrich", snapshot }); - await writer.write({ type: "tap_captured", stage: "enrich", snapshot }); - await writer.write({ type: "stage_done", stage: "enrich", durationMs: STAGE_DELAY_MS.enrich }); - - return result; - } finally { - writer.releaseLock(); - } -} - -async function transformOrder( - message: OrderPayload, - auditTrail: Array<{ stage: string; snapshot: Record }> -): Promise { - "use step"; - - const writer = getWritable().getWriter(); - try { - await writer.write({ type: "stage_start", stage: "transform" }); - await delay(STAGE_DELAY_MS.transform); - - // Simulate format transformation - const result: OrderPayload = { ...message, format: "canonical-v2" }; - - // Wire tap: capture transformed state - const snapshot = { ...result } as unknown as Record; - auditTrail.push({ stage: "transform", snapshot }); - await writer.write({ type: "tap_captured", stage: "transform", snapshot }); - await writer.write({ type: "stage_done", stage: "transform", durationMs: STAGE_DELAY_MS.transform }); - - return result; - } finally { - writer.releaseLock(); - } -} - -async function deliverOrder( - message: OrderPayload, - auditTrail: Array<{ stage: string; snapshot: Record }> -): Promise { - "use step"; - - const writer = getWritable().getWriter(); - try { - await writer.write({ type: "stage_start", stage: "deliver" }); - await delay(STAGE_DELAY_MS.deliver); - - // Simulate delivery - const result: OrderPayload = { ...message, deliveredTo: "warehouse-us-east-1" }; - - // Wire tap: capture final delivery state - const snapshot = { ...result } as unknown as Record; - auditTrail.push({ stage: "deliver", snapshot }); - await writer.write({ type: "tap_captured", stage: "deliver", snapshot }); - await 
writer.write({ type: "stage_done", stage: "deliver", durationMs: STAGE_DELAY_MS.deliver }); - - return result; - } finally { - writer.releaseLock(); - } -} - -async function emitDone( - auditCount: number, - startMs: number -): Promise { - "use step"; - - const writer = getWritable().getWriter(); - try { - await writer.write({ type: "done", auditCount, totalMs: Date.now() - startMs }); - } finally { - writer.releaseLock(); - } -} -``` - ## Key APIs -- [`"use workflow"`](/docs/api-reference/directives/use-workflow) — marks the orchestrator function -- [`"use step"`](/docs/api-reference/directives/use-step) — marks each processing stage as a durable step -- [`FatalError`](/docs/api-reference/workflow/fatal-error) — stops retries on permanent validation failures -- [`getWritable()`](/docs/api-reference/step/get-writable) — streams tap snapshots to the client +- [`"use workflow"`](/docs/api-reference/workflow/use-workflow) — declares the orchestrator function +- [`"use step"`](/docs/api-reference/workflow/use-step) — each processing stage is a durable step diff --git a/docs/content/docs/cookbook/payments/choreography.mdx b/docs/content/docs/cookbook/payments/choreography.mdx index 8eb5c234d1..274356b687 100644 --- a/docs/content/docs/cookbook/payments/choreography.mdx +++ b/docs/content/docs/cookbook/payments/choreography.mdx @@ -5,16 +5,14 @@ type: guide summary: Order flow where inventory, payment, and shipping react to events with automatic compensation on failure. --- -Use choreography when multiple services should react to events independently. Each participant handles its own logic and emits events for the next participant, with compensation if any step fails. +Use choreography when multiple services should react to events independently, with compensation if any participant fails. ## Pattern -The workflow acts as a durable event bus. Each participant step reacts to the previous event, performs its work, and emits a new event. 
If a participant fails, upstream participants run compensation steps in reverse to restore consistency. Durable `sleep()` simulates real-world async handoff latency between services. - -### Simplified +Each participant step performs its work and passes control to the next. If a participant fails, upstream participants run compensation steps in reverse. Durable `sleep()` models real-world async handoff latency. ```typescript lineNumbers -import { FatalError, sleep } from "workflow"; +import { sleep } from "workflow"; declare function placeOrder(orderId: string, items: string[]): Promise; // @setup declare function reserveInventory(items: string[]): Promise; // @setup @@ -27,20 +25,16 @@ declare function compensateOrder(orderId: string): Promise; // @setup export async function choreography(orderId: string, items: string[]) { "use workflow"; - // Participant 1: Order Service places the order await placeOrder(orderId, items); - // Participant 2: Inventory Service reacts to order_placed const inventoryOk = await reserveInventory(items); if (!inventoryOk) { await compensateOrder(orderId); return { outcome: "compensated", failedService: "inventory" }; } - // Durable sleep: simulate async handoff between participants await sleep("3s"); - // Participant 3: Payment Service reacts to inventory_reserved const paymentOk = await chargePayment(orderId); if (!paymentOk) { await compensateInventory(items); @@ -48,7 +42,6 @@ export async function choreography(orderId: string, items: string[]) { return { outcome: "compensated", failedService: "payment" }; } - // Participant 4: Shipping Service reacts to payment_processed const shippingOk = await shipOrder(orderId, items); if (!shippingOk) { await compensatePayment(orderId); @@ -61,572 +54,7 @@ export async function choreography(orderId: string, items: string[]) { } ``` -### Full Implementation - -```typescript lineNumbers -// getWritable + getStepMetadata are used here to stream demo UI events. 
-// A production workflow wouldn't need these unless it has its own streaming UI. -import { getStepMetadata, getWritable, sleep } from "workflow"; -import { FatalError } from "workflow"; - -export type ParticipantId = - | "order-service" - | "inventory-service" - | "payment-service" - | "shipping-service"; - -export type ChoreographyEvent = - | { type: "event_emitted"; participant: ParticipantId; event: string; correlationId: string; message: string } - | { type: "event_received"; participant: ParticipantId; event: string; correlationId: string; message: string } - | { type: "step_started"; participant: ParticipantId; message: string } - | { type: "step_completed"; participant: ParticipantId; message: string } - | { type: "step_retrying"; participant: ParticipantId; attempt: number } - | { type: "compensation_started"; participant: ParticipantId; reason: string; correlationId: string } - | { type: "compensation_completed"; participant: ParticipantId; message: string; correlationId: string } - | { type: "sleeping"; participant: ParticipantId; duration: string; reason: string } - | { type: "done"; correlationId: string; outcome: "fulfilled" | "compensated"; summary: ChoreographySummary }; - -type ChoreographySummary = { - correlationId: string; - outcome: "fulfilled" | "compensated"; - participantsInvolved: ParticipantId[]; - eventsEmitted: number; - trackingId: string | null; - failedService: ParticipantId | null; -}; - -type OrderItem = { name: string; qty: number }; - -// Demo: simulate real-world latency so the UI can show progress. 
-const STEP_DELAY_MS: Record = { - orderService: 400, - inventoryService: 700, - paymentService: 800, - shippingService: 600, - compensation: 500, -}; - -function delay(ms: number): Promise { - return new Promise((resolve) => setTimeout(resolve, ms)); -} - -export async function choreography( - orderId: string, - items: OrderItem[], - failService: string | null -): Promise { - "use workflow"; - - const correlationId = `COR-${orderId}`; - let eventsEmitted = 0; - const participantsInvolved: ParticipantId[] = []; - - // Participant 1: Order Service places the order and emits "order_placed" - const orderPlaced = await orderServicePlaceOrder(correlationId, orderId, items); - eventsEmitted += orderPlaced.events; - participantsInvolved.push("order-service"); - - // Participant 2: Inventory Service reacts to "order_placed", reserves stock - const inventoryResult = await inventoryServiceReserve( - correlationId, - items, - failService === "inventory" - ); - eventsEmitted += inventoryResult.events; - participantsInvolved.push("inventory-service"); - - if (!inventoryResult.success) { - // Compensation: Order Service rolls back - const comp = await orderServiceCompensate(correlationId, orderId, "inventory_failed"); - eventsEmitted += comp.events; - - return finalizeOutcome(correlationId, "compensated", participantsInvolved, eventsEmitted, null, "inventory-service"); - } - - // Durable sleep: simulate async handoff latency between participants - await emitSleeping("inventory-service", "3s", "Awaiting payment-service to react to inventory_reserved event"); - await sleep("3s"); - - // Participant 3: Payment Service reacts to "inventory_reserved", charges customer - const paymentResult = await paymentServiceCharge( - correlationId, - orderId, - failService === "payment" - ); - eventsEmitted += paymentResult.events; - participantsInvolved.push("payment-service"); - - if (!paymentResult.success) { - // Compensation: Inventory Service releases stock, then Order Service rolls back - 
const invComp = await inventoryServiceCompensate(correlationId, items, "payment_failed"); - eventsEmitted += invComp.events; - const orderComp = await orderServiceCompensate(correlationId, orderId, "payment_failed"); - eventsEmitted += orderComp.events; - - return finalizeOutcome(correlationId, "compensated", participantsInvolved, eventsEmitted, null, "payment-service"); - } - - // Participant 4: Shipping Service reacts to "payment_processed", ships order - const shippingResult = await shippingServiceShip( - correlationId, - orderId, - items, - failService === "shipping" - ); - eventsEmitted += shippingResult.events; - participantsInvolved.push("shipping-service"); - - if (!shippingResult.success) { - // Compensation: Payment refund, Inventory release, Order rollback - const payComp = await paymentServiceCompensate(correlationId, orderId, "shipping_failed"); - eventsEmitted += payComp.events; - const invComp = await inventoryServiceCompensate(correlationId, items, "shipping_failed"); - eventsEmitted += invComp.events; - const orderComp = await orderServiceCompensate(correlationId, orderId, "shipping_failed"); - eventsEmitted += orderComp.events; - - return finalizeOutcome(correlationId, "compensated", participantsInvolved, eventsEmitted, null, "shipping-service"); - } - - // All participants succeeded — order fulfilled - return finalizeOutcome( - correlationId, - "fulfilled", - participantsInvolved, - eventsEmitted, - `TRK-${orderId}-${Date.now().toString(36)}`, - null - ); -} - -// --------------------------------------------------------------------------- -// Participant: Order Service -// --------------------------------------------------------------------------- - -async function orderServicePlaceOrder( - correlationId: string, - orderId: string, - items: OrderItem[] -): Promise<{ events: number }> { - "use step"; - - const writer = getWritable().getWriter(); - const { attempt } = getStepMetadata(); - - try { - if (attempt > 1) { - await writer.write({ type: 
"step_retrying", participant: "order-service", attempt }); - } - - await writer.write({ - type: "step_started", - participant: "order-service", - message: `Placing order ${orderId} with ${items.length} item(s)`, - }); - - await delay(STEP_DELAY_MS.orderService); - - await writer.write({ - type: "event_emitted", - participant: "order-service", - event: "order_placed", - correlationId, - message: `Order ${orderId} placed — notifying downstream participants`, - }); - - await writer.write({ - type: "step_completed", - participant: "order-service", - message: `Order ${orderId} accepted and order_placed event emitted`, - }); - - return { events: 1 }; - } finally { - writer.releaseLock(); - } -} - -// --------------------------------------------------------------------------- -// Participant: Inventory Service -// --------------------------------------------------------------------------- - -async function inventoryServiceReserve( - correlationId: string, - items: OrderItem[], - simulateFail: boolean -): Promise<{ success: boolean; events: number }> { - "use step"; - - const writer = getWritable().getWriter(); - const { attempt } = getStepMetadata(); - - try { - if (attempt > 1) { - await writer.write({ type: "step_retrying", participant: "inventory-service", attempt }); - } - - await writer.write({ - type: "event_received", - participant: "inventory-service", - event: "order_placed", - correlationId, - message: "Received order_placed — checking stock levels", - }); - - await writer.write({ - type: "step_started", - participant: "inventory-service", - message: `Reserving ${items.length} item(s) in warehouse`, - }); - - await delay(STEP_DELAY_MS.inventoryService); - - if (simulateFail) { - await writer.write({ - type: "event_emitted", - participant: "inventory-service", - event: "inventory_failed", - correlationId, - message: "Insufficient stock — emitting inventory_failed event", - }); - - throw new FatalError("Inventory reservation failed: insufficient stock"); - } - - 
await writer.write({ - type: "event_emitted", - participant: "inventory-service", - event: "inventory_reserved", - correlationId, - message: "Stock reserved — emitting inventory_reserved event", - }); - - await writer.write({ - type: "step_completed", - participant: "inventory-service", - message: `${items.length} item(s) reserved successfully`, - }); - - return { success: true, events: 2 }; - } catch (err) { - if (err instanceof FatalError) { - return { success: false, events: 2 }; - } - throw err; - } finally { - writer.releaseLock(); - } -} - -// --------------------------------------------------------------------------- -// Participant: Payment Service -// --------------------------------------------------------------------------- - -async function paymentServiceCharge( - correlationId: string, - orderId: string, - simulateFail: boolean -): Promise<{ success: boolean; events: number }> { - "use step"; - - const writer = getWritable().getWriter(); - const { attempt } = getStepMetadata(); - - try { - if (attempt > 1) { - await writer.write({ type: "step_retrying", participant: "payment-service", attempt }); - } - - await writer.write({ - type: "event_received", - participant: "payment-service", - event: "inventory_reserved", - correlationId, - message: "Received inventory_reserved — processing payment", - }); - - await writer.write({ - type: "step_started", - participant: "payment-service", - message: `Charging payment for order ${orderId}`, - }); - - await delay(STEP_DELAY_MS.paymentService); - - if (simulateFail) { - await writer.write({ - type: "event_emitted", - participant: "payment-service", - event: "payment_failed", - correlationId, - message: "Payment declined — emitting payment_failed event", - }); - - throw new FatalError("Payment processing failed: card declined"); - } - - await writer.write({ - type: "event_emitted", - participant: "payment-service", - event: "payment_processed", - correlationId, - message: "Payment charged — emitting 
payment_processed event", - }); - - await writer.write({ - type: "step_completed", - participant: "payment-service", - message: `Payment for order ${orderId} processed successfully`, - }); - - return { success: true, events: 2 }; - } catch (err) { - if (err instanceof FatalError) { - return { success: false, events: 2 }; - } - throw err; - } finally { - writer.releaseLock(); - } -} - -// --------------------------------------------------------------------------- -// Participant: Shipping Service -// --------------------------------------------------------------------------- - -async function shippingServiceShip( - correlationId: string, - orderId: string, - items: OrderItem[], - simulateFail: boolean -): Promise<{ success: boolean; events: number }> { - "use step"; - - const writer = getWritable().getWriter(); - const { attempt } = getStepMetadata(); - - try { - if (attempt > 1) { - await writer.write({ type: "step_retrying", participant: "shipping-service", attempt }); - } - - await writer.write({ - type: "event_received", - participant: "shipping-service", - event: "payment_processed", - correlationId, - message: "Received payment_processed — preparing shipment", - }); - - await writer.write({ - type: "step_started", - participant: "shipping-service", - message: `Shipping ${items.length} item(s) for order ${orderId}`, - }); - - await delay(STEP_DELAY_MS.shippingService); - - if (simulateFail) { - await writer.write({ - type: "event_emitted", - participant: "shipping-service", - event: "shipping_failed", - correlationId, - message: "Shipment failed — emitting shipping_failed event", - }); - - throw new FatalError("Shipping failed: carrier unavailable"); - } - - await writer.write({ - type: "event_emitted", - participant: "shipping-service", - event: "order_shipped", - correlationId, - message: `Order ${orderId} shipped — emitting order_shipped event`, - }); - - await writer.write({ - type: "step_completed", - participant: "shipping-service", - message: `Order 
${orderId} shipped with tracking`, - }); - - return { success: true, events: 2 }; - } catch (err) { - if (err instanceof FatalError) { - return { success: false, events: 2 }; - } - throw err; - } finally { - writer.releaseLock(); - } -} - -// --------------------------------------------------------------------------- -// Compensation steps -// --------------------------------------------------------------------------- - -async function orderServiceCompensate( - correlationId: string, - orderId: string, - reason: string -): Promise<{ events: number }> { - "use step"; - - const writer = getWritable().getWriter(); - - try { - await writer.write({ - type: "compensation_started", - participant: "order-service", - reason, - correlationId, - }); - - await delay(STEP_DELAY_MS.compensation); - - await writer.write({ - type: "compensation_completed", - participant: "order-service", - message: `Order ${orderId} rolled back`, - correlationId, - }); - - return { events: 1 }; - } finally { - writer.releaseLock(); - } -} - -async function inventoryServiceCompensate( - correlationId: string, - items: OrderItem[], - reason: string -): Promise<{ events: number }> { - "use step"; - - const writer = getWritable().getWriter(); - - try { - await writer.write({ - type: "compensation_started", - participant: "inventory-service", - reason, - correlationId, - }); - - await delay(STEP_DELAY_MS.compensation); - - await writer.write({ - type: "compensation_completed", - participant: "inventory-service", - message: `${items.length} item(s) released back to stock`, - correlationId, - }); - - return { events: 1 }; - } finally { - writer.releaseLock(); - } -} - -async function paymentServiceCompensate( - correlationId: string, - orderId: string, - reason: string -): Promise<{ events: number }> { - "use step"; - - const writer = getWritable().getWriter(); - - try { - await writer.write({ - type: "compensation_started", - participant: "payment-service", - reason, - correlationId, - }); - - await 
delay(STEP_DELAY_MS.compensation); - - await writer.write({ - type: "compensation_completed", - participant: "payment-service", - message: `Payment for order ${orderId} refunded`, - correlationId, - }); - - return { events: 1 }; - } finally { - writer.releaseLock(); - } -} - -// --------------------------------------------------------------------------- -// Finalize -// --------------------------------------------------------------------------- - -async function finalizeOutcome( - correlationId: string, - outcome: "fulfilled" | "compensated", - participantsInvolved: ParticipantId[], - eventsEmitted: number, - trackingId: string | null, - failedService: ParticipantId | null -): Promise { - "use step"; - - const writer = getWritable().getWriter(); - - try { - const summary: ChoreographySummary = { - correlationId, - outcome, - participantsInvolved, - eventsEmitted, - trackingId, - failedService, - }; - - await writer.write({ - type: "done", - correlationId, - outcome, - summary, - }); - - return summary; - } finally { - writer.releaseLock(); - } -} - -// --------------------------------------------------------------------------- -// Helper: emit sleeping event -// --------------------------------------------------------------------------- - -async function emitSleeping( - participant: ParticipantId, - duration: string, - reason: string -): Promise { - "use step"; - const writer = getWritable().getWriter(); - try { - await writer.write({ type: "sleeping", participant, duration, reason }); - } finally { - writer.releaseLock(); - } -} - -emitSleeping.maxRetries = 0; -``` - ## Key APIs - [`"use workflow"`](/docs/api-reference/workflow/use-workflow) — declares the orchestrator function -- [`"use step"`](/docs/api-reference/workflow/use-step) — declares step functions with full Node.js access - [`sleep()`](/docs/api-reference/workflow/sleep) — durable timer that survives restarts -- [`FatalError`](/docs/api-reference/workflow/fatal-error) — non-retryable error that 
triggers compensation -- [`getWritable()`](/docs/api-reference/workflow/get-writable) — streams events to the client diff --git a/docs/content/docs/cookbook/payments/guaranteed-delivery.mdx b/docs/content/docs/cookbook/payments/guaranteed-delivery.mdx index 9e8345a681..18b682d507 100644 --- a/docs/content/docs/cookbook/payments/guaranteed-delivery.mdx +++ b/docs/content/docs/cookbook/payments/guaranteed-delivery.mdx @@ -5,13 +5,11 @@ type: guide summary: Ensure a payment confirmation is delivered even if the server restarts mid-send. --- -Use guaranteed delivery when messages must reach their destination even if the process crashes mid-send. The workflow persists each message before attempting delivery and automatically retries on failure. +Use guaranteed delivery when messages must reach their destination even if the process crashes mid-send. ## Pattern -Each message is persisted as a durable step, then delivery is attempted with built-in retry. The workflow runtime automatically retries failed steps (up to `maxRetries`), so if the server restarts mid-delivery the message is replayed from the event log and retried until it succeeds or exhausts attempts. - -### Simplified +Each message is delivered inside a durable step with built-in retry. If the server restarts mid-delivery, the step is replayed from the event log and retried automatically until it succeeds or exhausts attempts. 
```typescript lineNumbers declare function deliverMessage(messageId: string): Promise<{ status: "delivered" | "failed"; attempts: number }>; // @setup @@ -22,7 +20,6 @@ export async function guaranteedDelivery(messages: string[]) { const results = []; for (const messageId of messages) { - // Each step is persisted — delivery resumes after any crash const result = await deliverMessage(messageId); results.push(result); } @@ -32,154 +29,7 @@ export async function guaranteedDelivery(messages: string[]) { } ``` -### Full Implementation - -```typescript lineNumbers -// getWritable + getStepMetadata are used here to stream demo UI events. -// A production workflow wouldn't need these unless it has its own streaming UI. -import { getStepMetadata, getWritable } from "workflow"; - -export type MessageId = string; - -export type GDEvent = - | { type: "persist"; messageId: string } - | { type: "send"; messageId: string; attempt: number } - | { type: "ack"; messageId: string; attempt: number } - | { type: "retry"; messageId: string; attempt: number; error: string } - | { type: "confirm"; messageId: string; attempt: number } - | { type: "fail"; messageId: string; error: string; attempts: number } - | { type: "done"; summary: { delivered: number; failed: number } }; - -type MessageResult = { - messageId: string; - status: "delivered" | "failed"; - attempts: number; - error?: string; -}; - -type DeliveryReport = { - status: "done"; - results: MessageResult[]; - summary: { - delivered: number; - failed: number; - }; -}; - -// Demo: per-step latency so the UI can show progress -const PERSIST_DELAY_MS = 400; -const SEND_DELAY_MS = 600; -const CONFIRM_DELAY_MS = 300; -const MAX_ATTEMPTS = 3; - -function delay(ms: number): Promise { - return new Promise((resolve) => setTimeout(resolve, ms)); -} - -export async function guaranteedDelivery( - messages: string[], - failMessages: string[] = [] -): Promise { - "use workflow"; - - const results: MessageResult[] = []; - - for (const messageId 
of messages) { - const shouldFail = failMessages.includes(messageId); - const result = await deliverMessage(messageId, shouldFail); - results.push(result); - } - - return finalizeDelivery(results); -} - -async function deliverMessage( - messageId: string, - shouldFail: boolean -): Promise { - "use step"; - - const writer = getWritable().getWriter(); - const { attempt } = getStepMetadata(); - - try { - await writer.write({ type: "persist", messageId }); - await delay(PERSIST_DELAY_MS); - - await writer.write({ type: "send", messageId, attempt }); - await delay(SEND_DELAY_MS); - - if (shouldFail) { - throw new Error(`Delivery failed: recipient unreachable for ${messageId}`); - } - - await writer.write({ type: "ack", messageId, attempt }); - await delay(CONFIRM_DELAY_MS); - - await writer.write({ type: "confirm", messageId, attempt }); - return { messageId, status: "delivered", attempts: attempt }; - } catch (error: unknown) { - const message = - error instanceof Error ? error.message : "Unknown delivery error"; - - if (attempt >= MAX_ATTEMPTS) { - await writer.write({ - type: "fail", - messageId, - error: message, - attempts: attempt, - }); - return { - messageId, - status: "failed", - attempts: attempt, - error: message, - }; - } - - await writer.write({ - type: "retry", - messageId, - attempt, - error: message, - }); - - throw error instanceof Error ? 
error : new Error(message); - } finally { - writer.releaseLock(); - } -} - -async function finalizeDelivery( - results: MessageResult[] -): Promise { - "use step"; - - const writer = getWritable().getWriter(); - - try { - await delay(CONFIRM_DELAY_MS); - - const delivered = results.filter((r) => r.status === "delivered").length; - const failed = results.length - delivered; - - const report: DeliveryReport = { - status: "done", - results, - summary: { delivered, failed }, - }; - - await writer.write({ type: "done", summary: report.summary }); - return report; - } finally { - writer.releaseLock(); - } -} -``` - ## Key APIs - [`"use workflow"`](/docs/api-reference/workflow/use-workflow) — declares the orchestrator function -- [`"use step"`](/docs/api-reference/workflow/use-step) — declares step functions with automatic retry -- [`getStepMetadata()`](/docs/api-reference/workflow/get-step-metadata) — access the current attempt number for retry logic -- [`getWritable()`](/docs/api-reference/workflow/get-writable) — streams delivery progress to the client +- [`"use step"`](/docs/api-reference/workflow/use-step) — step functions retry automatically on failure diff --git a/docs/content/docs/cookbook/payments/idempotent-receiver.mdx b/docs/content/docs/cookbook/payments/idempotent-receiver.mdx index 095dc620d4..b881bcd171 100644 --- a/docs/content/docs/cookbook/payments/idempotent-receiver.mdx +++ b/docs/content/docs/cookbook/payments/idempotent-receiver.mdx @@ -5,13 +5,11 @@ type: guide summary: Detect duplicate payment webhooks with an idempotency key and return the cached result. --- -Use the idempotent receiver pattern when your workflow may receive the same request more than once (e.g., webhook retries). The workflow checks an idempotency key before processing; if the key was already seen, it returns the cached result instead of re-executing. +Use the idempotent receiver pattern when your workflow may receive the same request more than once (e.g., webhook retries). 
## Pattern -The workflow first checks whether the idempotency key has been processed before. If a cached result exists, it short-circuits and returns it. Otherwise, it processes the payment and stores the result keyed by the idempotency key. In production, the workflow's own `runId` can serve as the idempotency key since each run is unique and deterministically replayed. - -### Simplified +The workflow checks an idempotency key before processing. If a cached result exists, it short-circuits. Otherwise it processes the payment and stores the result. The workflow's own `runId` can serve as a natural idempotency key. ```typescript lineNumbers declare function checkIdempotencyKey(key: string): Promise<{ transactionId: string; amount: number } | null>; // @setup @@ -20,172 +18,23 @@ declare function processPayment(key: string, amount: number, currency: string): export async function idempotentReceiver( idempotencyKey: string, amount: number, - currency: string + currency: string, ) { "use workflow"; - // Check if this key was already processed const cached = await checkIdempotencyKey(idempotencyKey); if (cached) { return { idempotencyKey, deduplicated: true, result: cached }; } - // First time — process the payment const result = await processPayment(idempotencyKey, amount, currency); return { idempotencyKey, deduplicated: false, result }; } ``` -### Full Implementation - -```typescript lineNumbers -// getWritable is used here to stream demo UI events. -// A production workflow wouldn't need this unless it has its own streaming UI. 
-import { getWritable } from "workflow"; - -export type IdempotentEvent = - | { type: "checking_key"; idempotencyKey: string } - | { type: "duplicate_detected"; idempotencyKey: string; cachedResult: PaymentResult } - | { type: "processing_payment"; idempotencyKey: string; amount: number } - | { type: "payment_processed"; idempotencyKey: string; result: PaymentResult } - | { type: "done"; status: "completed" | "deduplicated"; idempotencyKey: string }; - -export type PaymentResult = { - transactionId: string; - amount: number; - currency: string; - status: "succeeded"; - processedAt: string; -}; - -type PaymentInput = { - idempotencyKey: string; - amount: number; - currency: string; - description: string; -}; - -type IdempotentReport = { - idempotencyKey: string; - deduplicated: boolean; - result: PaymentResult; -}; - -// Demo: in-memory store simulates durable state for idempotency checks. -// In production, this would be the workflow's built-in durable storage -// (the workflow itself is keyed by idempotencyKey via the runId). 
-const processedKeys = new Map(); - -const CHECK_DELAY_MS = 400; -const PROCESS_DELAY_MS = 800; - -function delay(ms: number): Promise { - return new Promise((resolve) => setTimeout(resolve, ms)); -} - -export async function idempotentReceiver( - idempotencyKey: string, - amount: number, - currency: string, - description: string -): Promise { - "use workflow"; - - const cached = await checkIdempotencyKey(idempotencyKey); - - if (cached) { - await emitDuplicateDetected(idempotencyKey, cached); - return { idempotencyKey, deduplicated: true, result: cached }; - } - - const result = await processPayment(idempotencyKey, amount, currency, description); - - await emitCompletion(idempotencyKey); - - return { idempotencyKey, deduplicated: false, result }; -} - -async function checkIdempotencyKey( - idempotencyKey: string -): Promise { - "use step"; - const writer = getWritable().getWriter(); - - try { - await writer.write({ type: "checking_key", idempotencyKey }); - await delay(CHECK_DELAY_MS); // Demo: simulate lookup latency - - const cached = processedKeys.get(idempotencyKey) ?? 
null; - return cached; - } finally { - writer.releaseLock(); - } -} - -async function emitDuplicateDetected( - idempotencyKey: string, - cachedResult: PaymentResult -): Promise { - "use step"; - const writer = getWritable().getWriter(); - - try { - await writer.write({ type: "duplicate_detected", idempotencyKey, cachedResult }); - await delay(200); // Demo: brief pause for UI visibility - await writer.write({ type: "done", status: "deduplicated", idempotencyKey }); - } finally { - writer.releaseLock(); - } -} - -async function processPayment( - idempotencyKey: string, - amount: number, - currency: string, - description: string -): Promise { - "use step"; - const writer = getWritable().getWriter(); - - try { - await writer.write({ type: "processing_payment", idempotencyKey, amount }); - await delay(PROCESS_DELAY_MS); // Demo: simulate payment processing latency - - const result: PaymentResult = { - transactionId: `txn_${idempotencyKey}_${Date.now()}`, - amount, - currency, - status: "succeeded", - processedAt: new Date().toISOString(), - }; - - // Store result for future deduplication - processedKeys.set(idempotencyKey, result); - - await writer.write({ type: "payment_processed", idempotencyKey, result }); - - return result; - } finally { - writer.releaseLock(); - } -} - -async function emitCompletion(idempotencyKey: string): Promise { - "use step"; - const writer = getWritable().getWriter(); - - try { - await writer.write({ type: "done", status: "completed", idempotencyKey }); - } finally { - writer.releaseLock(); - } -} -``` - ## Key APIs - [`"use workflow"`](/docs/api-reference/workflow/use-workflow) — declares the orchestrator function -- [`"use step"`](/docs/api-reference/workflow/use-step) — declares step functions with full Node.js access -- [`getWritable()`](/docs/api-reference/workflow/get-writable) — streams deduplication events to the client +- [`"use step"`](/docs/api-reference/workflow/use-step) — step functions with full Node.js access diff --git 
a/docs/content/docs/cookbook/payments/process-manager.mdx b/docs/content/docs/cookbook/payments/process-manager.mdx index 3fe90e337f..e7068980da 100644 --- a/docs/content/docs/cookbook/payments/process-manager.mdx +++ b/docs/content/docs/cookbook/payments/process-manager.mdx @@ -5,13 +5,11 @@ type: guide summary: Orchestrate payment, inventory, backorder, shipping, and delivery with branching logic. --- -Use the process manager pattern when a business process has multiple steps with branching logic based on intermediate results. The workflow maintains a state machine, transitioning between states as each step completes. +Use the process manager pattern when a business process has multiple steps with branching logic based on intermediate results. ## Pattern -The workflow tracks `currentState` and advances it through a series of steps. Each step can branch the state machine (e.g., payment failure cancels the order, backordered items trigger a durable sleep and recheck). The durable runtime guarantees the process resumes from the correct state after any interruption. - -### Simplified +The workflow advances through a series of steps, branching on results (e.g., payment failure cancels the order, backordered items trigger a durable sleep and recheck). The runtime guarantees the process resumes from the correct state after any interruption. 
```typescript lineNumbers import { sleep } from "workflow"; @@ -27,24 +25,19 @@ declare function cancelOrder(orderId: string, reason: string): Promise; // export async function processManager(orderId: string, items: string[]) { "use workflow"; - let state = "received"; - await initializeOrder(orderId); - state = "initialized"; const paymentResult = await validatePayment(orderId); if (paymentResult === "failed") { await cancelOrder(orderId, "payment_failed"); return { orderId, finalState: "cancelled" }; } - state = "payment_validated"; const inventoryResult = await checkInventory(items); if (inventoryResult === "backordered") { - await sleep("5s"); // Wait for restock - await checkInventory(items); // Recheck + await sleep("5s"); + await checkInventory(items); } - state = "inventory_checked"; await reserveInventory(items); await shipOrder(orderId); @@ -54,571 +47,7 @@ export async function processManager(orderId: string, items: string[]) { } ``` -### Full Implementation - -```typescript lineNumbers -// getWritable + getStepMetadata are used here to stream demo UI events. -// A production workflow wouldn't need these unless it has its own streaming UI. 
-import { getStepMetadata, getWritable, sleep } from "workflow"; - -export type OrderState = - | "received" - | "payment_validated" - | "payment_failed" - | "inventory_checked" - | "inventory_reserved" - | "backordered" - | "shipped" - | "delivery_confirmed" - | "completed" - | "cancelled"; - -export type ProcessManagerEvent = - | { type: "state_transition"; from: OrderState; to: OrderState; step: string } - | { type: "step_started"; step: string; message: string } - | { type: "step_completed"; step: string; message: string } - | { type: "step_retrying"; step: string; attempt: number } - | { type: "branch_taken"; step: string; branch: string; reason: string } - | { type: "sleeping"; step: string; duration: string; reason: string } - | { type: "done"; orderId: string; finalState: OrderState; summary: OrderSummary }; - -type OrderSummary = { - orderId: string; - finalState: OrderState; - stateTransitions: number; - paymentMethod: string; - itemCount: number; - trackingId: string | null; -}; - -type OrderPayload = { - orderId: string; - items: string[]; - paymentMethod: string; - simulatePaymentFail?: boolean; - simulateBackorder?: boolean; -}; - -// Demo: simulate real-world latency so the UI can show progress. 
-const STEP_DELAY_MS: Record = { - initializeOrder: 400, - validatePayment: 800, - checkInventory: 600, - reserveInventory: 500, - shipOrder: 900, - confirmDelivery: 700, - completeOrder: 400, -}; - -function delay(ms: number): Promise { - return new Promise((resolve) => setTimeout(resolve, ms)); -} - -export async function processManager( - orderId: string, - items: string[], - paymentMethod: string, - simulatePaymentFail = false, - simulateBackorder = false -): Promise { - "use workflow"; - - const order: OrderPayload = { - orderId, - items, - paymentMethod, - simulatePaymentFail, - simulateBackorder, - }; - - let currentState: OrderState = "received"; - let stateTransitions = 0; - - // Step 1: Initialize order - currentState = await initializeOrder(order, currentState); - stateTransitions++; - - // Step 2: Validate payment — branches on success/failure - const paymentResult = await validatePayment(order, currentState); - stateTransitions++; - - if (paymentResult === "payment_failed") { - // Branch: payment failed → cancel order - const summary = await cancelOrder(order, paymentResult, stateTransitions); - return summary; - } - currentState = paymentResult; - - // Step 3: Check inventory — branches on available/backorder - const inventoryResult = await checkInventory(order, currentState); - stateTransitions++; - - if (inventoryResult === "backordered") { - // Branch: backordered → sleep and recheck - await emitSleeping( - "checkInventory", - "5s", - "Waiting for backorder restock" - ); - await sleep("5s"); - - // After sleep, recheck — inventory now available - const recheckResult = await recheckInventory(order, "backordered"); - stateTransitions++; - currentState = recheckResult; - } else { - currentState = inventoryResult; - } - - // Step 4: Reserve inventory - currentState = await reserveInventory(order, currentState); - stateTransitions++; - - // Step 5: Ship order - currentState = await shipOrder(order, currentState); - stateTransitions++; - - // Step 6: 
Confirm delivery - currentState = await confirmDelivery(order, currentState); - stateTransitions++; - - // Step 7: Complete order - return completeOrder(order, currentState, stateTransitions); -} - -async function initializeOrder( - order: OrderPayload, - currentState: OrderState -): Promise { - "use step"; - - const writer = getWritable().getWriter(); - const { attempt } = getStepMetadata(); - - try { - if (attempt > 1) { - await writer.write({ type: "step_retrying", step: "initializeOrder", attempt }); - } - - await writer.write({ - type: "step_started", - step: "initializeOrder", - message: `Initializing order ${order.orderId} with ${order.items.length} item(s)`, - }); - - await delay(STEP_DELAY_MS.initializeOrder); - - const nextState: OrderState = "received"; - await writer.write({ - type: "state_transition", - from: currentState, - to: nextState, - step: "initializeOrder", - }); - - await writer.write({ - type: "step_completed", - step: "initializeOrder", - message: `Order ${order.orderId} initialized`, - }); - - return nextState; - } finally { - writer.releaseLock(); - } -} - -async function validatePayment( - order: OrderPayload, - currentState: OrderState -): Promise { - "use step"; - - const writer = getWritable().getWriter(); - const { attempt } = getStepMetadata(); - - try { - if (attempt > 1) { - await writer.write({ type: "step_retrying", step: "validatePayment", attempt }); - } - - await writer.write({ - type: "step_started", - step: "validatePayment", - message: `Validating ${order.paymentMethod} payment for order ${order.orderId}`, - }); - - await delay(STEP_DELAY_MS.validatePayment); - - if (order.simulatePaymentFail) { - const nextState: OrderState = "payment_failed"; - await writer.write({ - type: "branch_taken", - step: "validatePayment", - branch: "payment_failed", - reason: `Payment declined for ${order.paymentMethod}`, - }); - await writer.write({ - type: "state_transition", - from: currentState, - to: nextState, - step: "validatePayment", - 
}); - return nextState; - } - - const nextState: OrderState = "payment_validated"; - await writer.write({ - type: "state_transition", - from: currentState, - to: nextState, - step: "validatePayment", - }); - await writer.write({ - type: "step_completed", - step: "validatePayment", - message: `Payment validated via ${order.paymentMethod}`, - }); - - return nextState; - } finally { - writer.releaseLock(); - } -} - -async function checkInventory( - order: OrderPayload, - currentState: OrderState -): Promise { - "use step"; - - const writer = getWritable().getWriter(); - const { attempt } = getStepMetadata(); - - try { - if (attempt > 1) { - await writer.write({ type: "step_retrying", step: "checkInventory", attempt }); - } - - await writer.write({ - type: "step_started", - step: "checkInventory", - message: `Checking inventory for ${order.items.length} item(s)`, - }); - - await delay(STEP_DELAY_MS.checkInventory); - - if (order.simulateBackorder) { - const nextState: OrderState = "backordered"; - await writer.write({ - type: "branch_taken", - step: "checkInventory", - branch: "backordered", - reason: "Items temporarily out of stock, initiating backorder wait", - }); - await writer.write({ - type: "state_transition", - from: currentState, - to: nextState, - step: "checkInventory", - }); - return nextState; - } - - const nextState: OrderState = "inventory_checked"; - await writer.write({ - type: "state_transition", - from: currentState, - to: nextState, - step: "checkInventory", - }); - await writer.write({ - type: "step_completed", - step: "checkInventory", - message: "All items in stock", - }); - - return nextState; - } finally { - writer.releaseLock(); - } -} - -async function recheckInventory( - order: OrderPayload, - currentState: OrderState -): Promise { - "use step"; - - const writer = getWritable().getWriter(); - - try { - await writer.write({ - type: "step_started", - step: "recheckInventory", - message: "Rechecking inventory after backorder wait", - }); - - 
await delay(STEP_DELAY_MS.checkInventory); - - const nextState: OrderState = "inventory_checked"; - await writer.write({ - type: "state_transition", - from: currentState, - to: nextState, - step: "recheckInventory", - }); - await writer.write({ - type: "step_completed", - step: "recheckInventory", - message: "Backorder resolved — items now available", - }); - - return nextState; - } finally { - writer.releaseLock(); - } -} - -async function reserveInventory( - order: OrderPayload, - currentState: OrderState -): Promise { - "use step"; - - const writer = getWritable().getWriter(); - const { attempt } = getStepMetadata(); - - try { - if (attempt > 1) { - await writer.write({ type: "step_retrying", step: "reserveInventory", attempt }); - } - - await writer.write({ - type: "step_started", - step: "reserveInventory", - message: `Reserving ${order.items.length} item(s) in warehouse`, - }); - - await delay(STEP_DELAY_MS.reserveInventory); - - const nextState: OrderState = "inventory_reserved"; - await writer.write({ - type: "state_transition", - from: currentState, - to: nextState, - step: "reserveInventory", - }); - await writer.write({ - type: "step_completed", - step: "reserveInventory", - message: "Inventory reserved successfully", - }); - - return nextState; - } finally { - writer.releaseLock(); - } -} - -async function shipOrder( - order: OrderPayload, - currentState: OrderState -): Promise { - "use step"; - - const writer = getWritable().getWriter(); - const { attempt } = getStepMetadata(); - - try { - if (attempt > 1) { - await writer.write({ type: "step_retrying", step: "shipOrder", attempt }); - } - - await writer.write({ - type: "step_started", - step: "shipOrder", - message: `Shipping order ${order.orderId}`, - }); - - await delay(STEP_DELAY_MS.shipOrder); - - const nextState: OrderState = "shipped"; - await writer.write({ - type: "state_transition", - from: currentState, - to: nextState, - step: "shipOrder", - }); - await writer.write({ - type: 
"step_completed", - step: "shipOrder", - message: `Order shipped — tracking: TRK-${order.orderId}-${Date.now().toString(36)}`, - }); - - return nextState; - } finally { - writer.releaseLock(); - } -} - -async function confirmDelivery( - order: OrderPayload, - currentState: OrderState -): Promise { - "use step"; - - const writer = getWritable().getWriter(); - const { attempt } = getStepMetadata(); - - try { - if (attempt > 1) { - await writer.write({ type: "step_retrying", step: "confirmDelivery", attempt }); - } - - await writer.write({ - type: "step_started", - step: "confirmDelivery", - message: "Awaiting delivery confirmation", - }); - - await delay(STEP_DELAY_MS.confirmDelivery); - - const nextState: OrderState = "delivery_confirmed"; - await writer.write({ - type: "state_transition", - from: currentState, - to: nextState, - step: "confirmDelivery", - }); - await writer.write({ - type: "step_completed", - step: "confirmDelivery", - message: "Delivery confirmed by recipient", - }); - - return nextState; - } finally { - writer.releaseLock(); - } -} - -async function completeOrder( - order: OrderPayload, - currentState: OrderState, - stateTransitions: number -): Promise { - "use step"; - - const writer = getWritable().getWriter(); - - try { - await writer.write({ - type: "step_started", - step: "completeOrder", - message: `Finalizing order ${order.orderId}`, - }); - - await delay(STEP_DELAY_MS.completeOrder); - - const nextState: OrderState = "completed"; - await writer.write({ - type: "state_transition", - from: currentState, - to: nextState, - step: "completeOrder", - }); - - const summary: OrderSummary = { - orderId: order.orderId, - finalState: nextState, - stateTransitions: stateTransitions + 1, - paymentMethod: order.paymentMethod, - itemCount: order.items.length, - trackingId: `TRK-${order.orderId}-${Date.now().toString(36)}`, - }; - - await writer.write({ - type: "done", - orderId: order.orderId, - finalState: nextState, - summary, - }); - - return 
summary; - } finally { - writer.releaseLock(); - } -} - -async function cancelOrder( - order: OrderPayload, - currentState: OrderState, - stateTransitions: number -): Promise { - "use step"; - - const writer = getWritable().getWriter(); - - try { - await writer.write({ - type: "step_started", - step: "cancelOrder", - message: `Cancelling order ${order.orderId} due to payment failure`, - }); - - await delay(STEP_DELAY_MS.completeOrder); - - const nextState: OrderState = "cancelled"; - await writer.write({ - type: "state_transition", - from: currentState, - to: nextState, - step: "cancelOrder", - }); - - const summary: OrderSummary = { - orderId: order.orderId, - finalState: nextState, - stateTransitions: stateTransitions + 1, - paymentMethod: order.paymentMethod, - itemCount: order.items.length, - trackingId: null, - }; - - await writer.write({ - type: "done", - orderId: order.orderId, - finalState: nextState, - summary, - }); - - return summary; - } finally { - writer.releaseLock(); - } -} - -async function emitSleeping( - step: string, - duration: string, - reason: string -): Promise { - "use step"; - const writer = getWritable().getWriter(); - try { - await writer.write({ type: "sleeping", step, duration, reason }); - } finally { - writer.releaseLock(); - } -} - -emitSleeping.maxRetries = 0; -``` - ## Key APIs - [`"use workflow"`](/docs/api-reference/workflow/use-workflow) — declares the orchestrator function -- [`"use step"`](/docs/api-reference/workflow/use-step) — declares step functions with full Node.js access - [`sleep()`](/docs/api-reference/workflow/sleep) — durable timer for backorder wait -- [`getWritable()`](/docs/api-reference/workflow/get-writable) — streams state transitions to the client diff --git a/docs/content/docs/cookbook/payments/saga.mdx b/docs/content/docs/cookbook/payments/saga.mdx index 02bfc65364..73e4d3d77c 100644 --- a/docs/content/docs/cookbook/payments/saga.mdx +++ b/docs/content/docs/cookbook/payments/saga.mdx @@ -5,13 +5,11 @@ 
type: guide summary: Upgrade a subscription (reserve seats, capture invoice, provision) with auto-rollback on failure. --- -Use the saga pattern when a business transaction spans multiple services and you need automatic rollback if any step fails. Each forward step registers a compensation, and on failure the workflow unwinds them in reverse order. +Use the saga pattern when a business transaction spans multiple services and you need automatic rollback if any step fails. ## Pattern -Each step in the saga returns a result and pushes a compensation handler onto a stack. If a later step throws a `FatalError`, the workflow catches it and executes compensations in LIFO order to restore consistency. - -### Simplified +Each forward step pushes a compensation handler onto a stack. If a later step throws a `FatalError`, the workflow catches it and executes compensations in LIFO order. ```typescript lineNumbers import { FatalError } from "workflow"; @@ -44,7 +42,6 @@ export async function subscriptionUpgradeSaga(accountId: string, seats: number) } catch (error) { if (!(error instanceof FatalError)) throw error; - // Unwind compensations in reverse order while (compensations.length > 0) { await compensations.pop()!(); } @@ -54,320 +51,7 @@ export async function subscriptionUpgradeSaga(accountId: string, seats: number) } ``` -### Full Implementation - -```typescript lineNumbers -import { FatalError, getWritable } from "workflow"; - -type FailAtStep = 1 | 2 | 3 | null; -type CompensationAction = - | "releaseSeats" - | "refundInvoice" - | "deprovisionSeats"; -type CompensationHandler = { - action: CompensationAction; - undo: () => Promise; -}; - -export type SagaEvent = - | { type: "step_running"; step: string; label: string } - | { type: "step_succeeded"; step: string; label: string } - | { type: "step_failed"; step: string; label: string; error: string } - | { type: "step_skipped"; step: string; label: string } - | { type: "compensation_pushed"; action: CompensationAction; 
forStep: string } - | { type: "rolling_back"; failedStep: number } - | { type: "compensating"; action: CompensationAction } - | { type: "compensated"; action: CompensationAction } - | { type: "done"; status: "completed" | "rolled_back" }; - -export interface SubscriptionUpgradeResult { - accountId: string; - seats: number; - status: "completed" | "rolled_back"; - failedStep: FailAtStep; - compensationOrder: CompensationAction[]; -} - -function delay(ms: number): Promise { - return new Promise((resolve) => setTimeout(resolve, ms)); -} - -const STEP_DELAY_MS: Record = { - reserveSeats: 600, - captureInvoice: 700, - provisionSeats: 800, - sendConfirmation: 500, - releaseSeats: 500, - refundInvoice: 600, - deprovisionSeats: 500, -}; - -export async function subscriptionUpgradeSaga( - accountId: string, - seats: number, - failAtStep: FailAtStep = null -): Promise { - "use workflow"; - - const compensations: CompensationHandler[] = []; - - let reservationId: string | null = null; - let invoiceId: string | null = null; - let entitlementId: string | null = null; - - try { - reservationId = await reserveSeats(accountId, seats, failAtStep === 1); - { - const reservationToRelease = reservationId; - compensations.push({ - action: "releaseSeats", - undo: () => releaseSeats(accountId, reservationToRelease), - }); - } - - invoiceId = await captureInvoice(accountId, seats, failAtStep === 2); - { - const invoiceToRefund = invoiceId; - compensations.push({ - action: "refundInvoice", - undo: () => refundInvoice(accountId, invoiceToRefund), - }); - } - - entitlementId = await provisionSeats(accountId, seats, failAtStep === 3); - { - const entitlementToDeprovision = entitlementId; - compensations.push({ - action: "deprovisionSeats", - undo: () => deprovisionSeats(accountId, entitlementToDeprovision), - }); - } - - await sendConfirmation(accountId, seats, invoiceId, entitlementId); - await emitDone("completed"); - - return { - accountId, - seats, - status: "completed", - failedStep: 
null, - compensationOrder: compensations.map((compensation) => compensation.action), - }; - } catch (error) { - if (!(error instanceof FatalError)) { - throw error; - } - - const executedCompensations: CompensationAction[] = []; - - while (compensations.length > 0) { - const compensation = compensations.pop()!; - executedCompensations.push(compensation.action); - await compensation.undo(); - } - - await emitDone("rolled_back"); - - return { - accountId, - seats, - status: "rolled_back", - failedStep: failAtStep, - compensationOrder: executedCompensations, - }; - } -} - -async function reserveSeats( - accountId: string, - seats: number, - shouldFail: boolean -): Promise { - "use step"; - - const writer = getWritable().getWriter(); - try { - await writer.write({ type: "step_running", step: "reserveSeats", label: "Reserve seats" }); - await delay(STEP_DELAY_MS.reserveSeats); - - if (shouldFail) { - await writer.write({ type: "step_failed", step: "reserveSeats", label: "Reserve seats", error: `reserveSeats failed for account ${accountId}` }); - await writer.write({ type: "step_skipped", step: "captureInvoice", label: "Capture invoice" }); - await writer.write({ type: "step_skipped", step: "provisionSeats", label: "Provision seats" }); - await writer.write({ type: "step_skipped", step: "sendConfirmation", label: "Send confirmation" }); - throw new FatalError( - `reserveSeats failed for account ${accountId} with ${seats} seats` - ); - } - - await writer.write({ type: "step_succeeded", step: "reserveSeats", label: "Reserve seats" }); - await writer.write({ type: "compensation_pushed", action: "releaseSeats", forStep: "reserveSeats" }); - return `seat_reservation:${accountId}:${seats}`; - } finally { - writer.releaseLock(); - } -} - -async function captureInvoice( - accountId: string, - seats: number, - shouldFail: boolean -): Promise { - "use step"; - - const writer = getWritable().getWriter(); - try { - await writer.write({ type: "step_running", step: "captureInvoice", 
label: "Capture invoice" }); - await delay(STEP_DELAY_MS.captureInvoice); - - if (shouldFail) { - await writer.write({ type: "step_failed", step: "captureInvoice", label: "Capture invoice", error: `captureInvoice failed for account ${accountId}` }); - await writer.write({ type: "step_skipped", step: "provisionSeats", label: "Provision seats" }); - await writer.write({ type: "step_skipped", step: "sendConfirmation", label: "Send confirmation" }); - throw new FatalError( - `captureInvoice failed for account ${accountId} with ${seats} seats` - ); - } - - await writer.write({ type: "step_succeeded", step: "captureInvoice", label: "Capture invoice" }); - await writer.write({ type: "compensation_pushed", action: "refundInvoice", forStep: "captureInvoice" }); - return `invoice:${accountId}:${seats}`; - } finally { - writer.releaseLock(); - } -} - -async function provisionSeats( - accountId: string, - seats: number, - shouldFail: boolean -): Promise { - "use step"; - - const writer = getWritable().getWriter(); - try { - await writer.write({ type: "step_running", step: "provisionSeats", label: "Provision seats" }); - await delay(STEP_DELAY_MS.provisionSeats); - - if (shouldFail) { - await writer.write({ type: "step_failed", step: "provisionSeats", label: "Provision seats", error: `provisionSeats failed for account ${accountId}` }); - await writer.write({ type: "step_skipped", step: "sendConfirmation", label: "Send confirmation" }); - throw new FatalError( - `provisionSeats failed for account ${accountId} with ${seats} seats` - ); - } - - await writer.write({ type: "step_succeeded", step: "provisionSeats", label: "Provision seats" }); - await writer.write({ type: "compensation_pushed", action: "deprovisionSeats", forStep: "provisionSeats" }); - return `entitlement:${accountId}:${seats}`; - } finally { - writer.releaseLock(); - } -} - -async function sendConfirmation( - accountId: string, - seats: number, - invoiceId: string, - entitlementId: string -): Promise { - "use 
step"; - - const writer = getWritable().getWriter(); - try { - await writer.write({ type: "step_running", step: "sendConfirmation", label: "Send confirmation" }); - await delay(STEP_DELAY_MS.sendConfirmation); - await writer.write({ type: "step_succeeded", step: "sendConfirmation", label: "Send confirmation" }); - } finally { - writer.releaseLock(); - } - - console.info("[subscription-upgrade-saga] confirmation_sent", { - accountId, - seats, - invoiceId, - entitlementId, - }); -} - -async function releaseSeats( - accountId: string, - reservationId: string | null -): Promise { - "use step"; - - const writer = getWritable().getWriter(); - try { - await writer.write({ type: "compensating", action: "releaseSeats" }); - await delay(STEP_DELAY_MS.releaseSeats); - await writer.write({ type: "compensated", action: "releaseSeats" }); - } finally { - writer.releaseLock(); - } - - console.info("[subscription-upgrade-saga] release_seats", { - accountId, - reservationId, - }); -} - -async function refundInvoice( - accountId: string, - invoiceId: string | null -): Promise { - "use step"; - - const writer = getWritable().getWriter(); - try { - await writer.write({ type: "compensating", action: "refundInvoice" }); - await delay(STEP_DELAY_MS.refundInvoice); - await writer.write({ type: "compensated", action: "refundInvoice" }); - } finally { - writer.releaseLock(); - } - - console.info("[subscription-upgrade-saga] refund_invoice", { - accountId, - invoiceId, - }); -} - -async function deprovisionSeats( - accountId: string, - entitlementId: string | null -): Promise { - "use step"; - - const writer = getWritable().getWriter(); - try { - await writer.write({ type: "compensating", action: "deprovisionSeats" }); - await delay(STEP_DELAY_MS.deprovisionSeats); - await writer.write({ type: "compensated", action: "deprovisionSeats" }); - } finally { - writer.releaseLock(); - } - - console.info("[subscription-upgrade-saga] deprovision_seats", { - accountId, - entitlementId, - }); -} - 
-async function emitDone(status: "completed" | "rolled_back"): Promise { - "use step"; - - const writer = getWritable().getWriter(); - try { - await writer.write({ type: "done", status }); - } finally { - writer.releaseLock(); - } -} -``` - ## Key APIs - [`"use workflow"`](/docs/api-reference/workflow/use-workflow) — declares the orchestrator function -- [`"use step"`](/docs/api-reference/workflow/use-step) — declares step functions with full Node.js access - [`FatalError`](/docs/api-reference/workflow/fatal-error) — non-retryable error that triggers compensation -- [`getWritable()`](/docs/api-reference/workflow/get-writable) — streams events to the client diff --git a/docs/content/docs/cookbook/payments/transactional-outbox.mdx b/docs/content/docs/cookbook/payments/transactional-outbox.mdx index d1550bec8a..2ef27ec15b 100644 --- a/docs/content/docs/cookbook/payments/transactional-outbox.mdx +++ b/docs/content/docs/cookbook/payments/transactional-outbox.mdx @@ -5,13 +5,11 @@ type: guide summary: Persist an order and relay it to a message broker in one transaction for at-least-once delivery. --- -Use the transactional outbox pattern when you need to write business data and publish an event atomically. The workflow persists the order and an outbox record together, then a relay step polls the outbox and publishes to the broker. +Use the transactional outbox pattern when you need to write business data and publish an event atomically. ## Pattern -The workflow splits the operation into four durable steps: persist the order with an outbox entry, poll and relay the outbox entry, publish to the broker, and mark the outbox entry as sent. Because each step is persisted in the event log, the relay will resume after any crash, guaranteeing at-least-once delivery. - -### Simplified +The workflow persists the order with an outbox entry, then a relay step polls and publishes to the broker. 
Because each step is persisted in the event log, the relay resumes after any crash, guaranteeing at-least-once delivery. ```typescript lineNumbers declare function persistOrder(orderId: string, payload: string): Promise<{ outboxId: string }>; // @setup @@ -22,142 +20,15 @@ declare function markSent(orderId: string, outboxId: string, brokerId: string): export async function transactionalOutbox(orderId: string, payload: string) { "use workflow"; - // Step 1: Persist order + outbox entry in one transaction const { outboxId } = await persistOrder(orderId, payload); - - // Step 2: Relay polls outbox for unsent entries const { brokerId } = await pollRelay(outboxId); - - // Step 3: Publish to message broker await publishEvent(outboxId, brokerId); - // Step 4: Mark outbox entry as sent - return markSent(orderId, outboxId, brokerId); -} -``` - -### Full Implementation - -```typescript lineNumbers -// getWritable is used here to stream demo UI events. -// A production workflow wouldn't need these unless it has its own streaming UI. -import { getWritable } from "workflow"; - -export type OutboxEvent = - | { type: "persisting"; orderId: string } - | { type: "persisted"; orderId: string; outboxId: string } - | { type: "relaying"; outboxId: string } - | { type: "published"; outboxId: string; brokerId: string } - | { type: "marking_sent"; outboxId: string } - | { type: "confirmed"; outboxId: string } - | { type: "done"; orderId: string; outboxId: string; brokerId: string }; - -type OutboxResult = { - orderId: string; - outboxId: string; - brokerId: string; - status: "confirmed"; -}; - -// Demo: simulate real-world processing latency so the UI can show progress. 
-const PERSIST_DELAY_MS = 600; -const RELAY_DELAY_MS = 800; -const PUBLISH_DELAY_MS = 700; -const MARK_SENT_DELAY_MS = 400; - -function delay(ms: number): Promise { - return new Promise((resolve) => setTimeout(resolve, ms)); -} - -export async function transactionalOutbox( - orderId: string, - payload: string -): Promise { - "use workflow"; - - const { outboxId } = await persistOrder(orderId, payload); - const { brokerId } = await pollRelay(outboxId); - await publishEvent(outboxId, brokerId); return markSent(orderId, outboxId, brokerId); } - -async function persistOrder( - orderId: string, - payload: string -): Promise<{ outboxId: string }> { - "use step"; - const writer = getWritable().getWriter(); - - try { - await writer.write({ type: "persisting", orderId }); - await delay(PERSIST_DELAY_MS); - - const outboxId = `obx_${orderId}_${payload.length}`; - await writer.write({ type: "persisted", orderId, outboxId }); - - return { outboxId }; - } finally { - writer.releaseLock(); - } -} - -async function pollRelay( - outboxId: string -): Promise<{ brokerId: string }> { - "use step"; - const writer = getWritable().getWriter(); - - try { - await writer.write({ type: "relaying", outboxId }); - await delay(RELAY_DELAY_MS); - - const brokerId = `brk_${outboxId}_${Date.now()}`; - await writer.write({ type: "published", outboxId, brokerId }); - - return { brokerId }; - } finally { - writer.releaseLock(); - } -} - -async function publishEvent( - outboxId: string, - brokerId: string -): Promise { - "use step"; - const writer = getWritable().getWriter(); - - try { - await writer.write({ type: "marking_sent", outboxId }); - await delay(PUBLISH_DELAY_MS); - - await writer.write({ type: "confirmed", outboxId }); - } finally { - writer.releaseLock(); - } -} - -async function markSent( - orderId: string, - outboxId: string, - brokerId: string -): Promise { - "use step"; - const writer = getWritable().getWriter(); - - try { - await delay(MARK_SENT_DELAY_MS); - await writer.write({ 
type: "done", orderId, outboxId, brokerId }); - - return { orderId, outboxId, brokerId, status: "confirmed" }; - } finally { - writer.releaseLock(); - } -} ``` ## Key APIs - [`"use workflow"`](/docs/api-reference/workflow/use-workflow) — declares the orchestrator function -- [`"use step"`](/docs/api-reference/workflow/use-step) — declares step functions with automatic retry -- [`getWritable()`](/docs/api-reference/workflow/get-writable) — streams outbox progress to the client +- [`"use step"`](/docs/api-reference/workflow/use-step) — each step retries automatically on failure diff --git a/docs/content/docs/cookbook/resilience/bulkhead.mdx b/docs/content/docs/cookbook/resilience/bulkhead.mdx index 74bf77b366..4fa051b782 100644 --- a/docs/content/docs/cookbook/resilience/bulkhead.mdx +++ b/docs/content/docs/cookbook/resilience/bulkhead.mdx @@ -5,13 +5,11 @@ type: guide summary: Partition order items into isolated groups so one bad SKU doesn't block the rest of the shipment. --- -Partition order items into isolated groups so one bad SKU doesn't block the rest of the shipment. +Use bulkhead when failures in one group of work should not affect other groups. ## Pattern -The workflow splits items into fixed-size compartments and processes each compartment with `Promise.allSettled()`. Failures in one compartment are isolated — they don't affect items in other compartments. A pacing `sleep()` between compartments prevents overload. - -### Simplified +Split items into fixed-size compartments and process each with `Promise.allSettled()`. Failures are isolated to their compartment. A pacing `sleep()` between compartments prevents overload. ```typescript lineNumbers import { sleep } from "workflow"; @@ -50,173 +48,8 @@ export async function bulkhead(items: string[], maxConcurrency: number) { } ``` -### Full Implementation - -```typescript lineNumbers -// getWritable is used here to stream demo UI events. 
-// A production workflow wouldn't need this unless it has its own streaming UI. -import { getWritable, sleep } from "workflow"; - -export type BulkheadEvent = - | { type: "compartment_start"; compartment: number; items: string[] } - | { type: "item_processing"; compartment: number; item: string } - | { type: "item_success"; compartment: number; item: string; durationMs: number } - | { type: "item_failure"; compartment: number; item: string; error: string } - | { type: "pacing"; compartment: number } - | { type: "summarizing" } - | { - type: "done"; - summary: { - total: number; - succeeded: number; - failed: number; - compartments: number; - }; - }; - -type ItemResult = { - item: string; - compartment: number; - ok: boolean; - durationMs?: number; - error?: string; -}; - -type BulkheadResult = { - status: "done"; - total: number; - succeeded: number; - failed: number; - compartments: number; - results: ItemResult[]; -}; - -// Demo: staggered delays per item position for visual progression -const ITEM_DELAY_MS = [600, 750, 900]; - -// Demo: compartment 2, item index 1 fails to show isolation -const FAIL_COMPARTMENT = 2; -const FAIL_INDEX = 1; - -function delay(ms: number): Promise { - return new Promise((resolve) => setTimeout(resolve, ms)); -} - -export async function bulkhead( - jobId: string, - items: string[], - maxConcurrency: number -): Promise { - "use workflow"; - - const results: ItemResult[] = []; - let compartmentIndex = 0; - - for (let i = 0; i < items.length; i += maxConcurrency) { - compartmentIndex++; - const batch = items.slice(i, i + maxConcurrency); - - // Run compartment in parallel — failures are isolated - const outcomes = await Promise.allSettled( - batch.map((item, idx) => - processItem(jobId, item, compartmentIndex, idx) - ) - ); - - for (let j = 0; j < outcomes.length; j++) { - const outcome = outcomes[j]; - if (outcome.status === "fulfilled") { - results.push(outcome.value); - } else { - results.push({ - item: batch[j], - compartment: 
compartmentIndex, - ok: false, - error: String(outcome.reason), - }); - } - } - - // Pacing delay between compartments - if (i + maxConcurrency < items.length) { - const writer = getWritable().getWriter(); - try { - await writer.write({ type: "pacing", compartment: compartmentIndex }); - } finally { - writer.releaseLock(); - } - await sleep("1s"); - } - } - - return summarizeResults(results, compartmentIndex); -} - -async function processItem( - jobId: string, - item: string, - compartment: number, - indexInBatch: number -): Promise { - "use step"; - - const writer = getWritable().getWriter(); - - try { - await writer.write({ type: "item_processing", compartment, item }); - - const delayMs = ITEM_DELAY_MS[indexInBatch % ITEM_DELAY_MS.length]; - await delay(delayMs); - - // Demo: deterministic failure in compartment 2, index 1 - if (compartment === FAIL_COMPARTMENT && indexInBatch === FAIL_INDEX) { - const error = `Service unavailable for ${item}`; - await writer.write({ type: "item_failure", compartment, item, error }); - throw new Error(error); - } - - await writer.write({ - type: "item_success", - compartment, - item, - durationMs: delayMs, - }); - - return { item, compartment, ok: true, durationMs: delayMs }; - } finally { - writer.releaseLock(); - } -} - -async function summarizeResults( - results: ItemResult[], - compartments: number -): Promise { - "use step"; - - const writer = getWritable().getWriter(); - - try { - await writer.write({ type: "summarizing" }); - await delay(500); - - const succeeded = results.filter((r) => r.ok).length; - const failed = results.length - succeeded; - const summary = { total: results.length, succeeded, failed, compartments }; - - await writer.write({ type: "done", summary }); - - return { status: "done", ...summary, results }; - } finally { - writer.releaseLock(); - } -} -``` - ## Key APIs -- [`"use workflow"`](/docs/foundations/workflows-and-steps) — marks the orchestrator function -- [`"use 
step"`](/docs/foundations/workflows-and-steps) — marks functions that run with full Node.js access +- [`"use workflow"`](/docs/api-reference/workflow/use-workflow) — declares the orchestrator function - [`sleep()`](/docs/api-reference/workflow/sleep) — pacing delay between compartments - [`Promise.allSettled()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise/allSettled) — runs items in parallel, isolating failures -- [`getWritable()`](/docs/api-reference/step/get-writable) — streams events to the caller diff --git a/docs/content/docs/cookbook/resilience/circuit-breaker.mdx b/docs/content/docs/cookbook/resilience/circuit-breaker.mdx index d3b6703483..05532aa13c 100644 --- a/docs/content/docs/cookbook/resilience/circuit-breaker.mdx +++ b/docs/content/docs/cookbook/resilience/circuit-breaker.mdx @@ -5,13 +5,11 @@ type: guide summary: Stop hammering a down payment gateway after 3 failures, wait 30s, then test with one probe request. --- -Stop hammering a down payment gateway after 3 failures, wait 30s, then test with one probe request. +Use circuit breaker when a dependency fails repeatedly — stop calling it, wait a cooldown, then probe with one request to test recovery. ## Pattern -The workflow tracks circuit state (`closed`, `open`, `half-open`) and a consecutive failure count. After the failure threshold is reached, the circuit opens and `sleep()` enforces a durable cooldown. The next request after cooldown is a probe — if it succeeds, the circuit closes again. - -### Simplified +Track circuit state (`closed`, `open`, `half-open`) and consecutive failures. After the threshold, open the circuit and use durable `sleep()` for cooldown. The next request after cooldown is a probe — if it succeeds, close the circuit. 
```typescript lineNumbers import { sleep } from "workflow"; @@ -50,177 +48,7 @@ export async function circuitBreaker(maxRequests: number = 10) { } ``` -### Full Implementation - -```typescript lineNumbers -import { getWritable, sleep } from "workflow"; - -export type CircuitState = "closed" | "open" | "half-open"; - -export type CircuitEvent = - | { type: "request_attempt"; requestNum: number; circuitState: CircuitState } - | { type: "request_success"; requestNum: number; circuitState: CircuitState } - | { type: "request_fail"; requestNum: number; circuitState: CircuitState } - | { type: "circuit_open"; requestNum: number } - | { type: "cooldown_start"; requestNum: number; cooldownMs: number } - | { type: "cooldown_end"; requestNum: number } - | { type: "circuit_half_open"; requestNum: number } - | { type: "circuit_closed"; requestNum: number } - | { - type: "done"; - status: "recovered" | "failed"; - totalRequests: number; - totalFailures: number; - circuitOpened: number; - }; - -export interface CircuitBreakerResult { - serviceId: string; - status: "recovered" | "failed"; - totalRequests: number; - totalFailures: number; - circuitOpened: number; -} - -// Demo timing: simulate realistic request latency for the UI -const REQUEST_DELAY_MS = 500; -const COOLDOWN_MS = 3000; -const FAILURE_THRESHOLD = 3; - -function delay(ms: number): Promise { - return new Promise((resolve) => setTimeout(resolve, ms)); -} - -export async function circuitBreakerFlow( - serviceId: string, - maxRequests: number = 10, - failStart: number = 4, - failEnd: number = 6 -): Promise { - "use workflow"; - - let state: CircuitState = "closed"; - let consecutiveFailures = 0; - let totalRequests = 0; - let totalFailures = 0; - let circuitOpened = 0; - - for (let i = 1; i <= maxRequests; i++) { - if (state === "open") { - await emitEvent({ - type: "cooldown_start", - requestNum: i, - cooldownMs: COOLDOWN_MS, - }); - await sleep(`${COOLDOWN_MS}ms`); - state = "half-open"; - await emitEvent({ type: 
"cooldown_end", requestNum: i }); - await emitEvent({ type: "circuit_half_open", requestNum: i }); - } - - const success = await callPaymentService( - serviceId, - i, - state, - failStart, - failEnd - ); - totalRequests++; - - if (success) { - consecutiveFailures = 0; - if (state === "half-open") { - state = "closed"; - await emitEvent({ type: "circuit_closed", requestNum: i }); - } - } else { - totalFailures++; - consecutiveFailures++; - if (consecutiveFailures >= FAILURE_THRESHOLD) { - state = "open"; - circuitOpened++; - consecutiveFailures = 0; - await emitEvent({ type: "circuit_open", requestNum: i }); - } - } - } - - const result: CircuitBreakerResult = { - serviceId, - status: state === "closed" ? "recovered" : "failed", - totalRequests, - totalFailures, - circuitOpened, - }; - - await emitEvent({ - type: "done", - status: result.status, - totalRequests: result.totalRequests, - totalFailures: result.totalFailures, - circuitOpened: result.circuitOpened, - }); - - return result; -} - -async function emitEvent(event: CircuitEvent): Promise { - "use step"; - const writer = getWritable().getWriter(); - try { - await writer.write(event); - } finally { - writer.releaseLock(); - } -} - -async function callPaymentService( - serviceId: string, - requestNum: number, - circuitState: CircuitState, - failStart: number, - failEnd: number -): Promise { - "use step"; - - const writer = getWritable().getWriter(); - - try { - await writer.write({ - type: "request_attempt", - requestNum, - circuitState, - }); - - await delay(REQUEST_DELAY_MS); - - const shouldFail = requestNum >= failStart && requestNum <= failEnd; - - if (shouldFail) { - await writer.write({ - type: "request_fail", - requestNum, - circuitState, - }); - return false; - } - - await writer.write({ - type: "request_success", - requestNum, - circuitState, - }); - - return true; - } finally { - writer.releaseLock(); - } -} -``` - ## Key APIs -- [`"use workflow"`](/docs/foundations/workflows-and-steps) — marks the 
orchestrator function -- [`"use step"`](/docs/foundations/workflows-and-steps) — marks functions that run with full Node.js access -- [`sleep()`](/docs/api-reference/workflow/sleep) — durable cooldown pause -- [`getWritable()`](/docs/api-reference/step/get-writable) — streams events to the caller +- [`"use workflow"`](/docs/api-reference/workflow/use-workflow) — declares the orchestrator function +- [`sleep()`](/docs/api-reference/workflow/sleep) — durable cooldown pause that survives restarts diff --git a/docs/content/docs/cookbook/resilience/dead-letter-queue.mdx b/docs/content/docs/cookbook/resilience/dead-letter-queue.mdx index 22c3525cbd..729343820f 100644 --- a/docs/content/docs/cookbook/resilience/dead-letter-queue.mdx +++ b/docs/content/docs/cookbook/resilience/dead-letter-queue.mdx @@ -5,193 +5,51 @@ type: guide summary: Route undeliverable messages to a dead-letter queue after 3 retries for ops review. --- -Route undeliverable messages to a dead-letter queue after 3 retries for ops review. +Use dead letter queue when messages that fail repeatedly should be moved aside for inspection instead of retrying forever. ## Pattern -Each message is processed in a step that uses `getStepMetadata()` to track the attempt count. If the step fails and the attempt count reaches the maximum, the message is marked as dead-lettered instead of throwing again. This prevents infinite retry loops while preserving the message for inspection. - -### Simplified +A step uses `getStepMetadata()` to track the attempt count. If the attempt reaches the maximum, return a dead-lettered status instead of throwing again. This prevents infinite retry loops while preserving the message. 
```typescript lineNumbers import { getStepMetadata } from "workflow"; +declare function deliverMessage(messageId: string): Promise; // @setup + const MAX_ATTEMPTS = 3; -export async function deadLetterQueue( - messages: string[], - poisonMessages: string[] = [] -) { +export async function deadLetterQueue(messages: string[]) { "use workflow"; const results = []; for (const messageId of messages) { - const isPoison = poisonMessages.includes(messageId); - const result = await processMessage(messageId, isPoison); + const result = await processMessage(messageId); results.push(result); } return results; } -async function processMessage(messageId: string, isPoison: boolean) { +async function processMessage(messageId: string) { "use step"; const { attempt } = getStepMetadata(); - if (isPoison) { - if (attempt >= MAX_ATTEMPTS) { - return { messageId, status: "dead_lettered", attempts: attempt }; - } - throw new Error(`Cannot parse message ${messageId}`); - } - - return { messageId, status: "delivered", attempts: attempt }; -} -``` - -### Full Implementation - -```typescript lineNumbers -// getWritable + getStepMetadata are used here to stream demo UI events. -// A production workflow wouldn't need these unless it has its own streaming UI. 
-import { getStepMetadata, getWritable } from "workflow"; - -export type MessageId = string; - -export type DLQEvent = - | { type: "processing"; messageId: string } - | { type: "attempt"; messageId: string; attempt: number } - | { type: "success"; messageId: string; attempt: number } - | { type: "retry"; messageId: string; attempt: number; error: string } - | { type: "dlq"; messageId: string; error: string; attempts: number } - | { type: "done"; summary: { delivered: number; deadLettered: number } }; - -type MessageResult = { - messageId: string; - status: "delivered" | "dead_lettered"; - attempts: number; - error?: string; -}; - -type BatchReport = { - status: "done"; - results: MessageResult[]; - summary: { - delivered: number; - deadLettered: number; - }; -}; - -// Demo: per-message processing latency so the UI can show progress -const PROCESS_DELAY_MS = 600; -const DLQ_DELAY_MS = 500; -const MAX_ATTEMPTS = 3; - -function delay(ms: number): Promise { - return new Promise((resolve) => setTimeout(resolve, ms)); -} - -export async function deadLetterQueue( - messages: string[], - poisonMessages: string[] = [] -): Promise { - "use workflow"; - - const results: MessageResult[] = []; - - for (const messageId of messages) { - const isPoison = poisonMessages.includes(messageId); - const result = await processMessage(messageId, isPoison); - results.push(result); - } - - return recordResults(results); -} - -async function processMessage( - messageId: string, - isPoison: boolean -): Promise { - "use step"; - - const writer = getWritable().getWriter(); - const { attempt } = getStepMetadata(); - try { - await writer.write({ type: "processing", messageId }); - await writer.write({ type: "attempt", messageId, attempt }); - await delay(PROCESS_DELAY_MS); - - if (isPoison) { - throw new Error(`Malformed payload: cannot parse message ${messageId}`); - } - - await writer.write({ type: "success", messageId, attempt }); + await deliverMessage(messageId); return { messageId, status: 
"delivered", attempts: attempt }; - } catch (error: unknown) { - const message = - error instanceof Error ? error.message : "Unknown processing error"; - + } catch (error) { if (attempt >= MAX_ATTEMPTS) { - await writer.write({ - type: "dlq", - messageId, - error: message, - attempts: attempt, - }); - return { - messageId, - status: "dead_lettered", - attempts: attempt, - error: message, - }; + return { messageId, status: "dead_lettered", attempts: attempt }; } - - await writer.write({ - type: "retry", - messageId, - attempt, - error: message, - }); - - throw error instanceof Error ? error : new Error(message); - } finally { - writer.releaseLock(); - } -} - -async function recordResults( - results: MessageResult[] -): Promise { - "use step"; - - const writer = getWritable().getWriter(); - - try { - await delay(DLQ_DELAY_MS); - - const delivered = results.filter((r) => r.status === "delivered").length; - const deadLettered = results.length - delivered; - - const report: BatchReport = { - status: "done", - results, - summary: { delivered, deadLettered }, - }; - - await writer.write({ type: "done", summary: report.summary }); - return report; - } finally { - writer.releaseLock(); + throw error; } } ``` ## Key APIs -- [`"use workflow"`](/docs/foundations/workflows-and-steps) — marks the orchestrator function -- [`"use step"`](/docs/foundations/workflows-and-steps) — marks functions that run with full Node.js access -- [`getStepMetadata()`](/docs/api-reference/step/get-step-metadata) — provides the current attempt number to decide when to dead-letter -- [`getWritable()`](/docs/api-reference/step/get-writable) — streams events to the caller +- [`"use workflow"`](/docs/api-reference/workflow/use-workflow) — declares the orchestrator function +- [`"use step"`](/docs/api-reference/workflow/use-step) — marks functions with full Node.js access +- [`getStepMetadata()`](/docs/api-reference/step/get-step-metadata) — provides the current attempt number diff --git 
a/docs/content/docs/cookbook/resilience/hedge-request.mdx b/docs/content/docs/cookbook/resilience/hedge-request.mdx index 576fc7c32f..aeea31a0ae 100644 --- a/docs/content/docs/cookbook/resilience/hedge-request.mdx +++ b/docs/content/docs/cookbook/resilience/hedge-request.mdx @@ -5,166 +5,27 @@ type: guide summary: Fire the same search query to two replicas and use whichever responds first. --- -Fire the same search query to two replicas and use whichever responds first. +Use hedge request when tail latency matters more than redundant work — fire the same request at multiple providers and take the first response. ## Pattern -The workflow launches the same step against multiple providers using `Promise.race()`. The first provider to respond wins, and the result is returned immediately. Because steps run in parallel, this cuts tail latency at the cost of redundant work. - -### Simplified - -```typescript lineNumbers -export async function hedgeRequest( - query: string, - providers: { name: string }[] -) { - "use workflow"; - - const result = await Promise.race( - providers.map((provider) => callProvider(provider.name, query)) - ); - - return { winner: result.provider, result: result.data }; -} - -async function callProvider(provider: string, query: string) { - "use step"; - const data = await fetch(`https://${provider}.example.com/search?q=${query}`); - return { provider, data: await data.json() }; -} -``` - -### Full Implementation +Launch the same step against multiple providers using `Promise.race()`. The first to respond wins. Because steps run in parallel, this cuts tail latency at the cost of extra calls. 
```typescript lineNumbers -import { getWritable } from "workflow"; - -export type HedgeEvent = - | { type: "config"; providers: string[]; query: string } - | { type: "provider_started"; provider: string } - | { type: "provider_responded"; provider: string; latencyMs: number } - | { type: "provider_lost"; provider: string; latencyMs: number } - | { type: "winner"; provider: string; latencyMs: number; result: string } - | { type: "done"; winner: string; latencyMs: number; totalProviders: number }; - -export interface HedgeResult { - winner: string; - latencyMs: number; - totalProviders: number; -} - -export interface HedgeInput { - query: string; - providers: ProviderConfig[]; -} +declare function queryProvider(name: string, query: string): Promise<{ provider: string; data: unknown }>; // @setup -export type ProviderConfig = { - name: string; - simulatedLatencyMs: number; -}; - -function delay(ms: number): Promise { - return new Promise((resolve) => setTimeout(resolve, ms)); -} - -export async function hedgeRequestFlow( - input: HedgeInput -): Promise { +export async function hedgeRequest(query: string, providers: string[]) { "use workflow"; - const { query, providers } = input; - - await emitEvent({ - type: "config", - providers: providers.map((p) => p.name), - query, - }); - - // Launch all providers in parallel, race for fastest - const raceResult = await Promise.race( - providers.map((provider) => callProvider(provider, query)) + const result = await Promise.race( + providers.map((name) => queryProvider(name, query)) ); - // Mark losers - for (const provider of providers) { - if (provider.name !== raceResult.provider) { - await emitEvent({ - type: "provider_lost", - provider: provider.name, - latencyMs: provider.simulatedLatencyMs, - }); - } - } - - await emitEvent({ - type: "done", - winner: raceResult.provider, - latencyMs: raceResult.latencyMs, - totalProviders: providers.length, - }); - - return { - winner: raceResult.provider, - latencyMs: 
raceResult.latencyMs, - totalProviders: providers.length, - }; -} - -export async function callProvider( - provider: ProviderConfig, - query: string -): Promise<{ provider: string; latencyMs: number; result: string }> { - "use step"; - - const writer = getWritable().getWriter(); - try { - await writer.write({ - type: "provider_started", - provider: provider.name, - }); - - // Simulate variable latency - await delay(provider.simulatedLatencyMs); - - const result = `${provider.name} processed "${query}"`; - - await writer.write({ - type: "provider_responded", - provider: provider.name, - latencyMs: provider.simulatedLatencyMs, - }); - - await writer.write({ - type: "winner", - provider: provider.name, - latencyMs: provider.simulatedLatencyMs, - result, - }); - - return { - provider: provider.name, - latencyMs: provider.simulatedLatencyMs, - result, - }; - } finally { - writer.releaseLock(); - } -} - -async function emitEvent(event: HedgeEvent): Promise { - "use step"; - const writer = getWritable().getWriter(); - try { - await writer.write(event); - } finally { - writer.releaseLock(); - } + return { winner: result.provider, data: result.data }; } ``` ## Key APIs -- [`"use workflow"`](/docs/foundations/workflows-and-steps) — marks the orchestrator function -- [`"use step"`](/docs/foundations/workflows-and-steps) — marks functions that run with full Node.js access +- [`"use workflow"`](/docs/api-reference/workflow/use-workflow) — declares the orchestrator function - [`Promise.race()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise/race) — returns the first provider to respond -- [`getWritable()`](/docs/api-reference/step/get-writable) — streams events to the caller diff --git a/docs/content/docs/cookbook/resilience/retry-backoff.mdx b/docs/content/docs/cookbook/resilience/retry-backoff.mdx index 06de00087c..a2c1fd3031 100644 --- a/docs/content/docs/cookbook/resilience/retry-backoff.mdx +++ 
b/docs/content/docs/cookbook/resilience/retry-backoff.mdx @@ -5,16 +5,14 @@ type: guide summary: Retry a flaky email API with 1s, 2s, 4s backoff instead of failing on the first hiccup. --- -Retry a flaky email API with 1s, 2s, 4s backoff instead of failing on the first hiccup. +Use retry with backoff when a step may fail transiently and you want exponentially increasing delays between attempts. ## Pattern -The workflow loops through attempts, calling a step that may fail. On failure, `sleep()` pauses with exponentially increasing delay before the next attempt. Because `sleep()` is durable, the backoff survives cold starts and replays. - -### Simplified +Loop through attempts, calling a step that may fail. On failure, `sleep()` pauses with exponentially increasing delay. Because `sleep()` is durable, the backoff survives cold starts and replays. ```typescript lineNumbers -import { sleep, FatalError } from "workflow"; +import { sleep } from "workflow"; declare function syncContactToCrm(contactId: string, attempt: number): Promise; // @setup @@ -40,141 +38,7 @@ export async function retryBackoff( } ``` -### Full Implementation - -```typescript lineNumbers -import { sleep, getWritable, FatalError } from "workflow"; - -export type RetryEvent = - | { type: "attempt_start"; attempt: number; contactId: string } - | { type: "attempt_fail"; attempt: number; error: string; sleepMs: number } - | { type: "attempt_success"; attempt: number; contactId: string } - | { type: "done"; status: "completed" | "failed"; attempts: number }; - -export interface ContactSyncResult { - contactId: string; - status: "completed" | "failed"; - attempts: number; - lastError?: string; -} - -const MAX_BACKOFF_MS = 8_000; -const STEP_DELAY_MS = 650; // Demo: visual pacing - -function backoffDelayMs(baseMs: number, attempt: number): number { - return Math.min(MAX_BACKOFF_MS, baseMs * 2 ** (attempt - 1)); -} - -function delay(ms: number): Promise { - return new Promise((resolve) => setTimeout(resolve, 
ms)); -} - -async function safeWrite( - writer: WritableStreamDefaultWriter, - event: RetryEvent -): Promise { - try { - await writer.write(event); - } catch { - // Best-effort streaming; step logic should continue on stream errors. - } -} - -export async function retryBackoffContactSync( - contactId: string, - maxAttempts: number = 5, - baseDelayMs: number = 1_000, - failuresBeforeSuccess: number = 2 -): Promise { - "use workflow"; - - for (let attempt = 1; attempt <= maxAttempts; attempt += 1) { - const nextSleepMs = - attempt < maxAttempts ? backoffDelayMs(baseDelayMs, attempt) : 0; - - try { - await syncContactToCrm( - contactId, - attempt, - failuresBeforeSuccess, - nextSleepMs - ); - await emitDone("completed", attempt); - return { contactId, status: "completed", attempts: attempt }; - } catch (error) { - const lastError = - error instanceof Error ? error.message : String(error); - - if (attempt >= maxAttempts) { - await emitDone("failed", attempt); - return { - contactId, - status: "failed", - attempts: attempt, - lastError, - }; - } - - await sleep(`${nextSleepMs}ms`); - } - } - - await emitDone("failed", maxAttempts); - return { contactId, status: "failed", attempts: maxAttempts }; -} - -async function emitDone( - status: "completed" | "failed", - attempts: number -): Promise { - "use step"; - - const writer = getWritable().getWriter(); - try { - await safeWrite(writer, { type: "done", status, attempts }); - } finally { - writer.releaseLock(); - } -} - -async function syncContactToCrm( - contactId: string, - attempt: number, - failuresBeforeSuccess: number, - nextSleepMs: number -): Promise { - "use step"; - - const writer = getWritable().getWriter(); - - try { - await safeWrite(writer, { type: "attempt_start", attempt, contactId }); - await delay(STEP_DELAY_MS); // Demo: simulate network latency - - if (attempt <= failuresBeforeSuccess) { - const error = "CRM API returned HTTP 503 Service Unavailable"; - await safeWrite(writer, { - type: "attempt_fail", - 
attempt, - error, - sleepMs: nextSleepMs, - }); - throw new FatalError(error); - } - - await safeWrite(writer, { type: "attempt_success", attempt, contactId }); - } finally { - writer.releaseLock(); - } -} - -syncContactToCrm.maxRetries = 0; -``` - ## Key APIs -- [`"use workflow"`](/docs/foundations/workflows-and-steps) — marks the orchestrator function -- [`"use step"`](/docs/foundations/workflows-and-steps) — marks functions that run with full Node.js access +- [`"use workflow"`](/docs/api-reference/workflow/use-workflow) — declares the orchestrator function - [`sleep()`](/docs/api-reference/workflow/sleep) — durable pause that survives replay -- [`FatalError`](/docs/api-reference/workflow/fatal-error) — prevents automatic retry so the workflow controls retry logic -- [`getWritable()`](/docs/api-reference/step/get-writable) — streams events to the caller diff --git a/docs/content/docs/cookbook/resilience/retryable-rate-limit.mdx b/docs/content/docs/cookbook/resilience/retryable-rate-limit.mdx index 4a3eb272b6..fa87c9cb51 100644 --- a/docs/content/docs/cookbook/resilience/retryable-rate-limit.mdx +++ b/docs/content/docs/cookbook/resilience/retryable-rate-limit.mdx @@ -5,13 +5,11 @@ type: guide summary: Sync contacts to an external CRM and auto-retry when the API returns 429 with retry-after. --- -Sync contacts to an external CRM and auto-retry when the API returns 429 with retry-after. +Use retryable rate limit when an external API returns 429 and you want the runtime to automatically reschedule the step after the specified delay. ## Pattern -A step throws `RetryableError` with a `retryAfter` duration when it receives a 429 response. The Workflow DevKit runtime automatically reschedules the step after the specified delay, using the built-in retry mechanism instead of manual sleep loops. - -### Simplified +A step throws `RetryableError` with a `retryAfter` duration when it receives a 429 response. 
The runtime automatically reschedules the step after the specified delay — no manual sleep loops needed. ```typescript lineNumbers import { RetryableError } from "workflow"; @@ -43,115 +41,8 @@ async function fetchContactFromCrm(contactId: string) { } ``` -### Full Implementation - -```typescript lineNumbers -import { RetryableError, getStepMetadata, getWritable } from "workflow"; - -export type RateLimitEvent = - | { - type: "attempt_start"; - attempt: number; - contactId: string; - idempotencyKey: string; - } - | { type: "http_429"; attempt: number; retryAfterMs: number } - | { type: "retry_scheduled"; attempt: number; retryAfterMs: number } - | { type: "step_done"; step: "fetch" | "upsert"; attempt: number } - | { - type: "done"; - contactId: string; - status: "synced"; - totalAttempts: number; - }; - -export type SyncResult = { - contactId: string; - status: "synced" | "failed"; - attempts?: number; -}; - -export async function syncCrmContact( - contactId: string, - failuresBeforeSuccess: number = 2 -): Promise { - "use workflow"; - - const contact = await fetchContactFromCrm(contactId, failuresBeforeSuccess); - await upsertIntoWarehouse(contactId, contact); - - return { contactId, status: "synced" }; -} - -async function fetchContactFromCrm( - contactId: string, - failuresBeforeSuccess: number -) { - "use step"; - - const { stepId, attempt } = getStepMetadata(); - const writer = getWritable().getWriter(); - const idempotencyKey = `crm-sync:${contactId}:${stepId}`; - - try { - await writer.write({ - type: "attempt_start", - attempt, - contactId, - idempotencyKey, - }); - - // Simulate CRM API latency - await new Promise((r) => setTimeout(r, 650)); - - if (attempt <= failuresBeforeSuccess) { - const retryAfterMs = - attempt === 1 ? 2000 : attempt === 2 ? 
1500 : 1000; - - await writer.write({ type: "http_429", attempt, retryAfterMs }); - await writer.write({ type: "retry_scheduled", attempt, retryAfterMs }); - - throw new RetryableError("CRM rate-limited (429)", { - retryAfter: retryAfterMs, - }); - } - - await writer.write({ type: "step_done", step: "fetch", attempt }); - return { id: contactId, name: "Jane Doe", email: "jane@example.com" }; - } finally { - writer.releaseLock(); - } -} - -async function upsertIntoWarehouse(contactId: string, contact: unknown) { - "use step"; - - const { attempt } = getStepMetadata(); - const writer = getWritable().getWriter(); - - try { - // Simulate warehouse write latency - await new Promise((r) => setTimeout(r, 600)); - - await writer.write({ type: "step_done", step: "upsert", attempt }); - await writer.write({ - type: "done", - contactId, - status: "synced", - totalAttempts: attempt, - }); - } finally { - writer.releaseLock(); - } - - void contact; -} -``` - ## Key APIs -- [`"use workflow"`](/docs/foundations/workflows-and-steps) — marks the orchestrator function -- [`"use step"`](/docs/foundations/workflows-and-steps) — marks functions that run with full Node.js access -- [`RetryableError`](/docs/api-reference/workflow/retryable-error) — signals the runtime to retry the step after a delay -- [`getStepMetadata()`](/docs/api-reference/step/get-step-metadata) — provides the current attempt number and step ID -- [`getWritable()`](/docs/api-reference/step/get-writable) — streams events to the caller +- [`"use workflow"`](/docs/api-reference/workflow/use-workflow) — declares the orchestrator function +- [`"use step"`](/docs/api-reference/workflow/use-step) — marks functions with full Node.js access +- [`RetryableError`](/docs/api-reference/workflow/retryable-error) — signals the runtime to retry after a delay diff --git a/docs/content/docs/cookbook/resilience/throttle.mdx b/docs/content/docs/cookbook/resilience/throttle.mdx index e4c8dacb03..1074c9959b 100644 --- 
a/docs/content/docs/cookbook/resilience/throttle.mdx +++ b/docs/content/docs/cookbook/resilience/throttle.mdx @@ -5,15 +5,15 @@ type: guide summary: Cap outbound API calls to 10/second so you don't blow your third-party rate limit. --- -Cap outbound API calls to 10/second so you don't blow your third-party rate limit. +Use throttle when you need to limit how many requests go through so you don't exceed an external rate limit. ## Pattern -The workflow maintains a token bucket in its orchestrator state. Before processing each request, it checks for available tokens and accepts or rejects accordingly. Since workflow state is durably persisted, the throttle survives restarts. - -### Simplified +Maintain a token bucket in the orchestrator. Before processing each request, check for available tokens. Since workflow state is durably persisted, the throttle survives restarts. ```typescript lineNumbers +declare function processRequest(requestId: string): Promise; // @setup + export async function throttleFlow( requests: { id: string }[], capacity: number, @@ -34,162 +34,15 @@ export async function throttleFlow( rejected++; } - // Refill a token every N requests - if ((i + 1) % refillRate === 0 && tokens < capacity) { - tokens++; - } - } - - return { accepted, rejected, total: requests.length }; -} - -async function processRequest(requestId: string) { - "use step"; - // Call your rate-limited API here -} -``` - -### Full Implementation - -```typescript lineNumbers -import { getWritable } from "workflow"; - -export type ThrottleEvent = - | { type: "config"; capacity: number; refillRate: number; requestCount: number } - | { type: "request_received"; requestId: string; position: number } - | { type: "token_check"; requestId: string; tokensAvailable: number } - | { type: "request_accepted"; requestId: string; tokensRemaining: number } - | { type: "request_rejected"; requestId: string; retryAfterMs: number } - | { type: "token_refilled"; tokensAvailable: number } - | { type: 
"done"; accepted: number; rejected: number; total: number }; - -export interface ThrottleResult { - accepted: number; - rejected: number; - total: number; -} - -export type RequestItem = { - id: string; - label: string; -}; - -export interface ThrottleInput { - capacity: number; - refillRate: number; - requests: RequestItem[]; -} - -// Demo timing -const PROCESS_DELAY_MS = 300; -const CHECK_DELAY_MS = 200; - -function delay(ms: number): Promise { - return new Promise((resolve) => setTimeout(resolve, ms)); -} - -export async function throttleFlow( - input: ThrottleInput -): Promise { - "use workflow"; - - const { capacity, refillRate, requests } = input; - let tokens = capacity; - let accepted = 0; - let rejected = 0; - - await emitEvent({ - type: "config", - capacity, - refillRate, - requestCount: requests.length, - }); - - for (let i = 0; i < requests.length; i++) { - const req = requests[i]; - const hasToken = tokens > 0; - - await evaluateRequest(req, i + 1, tokens, refillRate); - - if (hasToken) { - tokens--; - accepted++; - } else { - rejected++; - } - - // Refill: every refillRate requests, add 1 token back (simulates time passing) if ((i + 1) % refillRate === 0 && tokens < capacity) { tokens++; - await emitEvent({ type: "token_refilled", tokensAvailable: tokens }); } } - await emitEvent({ - type: "done", - accepted, - rejected, - total: requests.length, - }); - return { accepted, rejected, total: requests.length }; } - -async function evaluateRequest( - req: RequestItem, - position: number, - tokens: number, - refillRate: number -): Promise { - "use step"; - - const writer = getWritable().getWriter(); - try { - await writer.write({ - type: "request_received", - requestId: req.id, - position, - }); - await delay(PROCESS_DELAY_MS); - - await writer.write({ - type: "token_check", - requestId: req.id, - tokensAvailable: tokens, - }); - await delay(CHECK_DELAY_MS); - - if (tokens > 0) { - await writer.write({ - type: "request_accepted", - requestId: req.id, - 
tokensRemaining: tokens - 1, - }); - } else { - await writer.write({ - type: "request_rejected", - requestId: req.id, - retryAfterMs: refillRate * 1000, - }); - } - } finally { - writer.releaseLock(); - } -} - -async function emitEvent(event: ThrottleEvent): Promise { - "use step"; - const writer = getWritable().getWriter(); - try { - await writer.write(event); - } finally { - writer.releaseLock(); - } -} ``` ## Key APIs -- [`"use workflow"`](/docs/foundations/workflows-and-steps) — marks the orchestrator function -- [`"use step"`](/docs/foundations/workflows-and-steps) — marks functions that run with full Node.js access -- [`getWritable()`](/docs/api-reference/step/get-writable) — streams events to the caller +- [`"use workflow"`](/docs/api-reference/workflow/use-workflow) — declares the orchestrator function diff --git a/docs/content/docs/cookbook/routing/content-based-router.mdx b/docs/content/docs/cookbook/routing/content-based-router.mdx index c676174e40..f47bdfb174 100644 --- a/docs/content/docs/cookbook/routing/content-based-router.mdx +++ b/docs/content/docs/cookbook/routing/content-based-router.mdx @@ -5,13 +5,11 @@ type: guide summary: Classify a support ticket and route it to billing, technical, account, or feedback handlers. --- -When incoming messages need different processing paths depending on their content, use a content-based router. A support ticket about a payment issue should go to the billing team, while a bug report goes to engineering. +Use a content-based router when incoming messages need different processing paths depending on their content. ## Pattern -The workflow inspects the payload, classifies it, then branches with a standard `if`/`else` to call the appropriate step handler. Each handler is a separate step function with full Node.js access. - -### Simplified +The workflow inspects the payload, classifies it, then branches with a standard `if`/`else` to call the appropriate step handler. 
```typescript lineNumbers declare function classifyTicket(ticketId: string, subject: string): Promise<{ ticketType: string; confidence: number }>; // @setup @@ -43,241 +41,7 @@ export async function contentBasedRouterFlow( } ``` -### Full Implementation - -```typescript lineNumbers -import { getWritable, sleep } from "workflow"; - -export type TicketType = "billing" | "technical" | "account" | "feedback"; -export type TicketPriority = "low" | "medium" | "high" | "urgent"; - -export type RouterEvent = - | { type: "ticket_received"; ticketId: string; subject: string } - | { type: "classifying"; ticketId: string } - | { type: "classified"; ticketId: string; ticketType: TicketType; confidence: number } - | { type: "routing"; ticketId: string; destination: TicketType } - | { type: "handler_processing"; ticketId: string; destination: TicketType; step: string } - | { type: "handler_complete"; ticketId: string; destination: TicketType; resolution: string } - | { type: "done"; ticketId: string; routedTo: TicketType; totalSteps: number }; - -export interface ContentBasedRouterResult { - ticketId: string; - routedTo: TicketType; - totalSteps: number; -} - -// Simulated classification keywords per ticket type -const CLASSIFICATION_RULES: Record = { - billing: ["invoice", "charge", "payment", "refund", "subscription", "billing", "price"], - technical: ["error", "bug", "crash", "timeout", "api", "deploy", "technical", "broken"], - account: ["password", "login", "access", "permissions", "account", "profile", "settings"], - feedback: ["feature", "suggestion", "improvement", "feedback", "request", "wishlist"], -}; - -// Demo timing -const CLASSIFY_DELAY_MS = 800; -const ROUTE_DELAY_MS = 400; -const HANDLER_STEP_DELAY_MS = 600; - -function delay(ms: number): Promise { - return new Promise((resolve) => setTimeout(resolve, ms)); -} - -function classifyContent(subject: string): { ticketType: TicketType; confidence: number } { - const lower = subject.toLowerCase(); - let bestType: 
TicketType = "feedback"; - let bestScore = 0; - - for (const [type, keywords] of Object.entries(CLASSIFICATION_RULES) as [TicketType, string[]][]) { - const score = keywords.filter((kw) => lower.includes(kw)).length; - if (score > bestScore) { - bestScore = score; - bestType = type; - } - } - - const confidence = bestScore > 0 ? Math.min(0.99, 0.7 + bestScore * 0.1) : 0.5; - return { ticketType: bestType, confidence }; -} - -export async function contentBasedRouterFlow( - ticketId: string, - subject: string, - priority: TicketPriority = "medium" -): Promise { - "use workflow"; - - // Step 1: Receive ticket - await emitEvent({ type: "ticket_received", ticketId, subject }); - - // Step 2: Classify ticket content - const { ticketType, confidence } = await classifyTicket(ticketId, subject); - - // Step 3: Route to appropriate handler - await emitEvent({ type: "routing", ticketId, destination: ticketType }); - await sleep(`${ROUTE_DELAY_MS}ms`); - - // Step 4: Branch to specialized handler based on classification - let totalSteps: number; - if (ticketType === "billing") { - totalSteps = await handleBilling(ticketId, subject, priority); - } else if (ticketType === "technical") { - totalSteps = await handleTechnical(ticketId, subject, priority); - } else if (ticketType === "account") { - totalSteps = await handleAccount(ticketId, subject, priority); - } else { - totalSteps = await handleFeedback(ticketId, subject, priority); - } - - // Step 5: Emit completion - await emitEvent({ type: "done", ticketId, routedTo: ticketType, totalSteps }); - - return { ticketId, routedTo: ticketType, totalSteps }; -} - -async function classifyTicket( - ticketId: string, - subject: string -): Promise<{ ticketType: TicketType; confidence: number }> { - "use step"; - - const writer = getWritable().getWriter(); - try { - await writer.write({ type: "classifying", ticketId }); - await delay(CLASSIFY_DELAY_MS); - - const result = classifyContent(subject); - await writer.write({ - type: 
"classified", - ticketId, - ticketType: result.ticketType, - confidence: result.confidence, - }); - - return result; - } finally { - writer.releaseLock(); - } -} - -async function handleBilling( - ticketId: string, - _subject: string, - _priority: TicketPriority -): Promise { - "use step"; - - const writer = getWritable().getWriter(); - const steps = ["Verify account billing status", "Check payment history", "Generate resolution"]; - try { - for (const step of steps) { - await writer.write({ type: "handler_processing", ticketId, destination: "billing", step }); - await delay(HANDLER_STEP_DELAY_MS); - } - await writer.write({ - type: "handler_complete", - ticketId, - destination: "billing", - resolution: "Billing inquiry resolved — invoice adjustment applied", - }); - return steps.length; - } finally { - writer.releaseLock(); - } -} - -async function handleTechnical( - ticketId: string, - _subject: string, - _priority: TicketPriority -): Promise { - "use step"; - - const writer = getWritable().getWriter(); - const steps = ["Reproduce issue", "Analyze stack trace", "Apply fix", "Verify resolution"]; - try { - for (const step of steps) { - await writer.write({ type: "handler_processing", ticketId, destination: "technical", step }); - await delay(HANDLER_STEP_DELAY_MS); - } - await writer.write({ - type: "handler_complete", - ticketId, - destination: "technical", - resolution: "Technical issue resolved — patch deployed to staging", - }); - return steps.length; - } finally { - writer.releaseLock(); - } -} - -async function handleAccount( - ticketId: string, - _subject: string, - _priority: TicketPriority -): Promise { - "use step"; - - const writer = getWritable().getWriter(); - const steps = ["Verify identity", "Update account settings", "Confirm changes"]; - try { - for (const step of steps) { - await writer.write({ type: "handler_processing", ticketId, destination: "account", step }); - await delay(HANDLER_STEP_DELAY_MS); - } - await writer.write({ - type: 
"handler_complete", - ticketId, - destination: "account", - resolution: "Account issue resolved — access restored", - }); - return steps.length; - } finally { - writer.releaseLock(); - } -} - -async function handleFeedback( - ticketId: string, - _subject: string, - _priority: TicketPriority -): Promise { - "use step"; - - const writer = getWritable().getWriter(); - const steps = ["Log feedback", "Categorize suggestion", "Notify product team"]; - try { - for (const step of steps) { - await writer.write({ type: "handler_processing", ticketId, destination: "feedback", step }); - await delay(HANDLER_STEP_DELAY_MS); - } - await writer.write({ - type: "handler_complete", - ticketId, - destination: "feedback", - resolution: "Feedback logged — added to product backlog", - }); - return steps.length; - } finally { - writer.releaseLock(); - } -} - -async function emitEvent(event: RouterEvent): Promise { - "use step"; - const writer = getWritable().getWriter(); - try { - await writer.write(event); - } finally { - writer.releaseLock(); - } -} -``` - ## Key APIs -- [`"use workflow"`](/docs/api-reference/directives/use-workflow) — marks the orchestrator function -- [`"use step"`](/docs/api-reference/directives/use-step) — marks each handler as a durable step -- [`sleep()`](/docs/api-reference/workflow/sleep) — durable delay between routing and handling -- [`getWritable()`](/docs/api-reference/step/get-writable) — streams progress events to the client +- [`"use workflow"`](/docs/api-reference/workflow/use-workflow) — declares the orchestrator function +- [`"use step"`](/docs/api-reference/workflow/use-step) — each handler is a durable step diff --git a/docs/content/docs/cookbook/routing/content-enricher.mdx b/docs/content/docs/cookbook/routing/content-enricher.mdx index f11eb8f916..720646caba 100644 --- a/docs/content/docs/cookbook/routing/content-enricher.mdx +++ b/docs/content/docs/cookbook/routing/content-enricher.mdx @@ -5,13 +5,11 @@ type: guide summary: Enrich a sales lead 
by querying CRM, social, and Clearbit in parallel before routing to sales. --- -When a message arrives with minimal data and downstream steps need a richer picture, use a content enricher. The workflow fetches supplementary data from multiple sources and merges it into the message before passing it along. +Use a content enricher when a message arrives with minimal data and downstream steps need a richer picture. ## Pattern -The workflow looks up a base contact, then fans out to multiple enrichment sources in parallel using `Promise.allSettled`. Results are merged into a single enriched profile. Failed sources degrade gracefully without blocking the pipeline. - -### Simplified +The workflow looks up a base contact, then fans out to multiple enrichment sources in parallel using `Promise.allSettled`. Failed sources degrade gracefully without blocking the pipeline. ```typescript lineNumbers type BaseLead = { email: string; name: string; domain: string }; @@ -46,315 +44,7 @@ export async function enrichLeadProfile(email: string) { } ``` -### Full Implementation - -```typescript lineNumbers -// getWritable is used here to stream demo UI events. -// A production workflow wouldn't need this unless it has its own streaming UI. 
-import { getWritable } from "workflow"; - -export type EnrichmentSource = "crm" | "social" | "clearbit" | "github"; - -export type EnrichmentEvent = - | { type: "base_lookup" } - | { type: "base_done"; name: string; domain: string } - | { type: "source_start"; source: EnrichmentSource } - | { type: "source_done"; source: EnrichmentSource; data: unknown } - | { type: "source_failed"; source: EnrichmentSource; error: string } - | { type: "merging" } - | { type: "done"; profile: EnrichedLeadProfile }; - -export type BaseLead = { - email: string; - name: string; - domain: string; -}; - -export type CrmEnrichment = { - company: string; - title: string; - segment: "enterprise" | "mid-market"; -}; - -export type SocialEnrichment = { - followers: number; - location: string; - profileUrl: string; -}; - -export type ClearbitEnrichment = { - company: string; - employees: number; - score: number; -}; - -export type GitHubEnrichment = { - username: string; - publicRepos: number; - stars: number; -}; - -export type PartialEnrichmentPayload = { - crm: CrmEnrichment | null; - social: SocialEnrichment | null; - clearbit: ClearbitEnrichment | null; - github: GitHubEnrichment | null; -}; - -export type EnrichedLeadProfile = { - email: string; - name: string; - domain: string; - company: string | null; - title: string | null; - followers: number | null; - location: string | null; - githubUsername: string | null; - githubStars: number | null; - clearbitScore: number | null; - segment: string | null; -}; - -export type LeadEnrichmentResult = { - email: string; - baseLead: BaseLead; - sources: Record; - profile: EnrichedLeadProfile; -}; - -// Demo: simulate real-world network latency so the UI can show progress. 
-const SOURCE_DELAY_MS: Record = { - crm: 700, - social: 640, - clearbit: 810, - github: 760, -}; - -const BASE_DELAY_MS = 500; -const MERGE_DELAY_MS = 500; - -function delay(ms: number): Promise { - return new Promise((resolve) => setTimeout(resolve, ms)); -} - -export async function enrichLeadProfile(email: string): Promise { - "use workflow"; - - const baseLead = await lookupBaseContact(email); - - const [crm, social, clearbit, github] = await Promise.allSettled([ - fetchCrmEnrichment(baseLead), - fetchSocialEnrichment(baseLead), - fetchClearbitEnrichment(baseLead), - fetchGitHubEnrichment(baseLead), - ]); - - const profile = await mergeEnrichmentProfile(baseLead, { - crm: crm.status === "fulfilled" ? crm.value : null, - social: social.status === "fulfilled" ? social.value : null, - clearbit: clearbit.status === "fulfilled" ? clearbit.value : null, - github: github.status === "fulfilled" ? github.value : null, - }); - - return { - email: baseLead.email, - baseLead, - sources: { - crm: crm.status, - social: social.status, - clearbit: clearbit.status, - github: github.status, - }, - profile, - }; -} - -async function lookupBaseContact(email: string): Promise { - "use step"; - - const writer = getWritable().getWriter(); - try { - await writer.write({ type: "base_lookup" }); - await delay(BASE_DELAY_MS); - - const normalized = email.trim().toLowerCase(); - const [localPart = "lead", domain = "example.com"] = normalized.split("@"); - const lead: BaseLead = { - email: normalized, - name: humanizeLocalPart(localPart), - domain, - }; - - await writer.write({ type: "base_done", name: lead.name, domain: lead.domain }); - return lead; - } finally { - writer.releaseLock(); - } -} - -async function fetchCrmEnrichment(baseLead: BaseLead): Promise { - "use step"; - - const writer = getWritable().getWriter(); - try { - await writer.write({ type: "source_start", source: "crm" }); - await delay(SOURCE_DELAY_MS.crm); - - if (shouldForceFailure(baseLead.email, "crm")) { - const 
error = "CRM enrichment temporarily unavailable"; - await writer.write({ type: "source_failed", source: "crm", error }); - throw new Error(error); - } - - const data: CrmEnrichment = { - company: titleCase(baseLead.domain.split(".")[0] ?? "Example"), - title: "Senior Product Manager", - segment: "mid-market", - }; - - await writer.write({ type: "source_done", source: "crm", data }); - return data; - } finally { - writer.releaseLock(); - } -} - -async function fetchSocialEnrichment(baseLead: BaseLead): Promise { - "use step"; - - const writer = getWritable().getWriter(); - try { - await writer.write({ type: "source_start", source: "social" }); - await delay(SOURCE_DELAY_MS.social); - - if (shouldForceFailure(baseLead.email, "social")) { - const error = "Social enrichment temporarily unavailable"; - await writer.write({ type: "source_failed", source: "social", error }); - throw new Error(error); - } - - const data: SocialEnrichment = { - followers: 1830, - location: "San Francisco, CA", - profileUrl: `https://linkedin.com/in/${baseLead.name.toLowerCase().replace(/\s+/g, "-")}`, - }; - - await writer.write({ type: "source_done", source: "social", data }); - return data; - } finally { - writer.releaseLock(); - } -} - -async function fetchClearbitEnrichment(baseLead: BaseLead): Promise { - "use step"; - - const writer = getWritable().getWriter(); - try { - await writer.write({ type: "source_start", source: "clearbit" }); - await delay(SOURCE_DELAY_MS.clearbit); - - if (shouldForceFailure(baseLead.email, "clearbit")) { - const error = "Clearbit enrichment temporarily unavailable"; - await writer.write({ type: "source_failed", source: "clearbit", error }); - throw new Error(error); - } - - const data: ClearbitEnrichment = { - company: titleCase(baseLead.domain.split(".")[0] ?? 
"Example"), - employees: 240, - score: 78, - }; - - await writer.write({ type: "source_done", source: "clearbit", data }); - return data; - } finally { - writer.releaseLock(); - } -} - -async function fetchGitHubEnrichment(baseLead: BaseLead): Promise { - "use step"; - - const writer = getWritable().getWriter(); - try { - await writer.write({ type: "source_start", source: "github" }); - await delay(SOURCE_DELAY_MS.github); - - if (shouldForceFailure(baseLead.email, "github")) { - const error = "GitHub enrichment temporarily unavailable"; - await writer.write({ type: "source_failed", source: "github", error }); - throw new Error(error); - } - - const data: GitHubEnrichment = { - username: baseLead.name.toLowerCase().replace(/\s+/g, ""), - publicRepos: 23, - stars: 412, - }; - - await writer.write({ type: "source_done", source: "github", data }); - return data; - } finally { - writer.releaseLock(); - } -} - -async function mergeEnrichmentProfile( - baseLead: BaseLead, - sources: PartialEnrichmentPayload -): Promise { - "use step"; - - const writer = getWritable().getWriter(); - try { - await writer.write({ type: "merging" }); - await delay(MERGE_DELAY_MS); - - const profile: EnrichedLeadProfile = { - email: baseLead.email, - name: baseLead.name, - domain: baseLead.domain, - company: sources.crm?.company ?? sources.clearbit?.company ?? null, - title: sources.crm?.title ?? null, - followers: sources.social?.followers ?? null, - location: sources.social?.location ?? null, - githubUsername: sources.github?.username ?? null, - githubStars: sources.github?.stars ?? null, - clearbitScore: sources.clearbit?.score ?? null, - segment: sources.crm?.segment ?? null, - }; - - await writer.write({ type: "done", profile }); - return profile; - } finally { - writer.releaseLock(); - } -} - -function shouldForceFailure(email: string, source: EnrichmentSource): boolean { - const localPart = email.split("@")[0] ?? ""; - const plusSection = localPart.split("+")[1] ?? 
""; - const flags = plusSection.split(".").filter(Boolean); - return flags.includes(`fail-${source}`) || flags.includes(`fail${source}`); -} - -function humanizeLocalPart(localPart: string): string { - return localPart - .split(/[._-]+/) - .filter(Boolean) - .map((part) => titleCase(part)) - .join(" "); -} - -function titleCase(value: string): string { - if (!value) return ""; - return value.charAt(0).toUpperCase() + value.slice(1); -} -``` - ## Key APIs -- [`"use workflow"`](/docs/api-reference/directives/use-workflow) — marks the orchestrator function -- [`"use step"`](/docs/api-reference/directives/use-step) — marks each enrichment source lookup as a durable step -- [`getWritable()`](/docs/api-reference/step/get-writable) — streams enrichment progress to the client +- [`"use workflow"`](/docs/api-reference/workflow/use-workflow) — declares the orchestrator function +- [`"use step"`](/docs/api-reference/workflow/use-step) — each enrichment source is a durable step diff --git a/docs/content/docs/cookbook/routing/detour.mdx b/docs/content/docs/cookbook/routing/detour.mdx index 58cf6c99d3..e4ac82a72f 100644 --- a/docs/content/docs/cookbook/routing/detour.mdx +++ b/docs/content/docs/cookbook/routing/detour.mdx @@ -5,13 +5,11 @@ type: guide summary: Toggle a QA review stage on/off in a deploy pipeline based on a runtime feature flag. --- -When you need to conditionally insert or skip processing stages at runtime -- for maintenance windows, A/B tests, or feature flags -- use the detour pattern. A simple boolean controls whether extra steps execute. +Use the detour pattern when you need to conditionally insert or skip processing stages at runtime — for maintenance windows, A/B tests, or feature flags. ## Pattern -The workflow uses a standard `if` check on a runtime flag to conditionally call additional step functions. No special APIs are needed -- JavaScript control flow handles the detour. 
- -### Simplified +This is just normal JavaScript control flow in a durable workflow. A simple `if` check on a runtime flag controls whether extra steps execute. ```typescript lineNumbers declare function runBuild(deployId: string): Promise; // @setup @@ -19,183 +17,25 @@ declare function runLint(deployId: string): Promise; // @setup declare function runQaDetour(deployId: string): Promise; // @setup declare function runDeploy(deployId: string): Promise; // @setup -export async function detourFlow( - deployId: string, - qaMode: boolean = false, -) { - "use workflow"; - - let stepCount = 0; - - stepCount += await runBuild(deployId); - stepCount += await runLint(deployId); - - // Conditional detour — QA stages only when qaMode is true - if (qaMode) { - stepCount += await runQaDetour(deployId); - } - - stepCount += await runDeploy(deployId); - - return { deployId, totalSteps: stepCount, qaMode, status: "done" }; -} -``` - -### Full Implementation - -```typescript lineNumbers -import { getWritable, sleep } from "workflow"; - -export type DetourEvent = - | { type: "pipeline_started"; deployId: string; qaMode: boolean } - | { type: "step_running"; deployId: string; step: string } - | { type: "step_complete"; deployId: string; step: string; result: string } - | { type: "detour_entered"; deployId: string } - | { type: "detour_exited"; deployId: string } - | { type: "done"; deployId: string; totalSteps: number; qaMode: boolean }; - -export interface DetourResult { - deployId: string; - totalSteps: number; - qaMode: boolean; - status: "done"; -} - -// Demo timing -const STEP_DELAY_MS = 600; -const QA_STEP_DELAY_MS = 800; - -function delay(ms: number): Promise { - return new Promise((resolve) => setTimeout(resolve, ms)); -} - -export async function detourFlow( - deployId: string, - qaMode: boolean = false -): Promise { +export async function detourFlow(deployId: string, qaMode: boolean = false) { "use workflow"; let stepCount = 0; - // Step 1: Emit pipeline start - await 
emitEvent({ type: "pipeline_started", deployId, qaMode }); - - // Step 2: Build stepCount += await runBuild(deployId); - - // Step 3: Lint stepCount += await runLint(deployId); - // Step 4: Conditional detour — QA stages only when qaMode is true if (qaMode) { stepCount += await runQaDetour(deployId); } - // Step 5: Deploy stepCount += await runDeploy(deployId); - // Step 6: Emit completion - await emitEvent({ type: "done", deployId, totalSteps: stepCount, qaMode }); - return { deployId, totalSteps: stepCount, qaMode, status: "done" }; } - -async function runBuild(deployId: string): Promise { - "use step"; - - const writer = getWritable().getWriter(); - try { - await writer.write({ type: "step_running", deployId, step: "build" }); - await delay(STEP_DELAY_MS); - await writer.write({ - type: "step_complete", - deployId, - step: "build", - result: "Build succeeded — 42 modules compiled", - }); - return 1; - } finally { - writer.releaseLock(); - } -} - -async function runLint(deployId: string): Promise { - "use step"; - - const writer = getWritable().getWriter(); - try { - await writer.write({ type: "step_running", deployId, step: "lint" }); - await delay(STEP_DELAY_MS); - await writer.write({ - type: "step_complete", - deployId, - step: "lint", - result: "Lint passed — 0 warnings, 0 errors", - }); - return 1; - } finally { - writer.releaseLock(); - } -} - -async function runQaDetour(deployId: string): Promise { - "use step"; - - const writer = getWritable().getWriter(); - const qaSteps = [ - { step: "qa-review", result: "QA review approved — all acceptance criteria met" }, - { step: "staging-test", result: "Staging tests passed — 128/128 assertions green" }, - { step: "security-scan", result: "Security scan clear — no vulnerabilities found" }, - ]; - - try { - await writer.write({ type: "detour_entered", deployId }); - - for (const { step, result } of qaSteps) { - await writer.write({ type: "step_running", deployId, step }); - await delay(QA_STEP_DELAY_MS); - await 
writer.write({ type: "step_complete", deployId, step, result }); - } - - await writer.write({ type: "detour_exited", deployId }); - return qaSteps.length; - } finally { - writer.releaseLock(); - } -} - -async function runDeploy(deployId: string): Promise { - "use step"; - - const writer = getWritable().getWriter(); - try { - await writer.write({ type: "step_running", deployId, step: "deploy" }); - await delay(STEP_DELAY_MS); - await writer.write({ - type: "step_complete", - deployId, - step: "deploy", - result: "Deployed to production — v2.4.1 live", - }); - return 1; - } finally { - writer.releaseLock(); - } -} - -async function emitEvent(event: DetourEvent): Promise { - "use step"; - const writer = getWritable().getWriter(); - try { - await writer.write(event); - } finally { - writer.releaseLock(); - } -} ``` ## Key APIs -- [`"use workflow"`](/docs/api-reference/directives/use-workflow) — marks the orchestrator function -- [`"use step"`](/docs/api-reference/directives/use-step) — marks each pipeline stage as a durable step -- [`getWritable()`](/docs/api-reference/step/get-writable) — streams progress events to the client +- [`"use workflow"`](/docs/api-reference/workflow/use-workflow) — declares the orchestrator function +- [`"use step"`](/docs/api-reference/workflow/use-step) — each pipeline stage is a durable step diff --git a/docs/content/docs/cookbook/routing/message-filter.mdx b/docs/content/docs/cookbook/routing/message-filter.mdx index 336664f3ef..84d26b2b41 100644 --- a/docs/content/docs/cookbook/routing/message-filter.mdx +++ b/docs/content/docs/cookbook/routing/message-filter.mdx @@ -5,28 +5,22 @@ type: guide summary: Drop low-priority log events before they hit the expensive analytics pipeline. --- -When you need to discard messages that don't meet criteria before they reach expensive downstream processing, use a message filter. Each filter stage applies a rule and passes only qualifying messages to the next stage. 
+Use a message filter when you need to discard messages that don't meet criteria before they reach expensive downstream processing. ## Pattern -The workflow chains multiple filter steps in sequence. Each step receives a batch of messages, applies a rule (fraud score, minimum amount, allowed region), and returns the messages that pass along with the rejected ones. Results accumulate across stages. - -### Simplified +The workflow chains multiple filter steps in sequence. Each step applies a rule (fraud score, minimum amount, allowed region) and returns the messages that pass. Results accumulate across stages. ```typescript lineNumbers -import { FatalError } from "workflow"; - type Order = { id: string; amount: number; region: string; fraudScore: number }; declare function applyFraudCheck(orders: Order[], threshold: number): Promise<{ passed: Order[]; rejected: { order: Order; reason: string }[] }>; // @setup declare function applyAmountThreshold(orders: Order[], min: number): Promise<{ passed: Order[]; rejected: { order: Order; reason: string }[] }>; // @setup declare function applyRegionFilter(orders: Order[], regions: string[]): Promise<{ passed: Order[]; rejected: { order: Order; reason: string }[] }>; // @setup -export async function orderFilter() { +export async function orderFilter(orders: Order[]) { "use workflow"; - const orders = SAMPLE_ORDERS; - const afterFraud = await applyFraudCheck(orders, 70); const afterAmount = await applyAmountThreshold(afterFraud.passed, 10); const afterRegion = await applyRegionFilter(afterAmount.passed, ["US", "EU", "CA"]); @@ -38,225 +32,7 @@ export async function orderFilter() { } ``` -### Full Implementation - -```typescript lineNumbers -"use workflow"; - -import { FatalError } from "workflow"; - -// --- Types --- -export type Order = { - id: string; - amount: number; - region: string; - fraudScore: number; - customer: string; -}; - -export type FilterVerdict = "pass" | "reject"; - -export type FilterEvent = { - type: 
"filter_start" | "filter_check" | "filter_result" | "filter_done"; - orderId: string; - stage?: string; - verdict?: FilterVerdict; - reason?: string; - passedOrders?: Order[]; - rejectedOrders?: { order: Order; stage: string; reason: string }[]; -}; - -export type DemoConfig = { - fraudThreshold: number; - minAmount: number; - allowedRegions: string[]; -}; - -const DEFAULT_CONFIG: DemoConfig = { - fraudThreshold: 70, - minAmount: 10, - allowedRegions: ["US", "EU", "CA"], -}; - -const SAMPLE_ORDERS: Order[] = [ - { id: "ORD-001", amount: 250, region: "US", fraudScore: 12, customer: "Alice" }, - { id: "ORD-002", amount: 5, region: "EU", fraudScore: 8, customer: "Bob" }, - { id: "ORD-003", amount: 1200, region: "CN", fraudScore: 45, customer: "Charlie" }, - { id: "ORD-004", amount: 89, region: "US", fraudScore: 92, customer: "Diana" }, - { id: "ORD-005", amount: 430, region: "CA", fraudScore: 15, customer: "Eve" }, - { id: "ORD-006", amount: 75, region: "BR", fraudScore: 55, customer: "Frank" }, - { id: "ORD-007", amount: 3, region: "EU", fraudScore: 88, customer: "Grace" }, - { id: "ORD-008", amount: 610, region: "US", fraudScore: 5, customer: "Hank" }, -]; - -// --- Entry point --- -export async function orderFilter(config?: Partial) { - "use workflow"; - const cfg = { ...DEFAULT_CONFIG, ...config }; - const orders = SAMPLE_ORDERS; - - const afterFraud = await applyFraudCheck(orders, cfg.fraudThreshold); - const afterAmount = await applyAmountThreshold(afterFraud.passed, cfg.minAmount); - const afterRegion = await applyRegionFilter(afterAmount.passed, cfg.allowedRegions); - - await emitResults( - afterRegion.passed, - [...afterFraud.rejected, ...afterAmount.rejected, ...afterRegion.rejected] - ); -} - -type StageResult = { - passed: Order[]; - rejected: { order: Order; stage: string; reason: string }[]; -}; - -// --- Step: Fraud check --- -export async function applyFraudCheck(orders: Order[], threshold: number): Promise { - "use step"; - const { getWritable } = 
await import("workflow"); - const writable = getWritable(); - const writer = writable.getWriter(); - const passed: Order[] = []; - const rejected: StageResult["rejected"] = []; - - for (const order of orders) { - await writer.write({ - type: "filter_check", - orderId: order.id, - stage: "fraud", - }); - await new Promise((r) => setTimeout(r, 300)); - - if (order.fraudScore > threshold) { - rejected.push({ order, stage: "fraud", reason: `Fraud score ${order.fraudScore} > ${threshold}` }); - await writer.write({ - type: "filter_result", - orderId: order.id, - stage: "fraud", - verdict: "reject", - reason: `Fraud score ${order.fraudScore} exceeds threshold ${threshold}`, - }); - } else { - passed.push(order); - await writer.write({ - type: "filter_result", - orderId: order.id, - stage: "fraud", - verdict: "pass", - }); - } - } - - writer.close(); - return { passed, rejected }; -} - -// --- Step: Amount threshold --- -export async function applyAmountThreshold(orders: Order[], minAmount: number): Promise { - "use step"; - const { getWritable } = await import("workflow"); - const writable = getWritable(); - const writer = writable.getWriter(); - const passed: Order[] = []; - const rejected: StageResult["rejected"] = []; - - for (const order of orders) { - await writer.write({ - type: "filter_check", - orderId: order.id, - stage: "amount", - }); - await new Promise((r) => setTimeout(r, 300)); - - if (order.amount < minAmount) { - rejected.push({ order, stage: "amount", reason: `Amount $${order.amount} < $${minAmount}` }); - await writer.write({ - type: "filter_result", - orderId: order.id, - stage: "amount", - verdict: "reject", - reason: `Order amount $${order.amount} below minimum $${minAmount}`, - }); - } else { - passed.push(order); - await writer.write({ - type: "filter_result", - orderId: order.id, - stage: "amount", - verdict: "pass", - }); - } - } - - writer.close(); - return { passed, rejected }; -} - -// --- Step: Region filter --- -export async function 
applyRegionFilter(orders: Order[], allowedRegions: string[]): Promise { - "use step"; - const { getWritable } = await import("workflow"); - const writable = getWritable(); - const writer = writable.getWriter(); - const passed: Order[] = []; - const rejected: StageResult["rejected"] = []; - - for (const order of orders) { - await writer.write({ - type: "filter_check", - orderId: order.id, - stage: "region", - }); - await new Promise((r) => setTimeout(r, 300)); - - if (!allowedRegions.includes(order.region)) { - rejected.push({ order, stage: "region", reason: `Region ${order.region} not in ${allowedRegions.join(",")}` }); - await writer.write({ - type: "filter_result", - orderId: order.id, - stage: "region", - verdict: "reject", - reason: `Region "${order.region}" not in allowed regions [${allowedRegions.join(", ")}]`, - }); - } else { - passed.push(order); - await writer.write({ - type: "filter_result", - orderId: order.id, - stage: "region", - verdict: "pass", - }); - } - } - - writer.close(); - return { passed, rejected }; -} - -// --- Step: Emit final results --- -export async function emitResults( - passedOrders: Order[], - rejectedOrders: { order: Order; stage: string; reason: string }[] -) { - "use step"; - const { getWritable } = await import("workflow"); - const writable = getWritable(); - const writer = writable.getWriter(); - - await writer.write({ - type: "filter_done", - orderId: "summary", - passedOrders, - rejectedOrders, - }); - - writer.close(); -} -``` - ## Key APIs -- [`"use workflow"`](/docs/api-reference/directives/use-workflow) — marks the orchestrator function -- [`"use step"`](/docs/api-reference/directives/use-step) — marks each filter stage as a durable step -- [`FatalError`](/docs/api-reference/workflow/fatal-error) — available for halting on critical filter failures -- [`getWritable()`](/docs/api-reference/step/get-writable) — streams per-order filter verdicts to the client +- [`"use workflow"`](/docs/api-reference/workflow/use-workflow) — 
declares the orchestrator function +- [`"use step"`](/docs/api-reference/workflow/use-step) — each filter stage is a durable step diff --git a/docs/content/docs/cookbook/routing/message-translator.mdx b/docs/content/docs/cookbook/routing/message-translator.mdx index 1a5abe704b..ce689fc4bf 100644 --- a/docs/content/docs/cookbook/routing/message-translator.mdx +++ b/docs/content/docs/cookbook/routing/message-translator.mdx @@ -5,13 +5,11 @@ type: guide summary: Convert partner XML orders into your internal JSON schema at the API boundary. --- -When your system receives messages in formats that don't match your internal schema -- XML from one partner, CSV from another, legacy JSON from a third -- use a message translator at the boundary. The workflow detects the format, maps fields to a canonical shape, validates, and delivers. +Use a message translator when your system receives messages in formats that don't match your internal schema — XML from one partner, CSV from another, legacy JSON from a third. ## Pattern -The workflow chains step functions sequentially: detect the source format, translate fields from source to canonical names, validate the output, and deliver to the target system. Each step is independently retriable. - -### Simplified +The workflow chains step functions sequentially: detect the source format, translate fields to canonical names, validate the output, and deliver. Each step is independently retriable. 
```typescript lineNumbers type SourceFormat = "xml" | "csv" | "legacy-json"; @@ -21,233 +19,19 @@ declare function translateFields(messageId: string, format: SourceFormat): Promi declare function validateOutput(messageId: string, fieldCount: number): Promise; // @setup declare function deliverMessage(messageId: string): Promise; // @setup -export async function messageTranslatorFlow( - messageId: string, - sourceFormat: SourceFormat = "xml", -) { - "use workflow"; - - const rawMessage = SAMPLE_MESSAGES[sourceFormat]; - - const detected = await detectSourceFormat(messageId, rawMessage); - const fieldsTranslated = await translateFields(messageId, detected.sourceFormat); - await validateOutput(messageId, fieldsTranslated); - await deliverMessage(messageId); - - return { messageId, sourceFormat: detected.sourceFormat, fieldsTranslated }; -} -``` - -### Full Implementation - -```typescript lineNumbers -import { getWritable, sleep } from "workflow"; - -export type SourceFormat = "xml" | "csv" | "legacy-json"; - -export type TranslatorEvent = - | { type: "message_received"; messageId: string; sourceFormat: SourceFormat } - | { type: "detecting_format"; messageId: string } - | { type: "format_detected"; messageId: string; sourceFormat: SourceFormat; confidence: number } - | { type: "translating"; messageId: string; sourceFormat: SourceFormat; step: string } - | { type: "field_mapped"; messageId: string; sourceField: string; canonicalField: string; value: string } - | { type: "validating"; messageId: string } - | { type: "validation_passed"; messageId: string; fieldCount: number } - | { type: "delivering"; messageId: string; destination: string } - | { type: "done"; messageId: string; sourceFormat: SourceFormat; fieldsTranslated: number }; - -export interface MessageTranslatorResult { - messageId: string; - sourceFormat: SourceFormat; - fieldsTranslated: number; -} - -// Simulated raw messages from three legacy systems -const SAMPLE_MESSAGES: Record = { - xml: `ORD-5501ACME 
Corp2499.00USD2026-03-14`, - csv: `order_id,customer_name,total_amount,currency,order_date\nORD-5502,Globex Inc,1875.50,EUR,2026-03-14`, - "legacy-json": `{"oid":"ORD-5503","c_name":"Initech LLC","total":"3200.00","cur":"GBP","dt":"20260314"}`, -}; - -// Field mappings per source format → canonical fields -const FIELD_MAPS: Record> = { - xml: [ - { source: "id", canonical: "orderId" }, - { source: "cust", canonical: "customerName" }, - { source: "amt", canonical: "amount" }, - { source: "curr", canonical: "currency" }, - { source: "date", canonical: "orderDate" }, - ], - csv: [ - { source: "order_id", canonical: "orderId" }, - { source: "customer_name", canonical: "customerName" }, - { source: "total_amount", canonical: "amount" }, - { source: "currency", canonical: "currency" }, - { source: "order_date", canonical: "orderDate" }, - ], - "legacy-json": [ - { source: "oid", canonical: "orderId" }, - { source: "c_name", canonical: "customerName" }, - { source: "total", canonical: "amount" }, - { source: "cur", canonical: "currency" }, - { source: "dt", canonical: "orderDate" }, - ], -}; - -// Simulated values extracted from each format -const EXTRACTED_VALUES: Record = { - xml: ["ORD-5501", "ACME Corp", "2499.00", "USD", "2026-03-14"], - csv: ["ORD-5502", "Globex Inc", "1875.50", "EUR", "2026-03-14"], - "legacy-json": ["ORD-5503", "Initech LLC", "3200.00", "GBP", "2026-03-14"], -}; - -// Demo timing -const DETECT_DELAY_MS = 600; -const FIELD_MAP_DELAY_MS = 400; -const VALIDATE_DELAY_MS = 500; -const DELIVER_DELAY_MS = 600; - -function delay(ms: number): Promise { - return new Promise((resolve) => setTimeout(resolve, ms)); -} - -function detectFormat(rawMessage: string): { sourceFormat: SourceFormat; confidence: number } { - if (rawMessage.trimStart().startsWith("<")) return { sourceFormat: "xml", confidence: 0.97 }; - if (rawMessage.includes(",") && rawMessage.includes("\n")) return { sourceFormat: "csv", confidence: 0.94 }; - if 
(rawMessage.trimStart().startsWith("{") && rawMessage.includes("oid")) return { sourceFormat: "legacy-json", confidence: 0.92 }; - return { sourceFormat: "csv", confidence: 0.5 }; -} - -export async function messageTranslatorFlow( - messageId: string, - sourceFormat: SourceFormat = "xml" -): Promise { +export async function messageTranslatorFlow(messageId: string, rawMessage: string) { "use workflow"; - const rawMessage = SAMPLE_MESSAGES[sourceFormat]; - - // Step 1: Receive message - await emitEvent({ type: "message_received", messageId, sourceFormat }); - - // Step 2: Detect format const detected = await detectSourceFormat(messageId, rawMessage); - - // Step 3: Translate fields const fieldsTranslated = await translateFields(messageId, detected.sourceFormat); - - // Step 4: Validate canonical output await validateOutput(messageId, fieldsTranslated); - - // Step 5: Deliver to target system await deliverMessage(messageId); - // Step 6: Done - await emitEvent({ type: "done", messageId, sourceFormat: detected.sourceFormat, fieldsTranslated }); - return { messageId, sourceFormat: detected.sourceFormat, fieldsTranslated }; } - -async function detectSourceFormat( - messageId: string, - rawMessage: string -): Promise<{ sourceFormat: SourceFormat; confidence: number }> { - "use step"; - - const writer = getWritable().getWriter(); - try { - await writer.write({ type: "detecting_format", messageId }); - await delay(DETECT_DELAY_MS); - - const result = detectFormat(rawMessage); - await writer.write({ - type: "format_detected", - messageId, - sourceFormat: result.sourceFormat, - confidence: result.confidence, - }); - - return result; - } finally { - writer.releaseLock(); - } -} - -async function translateFields( - messageId: string, - sourceFormat: SourceFormat -): Promise { - "use step"; - - const writer = getWritable().getWriter(); - const fieldMap = FIELD_MAPS[sourceFormat]; - const values = EXTRACTED_VALUES[sourceFormat]; - - try { - await writer.write({ type: 
"translating", messageId, sourceFormat, step: "Parsing raw message" }); - await delay(FIELD_MAP_DELAY_MS); - - for (let i = 0; i < fieldMap.length; i++) { - const mapping = fieldMap[i]; - await writer.write({ - type: "field_mapped", - messageId, - sourceField: mapping.source, - canonicalField: mapping.canonical, - value: values[i], - }); - await delay(FIELD_MAP_DELAY_MS); - } - - return fieldMap.length; - } finally { - writer.releaseLock(); - } -} - -async function validateOutput( - messageId: string, - fieldCount: number -): Promise { - "use step"; - - const writer = getWritable().getWriter(); - try { - await writer.write({ type: "validating", messageId }); - await delay(VALIDATE_DELAY_MS); - - await writer.write({ type: "validation_passed", messageId, fieldCount }); - } finally { - writer.releaseLock(); - } -} - -async function deliverMessage( - messageId: string -): Promise { - "use step"; - - const writer = getWritable().getWriter(); - try { - await writer.write({ type: "delivering", messageId, destination: "canonical-orders-api" }); - await delay(DELIVER_DELAY_MS); - } finally { - writer.releaseLock(); - } -} - -async function emitEvent(event: TranslatorEvent): Promise { - "use step"; - const writer = getWritable().getWriter(); - try { - await writer.write(event); - } finally { - writer.releaseLock(); - } -} ``` ## Key APIs -- [`"use workflow"`](/docs/api-reference/directives/use-workflow) — marks the orchestrator function -- [`"use step"`](/docs/api-reference/directives/use-step) — marks each translation stage as a durable step -- [`getWritable()`](/docs/api-reference/step/get-writable) — streams field mapping progress to the client +- [`"use workflow"`](/docs/api-reference/workflow/use-workflow) — declares the orchestrator function +- [`"use step"`](/docs/api-reference/workflow/use-step) — each translation stage is a durable step diff --git a/docs/content/docs/cookbook/routing/normalizer.mdx b/docs/content/docs/cookbook/routing/normalizer.mdx index 
49cb5f5781..ee1087b264 100644 --- a/docs/content/docs/cookbook/routing/normalizer.mdx +++ b/docs/content/docs/cookbook/routing/normalizer.mdx @@ -5,13 +5,11 @@ type: guide summary: Accept orders as XML, CSV, or legacy JSON and transform them into a single canonical shape. --- -When your system ingests messages from multiple sources in different formats, use a normalizer to convert them all into a single canonical shape before further processing. This decouples downstream logic from source-specific quirks. +Use a normalizer when your system ingests messages from multiple sources in different formats. It converts them all into a single canonical shape before further processing. ## Pattern -The workflow detects the format of each incoming message, parses it into a canonical structure, and emits the normalized results. Each phase is a separate step function, so format detection and parsing are independently retriable. - -### Simplified +The workflow detects the format of each incoming message, parses it into a canonical structure, and emits the normalized results. Each phase is a separate step, so format detection and parsing are independently retriable. 
```typescript lineNumbers import { FatalError } from "workflow"; @@ -23,14 +21,13 @@ type CanonicalOrder = { orderId: string; customer: string; amount: number; curre declare function detectFormats(messages: RawMessage[]): Promise; // @setup declare function parseToCanonical(messages: RawMessage[]): Promise<{ successful: CanonicalOrder[]; failed: { messageId: string; error: string }[] }>; // @setup -export async function normalizer(config?: { strictMode?: boolean }) { +export async function normalizer(messages: RawMessage[], strictMode = false) { "use workflow"; - const messages = SAMPLE_MESSAGES; const detected = await detectFormats(messages); const parsed = await parseToCanonical(detected); - if (config?.strictMode && parsed.failed.length > 0) { + if (strictMode && parsed.failed.length > 0) { throw new FatalError(`${parsed.failed.length} messages failed normalization`); } @@ -38,275 +35,8 @@ export async function normalizer(config?: { strictMode?: boolean }) { } ``` -### Full Implementation - -```typescript lineNumbers -"use workflow"; - - -// --- Types --- -export type RawFormat = "xml" | "csv" | "legacy-json"; - -export type RawMessage = { - id: string; - format: RawFormat; - payload: string; -}; - -export type CanonicalOrder = { - orderId: string; - customer: string; - amount: number; - currency: string; - sourceFormat: RawFormat; -}; - -export type NormalizeEvent = { - type: - | "normalize_detect" - | "normalize_parse" - | "normalize_result" - | "normalize_done"; - messageId: string; - detectedFormat?: RawFormat; - canonical?: CanonicalOrder; - error?: string; - results?: { - successful: CanonicalOrder[]; - failed: { messageId: string; error: string }[]; - }; -}; - -export type DemoConfig = { - strictMode: boolean; // if true, fail on unknown formats; if false, skip them -}; - -const DEFAULT_CONFIG: DemoConfig = { - strictMode: false, -}; - -const SAMPLE_MESSAGES: RawMessage[] = [ - { - id: "MSG-001", - format: "xml", - payload: - 'Alice250.00USD', - }, - { - 
id: "MSG-002", - format: "csv", - payload: "C-202,Bob,89.50,EUR", - }, - { - id: "MSG-003", - format: "legacy-json", - payload: JSON.stringify({ - order_num: "L-303", - cust_name: "Charlie", - total: 1200, - cur: "GBP", - }), - }, - { - id: "MSG-004", - format: "xml", - payload: - 'Diana430.00USD', - }, - { - id: "MSG-005", - format: "csv", - payload: "C-505,Eve,75.25,CAD", - }, - { - id: "MSG-006", - format: "legacy-json", - payload: JSON.stringify({ - order_num: "L-606", - cust_name: "Frank", - total: 610, - cur: "JPY", - }), - }, -]; - -// --- Entry point --- -export async function normalizer(config?: Partial) { - "use workflow"; - const cfg = { ...DEFAULT_CONFIG, ...config }; - const messages = SAMPLE_MESSAGES; - - const detected = await detectFormats(messages); - const parsed = await parseToCanonical(detected); - await emitNormalized(parsed.successful, parsed.failed, cfg.strictMode); -} - -type DetectedMessage = RawMessage & { detectedFormat: RawFormat }; - -// --- Step: Detect format --- -export async function detectFormats( - messages: RawMessage[] -): Promise { - "use step"; - const { getWritable } = await import("workflow"); - const writable = getWritable(); - const writer = writable.getWriter(); - const results: DetectedMessage[] = []; - - for (const msg of messages) { - await new Promise((r) => setTimeout(r, 250)); - - // In a real system, format detection would inspect the payload - // Here we trust the declared format but still emit detection events - const detectedFormat = msg.format; - - await writer.write({ - type: "normalize_detect", - messageId: msg.id, - detectedFormat, - }); - - results.push({ ...msg, detectedFormat }); - } - - writer.close(); - return results; -} - -type ParseResult = { - successful: CanonicalOrder[]; - failed: { messageId: string; error: string }[]; -}; - -// --- Step: Parse to canonical --- -export async function parseToCanonical( - messages: DetectedMessage[] -): Promise { - "use step"; - const { getWritable } = await 
import("workflow"); - const writable = getWritable(); - const writer = writable.getWriter(); - const successful: CanonicalOrder[] = []; - const failed: ParseResult["failed"] = []; - - for (const msg of messages) { - await new Promise((r) => setTimeout(r, 300)); - - try { - const canonical = parseMessage(msg); - successful.push(canonical); - - await writer.write({ - type: "normalize_parse", - messageId: msg.id, - canonical, - }); - } catch (err) { - const error = err instanceof Error ? err.message : String(err); - failed.push({ messageId: msg.id, error }); - - await writer.write({ - type: "normalize_result", - messageId: msg.id, - error, - }); - } - } - - writer.close(); - return { successful, failed }; -} - -// --- Step: Emit normalized results --- -export async function emitNormalized( - successful: CanonicalOrder[], - failed: { messageId: string; error: string }[], - strictMode: boolean -) { - "use step"; - const { getWritable, FatalError: Fatal } = await import("workflow"); - const writable = getWritable(); - const writer = writable.getWriter(); - - if (strictMode && failed.length > 0) { - await writer.write({ - type: "normalize_done", - messageId: "summary", - error: `Strict mode: ${failed.length} message(s) failed to normalize`, - results: { successful, failed }, - }); - writer.close(); - throw new Fatal( - `Strict mode: ${failed.length} messages failed normalization` - ); - } - - await writer.write({ - type: "normalize_done", - messageId: "summary", - results: { successful, failed }, - }); - - writer.close(); -} - -// --- Format parsers --- -function parseMessage(msg: DetectedMessage): CanonicalOrder { - switch (msg.detectedFormat) { - case "xml": - return parseXml(msg); - case "csv": - return parseCsv(msg); - case "legacy-json": - return parseLegacyJson(msg); - default: - throw new Error(`Unknown format: ${msg.detectedFormat}`); - } -} - -function parseXml(msg: DetectedMessage): CanonicalOrder { - const id = msg.payload.match(/id="([^"]+)"/)?.[1] ?? 
"unknown"; - const customer = - msg.payload.match(/([^<]+)<\/customer>/)?.[1] ?? "unknown"; - const amount = parseFloat( - msg.payload.match(/([^<]+)<\/amount>/)?.[1] ?? "0" - ); - const currency = - msg.payload.match(/([^<]+)<\/currency>/)?.[1] ?? "USD"; - return { orderId: id, customer, amount, currency, sourceFormat: "xml" }; -} - -function parseCsv(msg: DetectedMessage): CanonicalOrder { - const [orderId, customer, amountStr, currency] = msg.payload.split(","); - return { - orderId: orderId ?? "unknown", - customer: customer ?? "unknown", - amount: parseFloat(amountStr ?? "0"), - currency: currency ?? "USD", - sourceFormat: "csv", - }; -} - -function parseLegacyJson(msg: DetectedMessage): CanonicalOrder { - const data = JSON.parse(msg.payload) as { - order_num: string; - cust_name: string; - total: number; - cur: string; - }; - return { - orderId: data.order_num, - customer: data.cust_name, - amount: data.total, - currency: data.cur, - sourceFormat: "legacy-json", - }; -} -``` - ## Key APIs -- [`"use workflow"`](/docs/api-reference/directives/use-workflow) — marks the orchestrator function -- [`"use step"`](/docs/api-reference/directives/use-step) — marks detection, parsing, and emission as durable steps -- [`FatalError`](/docs/api-reference/workflow/fatal-error) — stops retries when strict mode validation fails -- [`getWritable()`](/docs/api-reference/step/get-writable) — streams normalization progress to the client +- [`"use workflow"`](/docs/api-reference/workflow/use-workflow) — declares the orchestrator function +- [`"use step"`](/docs/api-reference/workflow/use-step) — detection and parsing are durable steps +- [`FatalError`](/docs/api-reference/workflow/fatal-error) — halts retries when strict validation fails diff --git a/docs/content/docs/cookbook/routing/routing-slip.mdx b/docs/content/docs/cookbook/routing/routing-slip.mdx index 2a3198f5d7..ceeb8a438f 100644 --- a/docs/content/docs/cookbook/routing/routing-slip.mdx +++ 
b/docs/content/docs/cookbook/routing/routing-slip.mdx @@ -5,13 +5,11 @@ type: guide summary: Execute a flexible sequence of processing stages defined per-request in a routing slip. --- -When each request needs a different sequence of processing stages -- and that sequence is determined at request time rather than hardcoded -- use a routing slip. The caller passes an ordered list of stages, and the workflow iterates through them. +Use a routing slip when each request needs a different sequence of processing stages determined at request time rather than hardcoded. ## Pattern -The workflow receives a slip (an array of stage names) alongside the payload. It loops through the slip, calling a step function for each stage. This lets different requests follow completely different processing paths without changing the workflow code. - -### Simplified +The caller passes an ordered list of stage names alongside the payload. The workflow loops through the slip, calling a step function for each stage. Different requests can follow completely different paths without changing the workflow code. 
```typescript lineNumbers type SlipStage = "inventory" | "payment" | "packaging" | "shipping" | "notification"; @@ -32,118 +30,7 @@ export async function routingSlip(orderId: string, slip: SlipStage[]) { } ``` -### Full Implementation - -```typescript lineNumbers -import { getWritable } from "workflow"; - -export type SlipStage = "inventory" | "payment" | "packaging" | "shipping" | "notification"; - -export type SlipEvent = - | { type: "stage_start"; stage: SlipStage; index: number } - | { type: "stage_complete"; stage: SlipStage; index: number; message: string; durationMs: number } - | { type: "done"; totalMs: number; stageCount: number }; - -export type StageResult = { - stage: SlipStage; - status: "completed"; - message: string; - durationMs: number; -}; - -export type RoutingSlipResult = { - status: "completed"; - orderId: string; - stages: StageResult[]; - totalMs: number; -}; - -// Demo: per-stage delays so the UI shows staggered progression -const STAGE_DELAY_MS: Record = { - inventory: 600, - payment: 750, - packaging: 800, - shipping: 900, - notification: 650, -}; - -export async function routingSlip( - orderId: string, - slip: SlipStage[] -): Promise { - "use workflow"; - - const results: StageResult[] = []; - const startMs = Date.now(); - - for (let i = 0; i < slip.length; i++) { - const result = await processStage(orderId, slip[i], i); - results.push(result); - } - - const totalMs = Date.now() - startMs; - await emitDone(totalMs, results.length); - - return { - status: "completed", - orderId, - stages: results, - totalMs, - }; -} - -async function processStage( - orderId: string, - stage: SlipStage, - index: number -): Promise { - "use step"; - - const writer = getWritable().getWriter(); - const startMs = Date.now(); - - try { - await writer.write({ type: "stage_start", stage, index }); - - // Demo: simulate processing time for visualization - await new Promise((r) => setTimeout(r, STAGE_DELAY_MS[stage])); - - const messages: Record = { - inventory: 
`Verified stock for order ${orderId}`, - payment: `Payment processed for order ${orderId}`, - packaging: `Package prepared for order ${orderId}`, - shipping: `Shipment dispatched for order ${orderId}`, - notification: `Customer notified for order ${orderId}`, - }; - - const durationMs = Date.now() - startMs; - await writer.write({ type: "stage_complete", stage, index, message: messages[stage], durationMs }); - - return { - stage, - status: "completed", - message: messages[stage], - durationMs, - }; - } finally { - writer.releaseLock(); - } -} - -async function emitDone(totalMs: number, stageCount: number): Promise { - "use step"; - - const writer = getWritable().getWriter(); - try { - await writer.write({ type: "done", totalMs, stageCount }); - } finally { - writer.releaseLock(); - } -} -``` - ## Key APIs -- [`"use workflow"`](/docs/api-reference/directives/use-workflow) — marks the orchestrator function -- [`"use step"`](/docs/api-reference/directives/use-step) — marks each stage processor as a durable step -- [`getWritable()`](/docs/api-reference/step/get-writable) — streams stage progress events to the client +- [`"use workflow"`](/docs/api-reference/workflow/use-workflow) — declares the orchestrator function +- [`"use step"`](/docs/api-reference/workflow/use-step) — each stage processor is a durable step diff --git a/docs/content/docs/cookbook/webhooks/async-request-reply.mdx b/docs/content/docs/cookbook/webhooks/async-request-reply.mdx index aa3fabb114..edeebaeafa 100644 --- a/docs/content/docs/cookbook/webhooks/async-request-reply.mdx +++ b/docs/content/docs/cookbook/webhooks/async-request-reply.mdx @@ -5,13 +5,11 @@ type: guide summary: Submit a request to a vendor API and resume when the webhook callback arrives. --- -Submit a request to a vendor API and resume when the webhook callback arrives. Use this pattern when the external service responds asynchronously via a callback URL instead of returning a result inline. 
+Submit a request to a vendor API and resume when the webhook callback arrives. Use this when the external service responds asynchronously via a callback URL instead of returning a result inline. ## Pattern -Create a webhook, pass its token to the vendor, then race the incoming callback against a heartbeat-driven timeout. The workflow suspends with zero compute cost until the callback arrives or the deadline expires. - -### Simplified +Create a webhook, pass its token to the vendor, then race the incoming callback against a timeout. The workflow suspends with zero compute cost until the callback arrives or the deadline expires. ```typescript lineNumbers import { createWebhook, sleep, FatalError } from "workflow"; @@ -27,7 +25,6 @@ export async function asyncRequestReply(documentId: string) { await submitVerification(documentId, correlationId, webhook.token); - // Race: wait for vendor callback OR timeout const result = await Promise.race([ (async () => { for await (const request of webhook) { @@ -47,217 +44,9 @@ export async function asyncRequestReply(documentId: string) { } ``` -### Full Implementation - -```typescript lineNumbers -import { - createWebhook, - getWritable, - sleep, - FatalError, - type RequestWithResponse, - type Webhook, -} from "workflow"; - -// ── Event types (discriminated union) ──────────────────────────────────── - -export type CallbackPayload = - | { status: "approved"; details: string } - | { status: "rejected"; reason: string }; - -export type AsyncReplyEvent = - | { type: "submitted"; documentId: string; correlationId: string; webhookToken: string } - | { type: "waiting"; correlationId: string; timeoutMs: number } - | { type: "heartbeat"; elapsed: number; timeoutMs: number } - | { type: "callback_received"; correlationId: string; payload: CallbackPayload } - | { type: "duplicate_callback_ignored"; correlationId: string } - | { type: "timed_out"; correlationId: string } - | { type: "finalized"; outcome: "verified" | "rejected" | 
"timed_out"; details: string } - | { type: "done"; outcome: "verified" | "rejected" | "timed_out" }; - -const TIMEOUT_MS = 30_000; // 30 seconds for demo -const HEARTBEAT_MS = 3_000; // heartbeat every 3 seconds - -// ── Main workflow ──────────────────────────────────────────────────────── - -export async function asyncRequestReply(documentId: string) { - "use workflow"; - - const correlationId = `doc:${documentId}`; - - // Phase 1 — Submit verification and register webhook - const webhook = createWebhook({ respondWith: "manual" }); - await submitVerification(documentId, correlationId, webhook.token); - - // Phase 2 — Wait for vendor callback or timeout - const result = await awaitCallbackOrTimeout(correlationId, webhook); - - // Phase 3 — Finalize and emit done - await finalizeResult(result); - - return { documentId, correlationId, outcome: result.outcome }; -} - -// ── Phase 1: Submit verification ───────────────────────────────────────── - -async function submitVerification( - documentId: string, - correlationId: string, - webhookToken: string -) { - "use step"; - const writer = getWritable().getWriter(); - try { - await writer.write({ type: "submitted", documentId, correlationId, webhookToken }); - } finally { - writer.releaseLock(); - } -} - -// ── Phase 2: Await callback or timeout ─────────────────────────────────── - -type WaitResult = - | { outcome: "verified"; details: string } - | { outcome: "rejected"; details: string } - | { outcome: "timed_out"; details: string }; - -async function awaitCallbackOrTimeout( - correlationId: string, - webhook: Webhook -): Promise { - await emit({ type: "waiting", correlationId, timeoutMs: TIMEOUT_MS }); - - // Shared flag so the losing branch stops emitting after the race settles - let settled = false; - - const result = await Promise.race([ - // Branch A: listen for vendor callbacks via webhook - (async (): Promise => { - let first = true; - - for await (const request of webhook) { - const payload = await 
processCallback(request, correlationId, first); - - if (first) { - first = false; - settled = true; - return payload.status === "approved" - ? { outcome: "verified" as const, details: payload.details } - : { outcome: "rejected" as const, details: payload.reason }; - } - // After first callback, loop continues to catch duplicates - // until the timeout branch wins the race - } - - throw new FatalError("Webhook stream closed without receiving a callback"); - })(), - - // Branch B: heartbeat loop that eventually times out - (async (): Promise => { - let elapsed = 0; - while (elapsed < TIMEOUT_MS && !settled) { - await sleep(new Date(Date.now() + HEARTBEAT_MS)); - elapsed += HEARTBEAT_MS; - if (settled) break; - if (elapsed < TIMEOUT_MS) { - await emit({ type: "heartbeat", elapsed, timeoutMs: TIMEOUT_MS }); - } - } - if (settled) { - return { outcome: "timed_out", details: "Cancelled — callback already received" }; - } - settled = true; - await emit({ type: "timed_out", correlationId }); - return { outcome: "timed_out", details: `No vendor response within ${TIMEOUT_MS / 1000}s` }; - })(), - ]); - - return result; -} - -// ── Step: process a single webhook callback ────────────────────────────── - -async function processCallback( - request: RequestWithResponse, - correlationId: string, - isFirst: boolean -): Promise { - "use step"; - - const writer = getWritable().getWriter(); - - try { - const body = await request.json().catch(() => ({})); - const payload = parseCallbackPayload(body); - - if (isFirst) { - await request.respondWith(Response.json({ ack: true, status: payload.status })); - await writer.write({ type: "callback_received", correlationId, payload }); - } else { - await request.respondWith( - Response.json({ ack: false, duplicate: true }, { status: 409 }) - ); - await writer.write({ type: "duplicate_callback_ignored", correlationId }); - } - - return payload; - } finally { - writer.releaseLock(); - } -} - -// ── Phase 3: Finalize result 
───────────────────────────────────────────── - -async function finalizeResult(result: WaitResult) { - "use step"; - const writer = getWritable().getWriter(); - try { - await writer.write({ - type: "finalized", - outcome: result.outcome, - details: result.details, - }); - await writer.write({ type: "done", outcome: result.outcome }); - } finally { - writer.releaseLock(); - } -} - -// ── Helpers ────────────────────────────────────────────────────────────── - -function parseCallbackPayload(body: Record): CallbackPayload { - if (body?.status === "approved") { - return { - status: "approved", - details: typeof body.details === "string" ? body.details : "Document verified", - }; - } - return { - status: "rejected", - reason: typeof body.reason === "string" ? body.reason : "Verification failed", - }; -} - -/** - * Step: Emit a single event to the UI stream. - * Re-acquires the writer inside the step so it survives durable suspension. - */ -async function emit(event: AsyncReplyEvent): Promise { - "use step"; - const writer = getWritable().getWriter(); - try { - await writer.write(event); - } finally { - writer.releaseLock(); - } -} -``` - ## Key APIs -- [`"use workflow"`](/docs/foundations/workflows-and-steps) — marks the orchestrator function -- [`"use step"`](/docs/foundations/workflows-and-steps) — marks functions with full Node.js access +- [`"use workflow"`](/docs/api-reference/workflow/use-workflow) — declares the orchestrator function - [`createWebhook()`](/docs/api-reference/workflow/create-webhook) — creates an HTTP endpoint the workflow can await - [`sleep()`](/docs/api-reference/workflow/sleep) — durable timer that survives restarts - [`FatalError`](/docs/api-reference/workflow/fatal-error) — prevents automatic retries on permanent failures -- [`getWritable()`](/docs/api-reference/workflow/get-writable) — streams events to the client diff --git a/docs/content/docs/cookbook/webhooks/claim-check.mdx b/docs/content/docs/cookbook/webhooks/claim-check.mdx index 
0390f936c3..2520c3b1dd 100644 --- a/docs/content/docs/cookbook/webhooks/claim-check.mdx +++ b/docs/content/docs/cookbook/webhooks/claim-check.mdx @@ -5,14 +5,12 @@ type: guide summary: Accept a lightweight token instead of passing a 50 MB file through every workflow step. --- -Accept a lightweight token instead of passing a 50 MB file through every workflow step. Use this pattern when payloads are too large to serialize into the event log efficiently. +Accept a lightweight token instead of passing a 50 MB file through every workflow step. Use this when payloads are too large to serialize into the event log efficiently. ## Pattern The workflow receives a small identifier (the "claim check") instead of the full payload. A hook suspends execution until the external system signals that the blob is ready, then a step fetches and processes it using the token. -### Simplified - ```typescript lineNumbers import { defineHook } from "workflow"; @@ -35,73 +33,7 @@ export async function claimCheckImport(importId: string) { } ``` -### Full Implementation - -```typescript lineNumbers -import { defineHook, getWritable } from "workflow"; - -// Typed events streamed to the UI via getWritable() -export type ClaimCheckEvent = - | { type: "start"; importId: string; hookToken: string } - | { type: "waiting"; importId: string; hookToken: string } - | { type: "upload_received"; importId: string; blobToken: string } - | { type: "processing"; importId: string; blobToken: string } - | { type: "completed"; importId: string; blobToken: string } - | { type: "done"; importId: string; status: "indexed" }; - -export const blobReady = defineHook<{ blobToken: string }>(); - -export async function claimCheckImport(importId: string) { - "use workflow"; - - const hookToken = `upload:${importId}`; - - await emit({ type: "start", importId, hookToken }); - await emit({ type: "waiting", importId, hookToken }); - - // Claim-check: only a token enters the workflow (not a 50MB payload). 
- const { blobToken } = await blobReady.create({ token: hookToken }); - - await emit({ type: "upload_received", importId, blobToken }); - - await emit({ type: "processing", importId, blobToken }); - await processBlob(blobToken); - - await emit({ type: "completed", importId, blobToken }); - await emit({ type: "done", importId, status: "indexed" }); - - return { importId, blobToken, status: "indexed" as const }; -} - -/** - * Step: Emit a single event to the UI stream. - * Re-acquires the writer inside the step so it survives durable suspension. - */ -async function emit(event: T): Promise { - "use step"; - const writer = getWritable().getWriter(); - try { - await writer.write(event); - } finally { - writer.releaseLock(); - } -} - -async function processBlob(blobToken: string) { - "use step"; - // Simulate fetching + indexing a large blob by its token - await delay(700); - console.info("[claim-check] process_blob", { blobToken }); -} - -function delay(ms: number): Promise { - return new Promise((resolve) => setTimeout(resolve, ms)); -} -``` - ## Key APIs -- [`"use workflow"`](/docs/foundations/workflows-and-steps) — marks the orchestrator function -- [`"use step"`](/docs/foundations/workflows-and-steps) — marks functions with full Node.js access +- [`"use workflow"`](/docs/api-reference/workflow/use-workflow) — declares the orchestrator function - [`defineHook()`](/docs/api-reference/workflow/define-hook) — creates a named hook that suspends until signaled -- [`getWritable()`](/docs/api-reference/workflow/get-writable) — streams events to the client diff --git a/docs/content/docs/cookbook/webhooks/event-gateway.mdx b/docs/content/docs/cookbook/webhooks/event-gateway.mdx index 80bdda3533..0dadd3a740 100644 --- a/docs/content/docs/cookbook/webhooks/event-gateway.mdx +++ b/docs/content/docs/cookbook/webhooks/event-gateway.mdx @@ -5,13 +5,11 @@ type: guide summary: Wait for payment, inventory, and fraud-check signals to all arrive before shipping an order. 
--- -Wait for payment, inventory, and fraud-check signals to all arrive before shipping an order. Use this pattern when a workflow must collect multiple independent signals before proceeding. +Wait for payment, inventory, and fraud-check signals to all arrive before shipping an order. Use this when a workflow must collect multiple independent signals before proceeding. ## Pattern -Create one hook per expected signal, then race `Promise.all` (all signals arrived) against `sleep` (deadline expired). If all signals arrive in time, continue to the next phase. Otherwise, report which signals are missing. - -### Simplified +Create one hook per expected signal, then race `Promise.all` (all signals arrived) against `sleep` (deadline expired). If all signals arrive in time, continue. Otherwise, report which signals are missing. ```typescript lineNumbers import { defineHook, sleep } from "workflow"; @@ -43,104 +41,8 @@ export async function eventGateway(orderId: string, timeoutMs: number = 10_000) } ``` -### Full Implementation - -```typescript lineNumbers -import { defineHook, getWritable, sleep } from "workflow"; - -// Typed events streamed to the UI via getWritable() -export type GatewayEvent = - | { type: "waiting"; orderId: string; tokens: Record; timeoutMs: number } - | { type: "signal_received"; orderId: string; signal: string; token: string } - | { type: "all_received"; orderId: string } - | { type: "shipping"; orderId: string } - | { type: "shipped"; orderId: string } - | { type: "timeout"; orderId: string; missing: string[] } - | { type: "done"; orderId: string; status: "shipped" | "timeout" }; - -export const orderSignal = defineHook<{ ok: true }>(); - -const SIGNAL_KINDS = ["payment", "inventory", "fraud"] as const; -export type SignalKind = (typeof SIGNAL_KINDS)[number]; - -export async function eventGateway(orderId: string, timeoutMs: number = 10_000) { - "use workflow"; - - const tokens: Record = {}; - const hooks = SIGNAL_KINDS.map((kind) => { - const token = 
`${kind}:${orderId}`; - tokens[kind] = token; - return { kind, hook: orderSignal.create({ token }), token }; - }); - - await emit({ - type: "waiting", - orderId, - tokens, - timeoutMs, - }); - - // Track which signals have been received - const received = new Set(); - - const signalPromises = hooks.map(({ kind, hook, token }) => - hook.then(() => { - received.add(kind); - return { kind, token }; - }) - ); - - const outcome = await Promise.race([ - Promise.all(signalPromises).then((results) => ({ type: "ready" as const, results })), - sleep(`${timeoutMs}ms`).then(() => ({ type: "timeout" as const, results: [] as { kind: string; token: string }[] })), - ]); - - // Emit signal_received events for all signals that arrived - for (const { kind, token } of outcome.results) { - await emit({ type: "signal_received", orderId, signal: kind, token }); - } - - if (outcome.type === "timeout") { - const missing = SIGNAL_KINDS.filter((k) => !received.has(k)); - await emit({ type: "timeout", orderId, missing }); - await emit({ type: "done", orderId, status: "timeout" }); - return { orderId, status: "timeout" as const }; - } - - await emit({ type: "all_received", orderId }); - await emit({ type: "shipping", orderId }); - await shipOrder(orderId); - await emit({ type: "shipped", orderId }); - await emit({ type: "done", orderId, status: "shipped" }); - return { orderId, status: "shipped" as const }; -} - -/** - * Step: Emit a single event to the UI stream. - * Re-acquires the writer inside the step so it survives durable suspension. 
- */ -async function emit(event: T): Promise { - "use step"; - const writer = getWritable().getWriter(); - try { - await writer.write(event); - } finally { - writer.releaseLock(); - } -} - -async function shipOrder(orderId: string) { - "use step"; - // Simulate shipping API call - await new Promise((resolve) => setTimeout(resolve, 600)); - console.info("[event-gateway] ship_order", { orderId }); -} -``` - ## Key APIs -- [`"use workflow"`](/docs/foundations/workflows-and-steps) — marks the orchestrator function -- [`"use step"`](/docs/foundations/workflows-and-steps) — marks functions with full Node.js access +- [`"use workflow"`](/docs/api-reference/workflow/use-workflow) — declares the orchestrator function - [`defineHook()`](/docs/api-reference/workflow/define-hook) — creates a named hook that suspends until signaled - [`sleep()`](/docs/api-reference/workflow/sleep) — durable timer that survives restarts -- [`getWritable()`](/docs/api-reference/workflow/get-writable) — streams events to the client diff --git a/docs/content/docs/cookbook/webhooks/request-reply.mdx b/docs/content/docs/cookbook/webhooks/request-reply.mdx index 893b6bdaa5..2dea261f36 100644 --- a/docs/content/docs/cookbook/webhooks/request-reply.mdx +++ b/docs/content/docs/cookbook/webhooks/request-reply.mdx @@ -9,9 +9,7 @@ Send a request to a service, wait for a correlated reply with a deadline, and re ## Pattern -For each downstream service, send the request and wait up to a deadline for the reply. If the deadline expires, retry up to a configurable maximum. The workflow orchestrates the sequential fan-out while each step handles the actual RPC call. - -### Simplified +For each downstream service, send the request and wait up to a deadline for the reply. If the deadline expires, retry up to a configurable maximum. 
```typescript lineNumbers import { sleep } from "workflow"; @@ -39,175 +37,7 @@ export async function requestReplyFlow( } ``` -### Full Implementation - -```typescript lineNumbers -import { getWritable, sleep } from "workflow"; - -export type RequestReplyEvent = - | { type: "request_sent"; requestId: string; service: string; payload: string } - | { type: "waiting_for_reply"; requestId: string; service: string; deadline: string } - | { type: "reply_received"; requestId: string; service: string; response: string; latencyMs: number } - | { type: "timeout"; requestId: string; service: string; attempt: number } - | { type: "retrying"; requestId: string; service: string; attempt: number; maxAttempts: number } - | { type: "all_replies_collected"; requestId: string; results: Array<{ service: string; response: string }> } - | { type: "failed"; requestId: string; service: string; reason: string } - | { type: "done"; requestId: string; totalServices: number; successCount: number; failCount: number }; - -export interface RequestReplyResult { - requestId: string; - results: Array<{ service: string; response: string | null; success: boolean }>; -} - -// Demo timing -const REQUEST_DELAY_MS = 400; -const REPLY_BASE_DELAY_MS = 600; - -function delay(ms: number): Promise { - return new Promise((resolve) => setTimeout(resolve, ms)); -} - -// Simulated service responses — in production these would be real RPC calls -const SERVICE_RESPONSES: Record = { - "user-service": { response: "user_profile={name:'Ada',plan:'pro'}", latencyMs: 350, failOnFirstAttempt: false }, - "inventory-service": { response: "stock={sku:'WF-100',qty:42}", latencyMs: 1200, failOnFirstAttempt: true }, - "payment-service": { response: "payment={method:'card',last4:'4242'}", latencyMs: 500, failOnFirstAttempt: false }, -}; - -export async function requestReplyFlow( - requestId: string, - services: string[] = ["user-service", "inventory-service", "payment-service"], - timeoutMs: number = 800, - maxAttempts: number = 
2 -): Promise { - "use workflow"; - - const results: Array<{ service: string; response: string | null; success: boolean }> = []; - - for (const service of services) { - const result = await sendRequest(requestId, service, `lookup:${requestId}`, timeoutMs, maxAttempts); - results.push(result); - } - - const successResults = results - .filter((r) => r.success && r.response) - .map((r) => ({ service: r.service, response: r.response! })); - - await emitEvent({ - type: "all_replies_collected", - requestId, - results: successResults, - }); - - await emitEvent({ - type: "done", - requestId, - totalServices: services.length, - successCount: results.filter((r) => r.success).length, - failCount: results.filter((r) => !r.success).length, - }); - - return { requestId, results }; -} - -async function sendRequest( - requestId: string, - service: string, - payload: string, - timeoutMs: number, - maxAttempts: number -): Promise<{ service: string; response: string | null; success: boolean }> { - "use step"; - - const writer = getWritable().getWriter(); - const serviceConfig = SERVICE_RESPONSES[service] ?? { - response: "ok", - latencyMs: 400, - failOnFirstAttempt: false, - }; - - try { - for (let attempt = 1; attempt <= maxAttempts; attempt++) { - await writer.write({ - type: "request_sent", - requestId, - service, - payload, - }); - - await delay(REQUEST_DELAY_MS); - - await writer.write({ - type: "waiting_for_reply", - requestId, - service, - deadline: `${timeoutMs}ms`, - }); - - // Simulate: first attempt of a slow service exceeds timeout - const simulatedLatency = - serviceConfig.failOnFirstAttempt && attempt === 1 - ? 
timeoutMs + 500 // will exceed deadline - : serviceConfig.latencyMs; - - if (simulatedLatency > timeoutMs) { - // Timeout — service too slow - await delay(timeoutMs); - await writer.write({ type: "timeout", requestId, service, attempt }); - - if (attempt < maxAttempts) { - await writer.write({ - type: "retrying", - requestId, - service, - attempt: attempt + 1, - maxAttempts, - }); - } - continue; - } - - // Reply arrives within deadline - await delay(simulatedLatency); - await writer.write({ - type: "reply_received", - requestId, - service, - response: serviceConfig.response, - latencyMs: simulatedLatency, - }); - - return { service, response: serviceConfig.response, success: true }; - } - - // Exhausted all attempts - await writer.write({ - type: "failed", - requestId, - service, - reason: `No reply after ${maxAttempts} attempts`, - }); - - return { service, response: null, success: false }; - } finally { - writer.releaseLock(); - } -} - -async function emitEvent(event: RequestReplyEvent): Promise { - "use step"; - const writer = getWritable().getWriter(); - try { - await writer.write(event); - } finally { - writer.releaseLock(); - } -} -``` - ## Key APIs -- [`"use workflow"`](/docs/foundations/workflows-and-steps) — marks the orchestrator function -- [`"use step"`](/docs/foundations/workflows-and-steps) — marks functions with full Node.js access +- [`"use workflow"`](/docs/api-reference/workflow/use-workflow) — declares the orchestrator function - [`sleep()`](/docs/api-reference/workflow/sleep) — durable timer that survives restarts -- [`getWritable()`](/docs/api-reference/workflow/get-writable) — streams events to the client diff --git a/docs/content/docs/cookbook/webhooks/status-poller.mdx b/docs/content/docs/cookbook/webhooks/status-poller.mdx index 197cba771d..05ff84cdef 100644 --- a/docs/content/docs/cookbook/webhooks/status-poller.mdx +++ b/docs/content/docs/cookbook/webhooks/status-poller.mdx @@ -5,14 +5,12 @@ type: guide summary: Poll a video transcoding 
job until it's ready, sleeping between checks with a max-poll safety valve. --- -Poll a video transcoding job until it's ready, sleeping between checks with a max-poll safety valve. Use this pattern when an external system has no callback mechanism and you need to wait for completion. +Poll a video transcoding job until it's ready, sleeping between checks with a max-poll safety valve. Use this when an external system has no callback mechanism and you need to wait for completion. ## Pattern Loop up to a maximum number of polls. Each iteration calls a step to check the job status. If the job is not ready, use `sleep()` to wait before the next check. The durable sleep means the workflow consumes zero compute while waiting. -### Simplified - ```typescript lineNumbers import { sleep } from "workflow"; @@ -41,181 +39,7 @@ export async function pollTranscodeStatus( } ``` -### Full Implementation - -```typescript lineNumbers -import { sleep, getWritable } from "workflow"; - -export type JobState = - | "queued" - | "processing" - | "encoding" - | "finalizing" - | "ready"; - -export type PollEvent = - | { type: "poll_start"; poll: number; jobId: string } - | { - type: "poll_result"; - poll: number; - jobState: JobState; - outcome: "not_ready" | "ready"; - } - | { type: "sleep_start"; poll: number; durationMs: number } - | { type: "sleep_end"; poll: number } - | { type: "completed"; poll: number; jobId: string } - | { type: "timeout"; poll: number; jobId: string } - | { type: "done"; jobId: string; status: "completed" | "timeout"; pollCount: number }; - -export interface PollResult { - jobId: string; - status: "completed" | "timeout"; - pollCount: number; - finalState?: string; -} - -const STEP_DELAY_MS = 500; -const JOB_STATE_SEQUENCE: JobState[] = [ - "queued", - "processing", - "encoding", - "finalizing", - "ready", -]; - -function delay(ms: number): Promise { - return new Promise((resolve) => setTimeout(resolve, ms)); -} - -async function safeWrite( - writer: 
WritableStreamDefaultWriter, - event: PollEvent -): Promise { - try { - await writer.write(event); - } catch { - // Best-effort streaming - } -} - -export async function pollTranscodeStatus( - jobId: string, - maxPolls: number = 8, - intervalMs: number = 1000, - readyAtPoll: number = 4 -): Promise { - "use workflow"; - - for (let poll = 1; poll <= maxPolls; poll++) { - const state = await checkTranscodeJob(jobId, poll, readyAtPoll); - - if (state === "ready") { - await emitDone(jobId, "completed", poll); - return { jobId, status: "completed", pollCount: poll, finalState: state }; - } - - if (poll < maxPolls) { - await emitSleepStart(poll, intervalMs); - await sleep(`${intervalMs}ms`); - await emitSleepEnd(poll); - } - } - - await emitTimeout(maxPolls, jobId); - await emitDone(jobId, "timeout", maxPolls); - - return { jobId, status: "timeout", pollCount: maxPolls }; -} - -async function checkTranscodeJob( - jobId: string, - poll: number, - readyAtPoll: number -): Promise { - "use step"; - - const writer = getWritable().getWriter(); - - try { - await safeWrite(writer, { type: "poll_start", poll, jobId }); - await delay(STEP_DELAY_MS); - - let jobState: JobState; - if (poll >= readyAtPoll) { - jobState = "ready"; - } else { - const statesBeforeReady = JOB_STATE_SEQUENCE.slice(0, -1); - const idx = Math.min(poll - 1, statesBeforeReady.length - 1); - jobState = statesBeforeReady[idx]; - } - - const outcome = jobState === "ready" ? 
"ready" : "not_ready"; - await safeWrite(writer, { type: "poll_result", poll, jobState, outcome }); - - if (outcome === "ready") { - await safeWrite(writer, { type: "completed", poll, jobId }); - } - - return jobState; - } finally { - writer.releaseLock(); - } -} - -checkTranscodeJob.maxRetries = 0; - -async function emitSleepStart(poll: number, durationMs: number): Promise { - "use step"; - const writer = getWritable().getWriter(); - try { - await safeWrite(writer, { type: "sleep_start", poll, durationMs }); - } finally { - writer.releaseLock(); - } -} - -emitSleepStart.maxRetries = 0; - -async function emitSleepEnd(poll: number): Promise { - "use step"; - const writer = getWritable().getWriter(); - try { - await safeWrite(writer, { type: "sleep_end", poll }); - } finally { - writer.releaseLock(); - } -} - -emitSleepEnd.maxRetries = 0; - -async function emitTimeout(poll: number, jobId: string): Promise { - "use step"; - const writer = getWritable().getWriter(); - try { - await safeWrite(writer, { type: "timeout", poll, jobId }); - } finally { - writer.releaseLock(); - } -} - -emitTimeout.maxRetries = 0; - -async function emitDone(jobId: string, status: "completed" | "timeout", pollCount: number): Promise { - "use step"; - const writer = getWritable().getWriter(); - try { - await safeWrite(writer, { type: "done", jobId, status, pollCount }); - } finally { - writer.releaseLock(); - } -} - -emitDone.maxRetries = 0; -``` - ## Key APIs -- [`"use workflow"`](/docs/foundations/workflows-and-steps) — marks the orchestrator function -- [`"use step"`](/docs/foundations/workflows-and-steps) — marks functions with full Node.js access +- [`"use workflow"`](/docs/api-reference/workflow/use-workflow) — declares the orchestrator function - [`sleep()`](/docs/api-reference/workflow/sleep) — durable timer that survives restarts -- [`getWritable()`](/docs/api-reference/workflow/get-writable) — streams events to the client diff --git 
a/docs/content/docs/cookbook/webhooks/webhook-basics.mdx b/docs/content/docs/cookbook/webhooks/webhook-basics.mdx index e9f1c1b7ad..f228becde2 100644 --- a/docs/content/docs/cookbook/webhooks/webhook-basics.mdx +++ b/docs/content/docs/cookbook/webhooks/webhook-basics.mdx @@ -5,14 +5,12 @@ type: guide summary: Accept Stripe or GitHub webhooks, validate signatures, and kick off internal workflow steps. --- -Accept Stripe or GitHub webhooks, validate signatures, and kick off internal workflow steps. Use this pattern whenever an external system pushes events to your application via HTTP callbacks. +Accept Stripe or GitHub webhooks, validate signatures, and kick off internal workflow steps. Use this whenever an external system pushes events to your application via HTTP callbacks. ## Pattern Create a webhook with manual response control, then iterate over incoming requests in a `for await` loop. Each request is processed in its own step, letting you validate, respond, and record the event durably. 
-### Simplified - ```typescript lineNumbers import { createWebhook, type RequestWithResponse } from "workflow"; @@ -35,103 +33,7 @@ export async function paymentWebhook(orderId: string) { } ``` -### Full Implementation - -```typescript lineNumbers -import { createWebhook, getWritable, type RequestWithResponse } from "workflow"; - -export type WebhookEvent = - | { type: "webhook_ready"; token: string } - | { type: "event_received"; eventType: string; amount?: number } - | { type: "response_sent"; eventType: string; action: string } - | { type: "done"; status: "settled"; ledgerSize: number }; - -const MAX_EVENTS = 50; - -export async function paymentWebhook(orderId: string) { - "use workflow"; - - const webhook = createWebhook({ - respondWith: "manual", - }); - - await emit({ type: "webhook_ready", token: webhook.token }); - - const ledger: { type: string; amount?: number; processedAt: string }[] = []; - - for await (const request of webhook) { - const entry = await processPaymentEvent(request); - ledger.push(entry); - if (entry.type === "refund.created" || entry.type === "order.completed") break; - if (ledger.length >= MAX_EVENTS) break; - } - - await emit({ type: "done", status: "settled", ledgerSize: ledger.length }); - - return { orderId, webhookUrl: webhook.url, ledger, status: "settled" as const }; -} - -/** - * Step: Emit a single event to the UI stream. - * Re-acquires the writer inside the step so it survives durable suspension. - */ -async function emit(event: T): Promise { - "use step"; - const writer = getWritable().getWriter(); - try { - await writer.write(event); - } finally { - writer.releaseLock(); - } -} - -async function processPaymentEvent( - request: RequestWithResponse -) { - "use step"; - - const writer = getWritable().getWriter(); - - try { - const body = await request.json().catch(() => ({})); - const type = body?.type ?? "unknown"; - const amount = typeof body?.amount === "number" ? 
body.amount : undefined; - - await writer.write({ type: "event_received", eventType: type, amount }); - - let action = "ignored"; - - if (type === "payment.created") { - action = "received"; - await request.respondWith(Response.json({ ack: true, action })); - } else if (type === "payment.requires_action") { - action = "awaiting customer"; - await request.respondWith(Response.json({ ack: true, action })); - } else if (type === "payment.succeeded") { - action = "captured"; - await request.respondWith(Response.json({ ack: true, action })); - } else if (type === "payment.failed") { - action = "flagged for review"; - await request.respondWith(Response.json({ ack: true, action })); - } else if (type === "refund.created") { - action = "refunded"; - await request.respondWith(Response.json({ ack: true, action })); - } else { - await request.respondWith(Response.json({ ack: true, action })); - } - - await writer.write({ type: "response_sent", eventType: type, action }); - - return { type, amount, processedAt: new Date().toISOString() }; - } finally { - writer.releaseLock(); - } -} -``` - ## Key APIs -- [`"use workflow"`](/docs/foundations/workflows-and-steps) — marks the orchestrator function -- [`"use step"`](/docs/foundations/workflows-and-steps) — marks functions with full Node.js access +- [`"use workflow"`](/docs/api-reference/workflow/use-workflow) — declares the orchestrator function - [`createWebhook()`](/docs/api-reference/workflow/create-webhook) — creates an HTTP endpoint the workflow can await -- [`getWritable()`](/docs/api-reference/workflow/get-writable) — streams events to the client diff --git a/docs/content/docs/meta.json b/docs/content/docs/meta.json index 0c8ad884cd..eda30fd046 100644 --- a/docs/content/docs/meta.json +++ b/docs/content/docs/meta.json @@ -4,7 +4,6 @@ "---", "getting-started", "foundations", - "cookbook", "how-it-works", "observability", "ai", diff --git a/docs/geistdocs.tsx b/docs/geistdocs.tsx index 834c7debbd..684fc6fa6f 100644 --- 
a/docs/geistdocs.tsx +++ b/docs/geistdocs.tsx @@ -27,6 +27,10 @@ export const nav = [ label: 'Docs', href: '/docs', }, + { + label: 'Cookbooks', + href: '/cookbooks', + }, { label: 'Worlds', href: '/worlds', diff --git a/docs/lib/geistdocs/cookbook-source.ts b/docs/lib/geistdocs/cookbook-source.ts new file mode 100644 index 0000000000..d4c6db71ad --- /dev/null +++ b/docs/lib/geistdocs/cookbook-source.ts @@ -0,0 +1,53 @@ +import { source } from './source'; + +/** + * Extract the cookbook subtree from the docs page tree, + * rewriting URLs from /docs/cookbook/... to /cookbooks/... + */ +export function getCookbookTree(lang: string) { + const fullTree = source.pageTree[lang]; + + // Find the cookbook folder in the tree + const cookbookNode = fullTree.children.find( + (node) => node.type === 'folder' && node.name === 'Cookbook' + ); + + if (!cookbookNode || cookbookNode.type !== 'folder') { + return { name: 'Cookbooks', children: [] }; + } + + // Deep-clone and rewrite URLs + return { + name: 'Cookbooks', + children: rewriteUrls(cookbookNode.children), + }; +} + +function rewriteUrls(nodes: T[]): T[] { + return nodes.map((node) => { + const n = node as Record; + const rewritten = { ...n }; + + if (typeof rewritten.url === 'string') { + rewritten.url = rewritten.url.replace( + /\/docs\/cookbook\//, + '/cookbooks/' + ); + } + + if (Array.isArray(rewritten.children)) { + rewritten.children = rewriteUrls(rewritten.children); + } + + // Handle index page inside folders + if (rewritten.index && typeof rewritten.index === 'object') { + const idx = { ...(rewritten.index as Record) }; + if (typeof idx.url === 'string') { + idx.url = idx.url.replace(/\/docs\/cookbook\//, '/cookbooks/'); + } + rewritten.index = idx; + } + + return rewritten as T; + }); +} diff --git a/tests/fixtures/workflow-skills/approval-expiry-escalation/.workflow-vitest/steps.mjs b/tests/fixtures/workflow-skills/approval-expiry-escalation/.workflow-vitest/steps.mjs new file mode 100644 index 
0000000000..0183a18222 --- /dev/null +++ b/tests/fixtures/workflow-skills/approval-expiry-escalation/.workflow-vitest/steps.mjs @@ -0,0 +1,123 @@ +// biome-ignore-all lint: generated file +/* eslint-disable */ + +var __defProp = Object.defineProperty; +var __name = (target, value) => + __defProp(target, 'name', { value, configurable: true }); + +// ../../../../packages/workflow/dist/internal/builtins.js +import { registerStepFunction } from 'workflow/internal/private'; +async function __builtin_response_array_buffer() { + return this.arrayBuffer(); +} +__name(__builtin_response_array_buffer, '__builtin_response_array_buffer'); +async function __builtin_response_json() { + return this.json(); +} +__name(__builtin_response_json, '__builtin_response_json'); +async function __builtin_response_text() { + return this.text(); +} +__name(__builtin_response_text, '__builtin_response_text'); +registerStepFunction( + '__builtin_response_array_buffer', + __builtin_response_array_buffer +); +registerStepFunction('__builtin_response_json', __builtin_response_json); +registerStepFunction('__builtin_response_text', __builtin_response_text); + +// ../../../../packages/workflow/dist/stdlib.js +import { registerStepFunction as registerStepFunction2 } from 'workflow/internal/private'; +async function fetch(...args) { + return globalThis.fetch(...args); +} +__name(fetch, 'fetch'); +registerStepFunction2('step//./packages/workflow/dist/stdlib//fetch', fetch); + +// workflows/purchase-approval.ts +import { registerStepFunction as registerStepFunction3 } from 'workflow/internal/private'; + +// ../../../../packages/utils/dist/index.js +import { pluralize } from '../../../../../packages/utils/dist/pluralize.js'; +import { + parseClassName, + parseStepName, + parseWorkflowName, +} from '../../../../../packages/utils/dist/parse-name.js'; +import { + once, + withResolvers, +} from '../../../../../packages/utils/dist/promise.js'; +import { parseDurationToDate } from 
'../../../../../packages/utils/dist/time.js'; +import { + isVercelWorldTarget, + resolveWorkflowTargetWorld, + usesVercelWorld, +} from '../../../../../packages/utils/dist/world-target.js'; + +// ../../../../packages/errors/dist/index.js +import { RUN_ERROR_CODES } from '../../../../../packages/errors/dist/error-codes.js'; + +// ../../../../packages/core/dist/index.js +import { + createHook, + createWebhook, +} from '../../../../../packages/core/dist/create-hook.js'; +import { defineHook } from '../../../../../packages/core/dist/define-hook.js'; +import { sleep } from '../../../../../packages/core/dist/sleep.js'; +import { getStepMetadata } from '../../../../../packages/core/dist/step/get-step-metadata.js'; +import { getWorkflowMetadata } from '../../../../../packages/core/dist/step/get-workflow-metadata.js'; +import { getWritable } from '../../../../../packages/core/dist/step/writable-stream.js'; + +// workflows/purchase-approval.ts +var notifyApprover = /* @__PURE__ */ __name( + async (poNumber, approverId, template) => { + await notifications.send({ + idempotencyKey: `notify:${template}:${poNumber}`, + to: approverId, + template, + }); + }, + 'notifyApprover' +); +var recordDecision = /* @__PURE__ */ __name( + async (poNumber, status, decidedBy) => { + await db.purchaseOrders.update({ + where: { + poNumber, + }, + data: { + status, + decidedBy, + decidedAt: /* @__PURE__ */ new Date(), + }, + }); + return { + poNumber, + status, + decidedBy, + }; + }, + 'recordDecision' +); +async function purchaseApproval(poNumber, amount, managerId, directorId) { + throw new Error( + 'You attempted to execute workflow purchaseApproval function directly. 
To start a workflow, use start(purchaseApproval) from workflow/api' + ); +} +__name(purchaseApproval, 'purchaseApproval'); +purchaseApproval.workflowId = + 'workflow//./workflows/purchase-approval//purchaseApproval'; +registerStepFunction3( + 'step//./workflows/purchase-approval//notifyApprover', + notifyApprover +); +registerStepFunction3( + 'step//./workflows/purchase-approval//recordDecision', + recordDecision +); + +// virtual-entry.js +import { stepEntrypoint } from 'workflow/runtime'; +export { stepEntrypoint as POST }; +//# sourceMappingURL=data:application/json;base64,{
  "version": 3,
  "sources": ["../../../../../packages/workflow/src/internal/builtins.ts", "../../../../../packages/workflow/src/stdlib.ts", "../workflows/purchase-approval.ts", "../../../../../packages/utils/src/index.ts", "../../../../../packages/errors/src/index.ts", "../../../../../packages/core/src/index.ts", "../virtual-entry.js"],
  "sourcesContent": ["/**\n * These are the built-in steps that are \"automatically available\" in the workflow scope. They are\n * similar to \"stdlib\" except that are not meant to be imported by users, but are instead \"just available\"\n * alongside user defined steps. They are used internally by the runtime\n */\n\nexport async function __builtin_response_array_buffer(\n  this: Request | Response\n) {\n  'use step';\n  return this.arrayBuffer();\n}\n\nexport async function __builtin_response_json(this: Request | Response) {\n  'use step';\n  return this.json();\n}\n\nexport async function __builtin_response_text(this: Request | Response) {\n  'use step';\n  return this.text();\n}\n", "/**\n * This is the \"standard library\" of steps that we make available to all workflow users.\n * The can be imported like so: `import { fetch } from 'workflow'`. and used in workflow.\n * The need to be exported directly in this package and cannot live in `core` to prevent\n * circular dependencies post-compilation.\n */\n\n/**\n * A hoisted `fetch()` function that is executed as a \"step\" function,\n * for use within workflow functions.\n *\n * @see https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API\n */\nexport async function fetch(...args: Parameters<typeof globalThis.fetch>) {\n  'use step';\n  return globalThis.fetch(...args);\n}\n", "import { registerStepFunction } from \"workflow/internal/private\";\nimport { createHook, sleep } from \"workflow\";\n/**__internal_workflows{\"workflows\":{\"workflows/purchase-approval.ts\":{\"default\":{\"workflowId\":\"workflow//./workflows/purchase-approval//purchaseApproval\"}}},\"steps\":{\"workflows/purchase-approval.ts\":{\"notifyApprover\":{\"stepId\":\"step//./workflows/purchase-approval//notifyApprover\"},\"recordDecision\":{\"stepId\":\"step//./workflows/purchase-approval//recordDecision\"}}}}*/;\nconst notifyApprover = async (poNumber, approverId, template)=>{\n    await notifications.send({\n        idempotencyKey: 
`notify:${template}:${poNumber}`,\n        to: approverId,\n        template\n    });\n};\nconst recordDecision = async (poNumber, status, decidedBy)=>{\n    await db.purchaseOrders.update({\n        where: {\n            poNumber\n        },\n        data: {\n            status,\n            decidedBy,\n            decidedAt: new Date()\n        }\n    });\n    return {\n        poNumber,\n        status,\n        decidedBy\n    };\n};\nexport default async function purchaseApproval(poNumber, amount, managerId, directorId) {\n    throw new Error(\"You attempted to execute workflow purchaseApproval function directly. To start a workflow, use start(purchaseApproval) from workflow/api\");\n}\npurchaseApproval.workflowId = \"workflow//./workflows/purchase-approval//purchaseApproval\";\nregisterStepFunction(\"step//./workflows/purchase-approval//notifyApprover\", notifyApprover);\nregisterStepFunction(\"step//./workflows/purchase-approval//recordDecision\", recordDecision);\n", "export { pluralize } from './pluralize.js';\nexport {\n  parseClassName,\n  parseStepName,\n  parseWorkflowName,\n} from './parse-name.js';\nexport { once, type PromiseWithResolvers, withResolvers } from './promise.js';\nexport { parseDurationToDate } from './time.js';\nexport {\n  isVercelWorldTarget,\n  resolveWorkflowTargetWorld,\n  usesVercelWorld,\n} from './world-target.js';\n", "import { parseDurationToDate } from '@workflow/utils';\nimport type { StructuredError } from '@workflow/world';\nimport type { StringValue } from 'ms';\n\nconst BASE_URL = 'https://useworkflow.dev/err';\n\n/**\n * @internal\n * Check if a value is an Error without relying on Node.js utilities.\n * This is needed for error classes that can be used in VM contexts where\n * Node.js imports are not available.\n */\nfunction isError(value: unknown): value is { name: string; message: string } {\n  return (\n    typeof value === 'object' &&\n    value !== null &&\n    'name' in value &&\n    'message' in value\n  
);\n}\n\n/**\n * @internal\n * All the slugs of the errors used for documentation links.\n */\nexport const ERROR_SLUGS = {\n  NODE_JS_MODULE_IN_WORKFLOW: 'node-js-module-in-workflow',\n  START_INVALID_WORKFLOW_FUNCTION: 'start-invalid-workflow-function',\n  SERIALIZATION_FAILED: 'serialization-failed',\n  WEBHOOK_INVALID_RESPOND_WITH_VALUE: 'webhook-invalid-respond-with-value',\n  WEBHOOK_RESPONSE_NOT_SENT: 'webhook-response-not-sent',\n  FETCH_IN_WORKFLOW_FUNCTION: 'fetch-in-workflow',\n  TIMEOUT_FUNCTIONS_IN_WORKFLOW: 'timeout-in-workflow',\n  HOOK_CONFLICT: 'hook-conflict',\n  CORRUPTED_EVENT_LOG: 'corrupted-event-log',\n  STEP_NOT_REGISTERED: 'step-not-registered',\n  WORKFLOW_NOT_REGISTERED: 'workflow-not-registered',\n} as const;\n\ntype ErrorSlug = (typeof ERROR_SLUGS)[keyof typeof ERROR_SLUGS];\n\ninterface WorkflowErrorOptions extends ErrorOptions {\n  /**\n   * The slug of the error. This will be used to generate a link to the error documentation.\n   */\n  slug?: ErrorSlug;\n}\n\n/**\n * The base class for all Workflow-related errors.\n *\n * This error is thrown by the Workflow DevKit when internal operations fail.\n * You can use this class with `instanceof` to catch any Workflow DevKit error.\n *\n * @example\n * ```ts\n * try {\n *   await getRun(runId);\n * } catch (error) {\n *   if (error instanceof WorkflowError) {\n *     console.error('Workflow DevKit error:', error.message);\n *   }\n * }\n * ```\n */\nexport class WorkflowError extends Error {\n  readonly cause?: unknown;\n\n  constructor(message: string, options?: WorkflowErrorOptions) {\n    const msgDocs = options?.slug\n      ? 
`${message}\\n\\nLearn more: ${BASE_URL}/${options.slug}`\n      : message;\n    super(msgDocs, { cause: options?.cause });\n    this.cause = options?.cause;\n\n    if (options?.cause instanceof Error) {\n      this.stack = `${this.stack}\\nCaused by: ${options.cause.stack}`;\n    }\n  }\n\n  static is(value: unknown): value is WorkflowError {\n    return isError(value) && value.name === 'WorkflowError';\n  }\n}\n\n/**\n * Thrown when a world (storage backend) operation fails unexpectedly.\n *\n * This is the catch-all error for world implementations. Specific,\n * well-known failure modes have dedicated error types (e.g.\n * EntityConflictError, RunExpiredError, ThrottleError). This error\n * covers everything else \u2014 validation failures, missing entities\n * without a dedicated type, or unexpected HTTP errors from world-vercel.\n */\nexport class WorkflowWorldError extends WorkflowError {\n  status?: number;\n  code?: string;\n  url?: string;\n  /** Retry-After value in seconds, present on 429 and 425 responses */\n  retryAfter?: number;\n\n  constructor(\n    message: string,\n    options?: {\n      status?: number;\n      url?: string;\n      code?: string;\n      retryAfter?: number;\n      cause?: unknown;\n    }\n  ) {\n    super(message, {\n      cause: options?.cause,\n    });\n    this.name = 'WorkflowWorldError';\n    this.status = options?.status;\n    this.code = options?.code;\n    this.url = options?.url;\n    this.retryAfter = options?.retryAfter;\n  }\n\n  static is(value: unknown): value is WorkflowWorldError {\n    return isError(value) && value.name === 'WorkflowWorldError';\n  }\n}\n\n/**\n * Thrown when a workflow run fails during execution.\n *\n * This error indicates that the workflow encountered a fatal error and cannot\n * continue. It is thrown when awaiting `run.returnValue` on a run whose status\n * is `'failed'`. 
The `cause` property contains the underlying error with its\n * message, stack trace, and optional error code.\n *\n * Use the static `WorkflowRunFailedError.is()` method for type-safe checking\n * in catch blocks.\n *\n * @example\n * ```ts\n * import { WorkflowRunFailedError } from \"workflow/internal/errors\";\n *\n * try {\n *   const result = await run.returnValue;\n * } catch (error) {\n *   if (WorkflowRunFailedError.is(error)) {\n *     console.error(`Run ${error.runId} failed:`, error.cause.message);\n *   }\n * }\n * ```\n */\nexport class WorkflowRunFailedError extends WorkflowError {\n  runId: string;\n  declare cause: Error & { code?: string };\n\n  constructor(runId: string, error: StructuredError) {\n    // Create a proper Error instance from the StructuredError to set as cause\n    // NOTE: custom error types do not get serialized/deserialized. Everything is an Error\n    const causeError = new Error(error.message);\n    if (error.stack) {\n      causeError.stack = error.stack;\n    }\n    if (error.code) {\n      (causeError as any).code = error.code;\n    }\n\n    super(`Workflow run \"${runId}\" failed: ${error.message}`, {\n      cause: causeError,\n    });\n    this.name = 'WorkflowRunFailedError';\n    this.runId = runId;\n  }\n\n  static is(value: unknown): value is WorkflowRunFailedError {\n    return isError(value) && value.name === 'WorkflowRunFailedError';\n  }\n}\n\n/**\n * Thrown when attempting to get results from an incomplete workflow run.\n *\n * This error occurs when you try to access the result of a workflow\n * that is still running or hasn't completed yet.\n */\nexport class WorkflowRunNotCompletedError extends WorkflowError {\n  runId: string;\n  status: string;\n\n  constructor(runId: string, status: string) {\n    super(`Workflow run \"${runId}\" has not completed`, {});\n    this.name = 'WorkflowRunNotCompletedError';\n    this.runId = runId;\n    this.status = status;\n  }\n\n  static is(value: unknown): value is 
WorkflowRunNotCompletedError {\n    return isError(value) && value.name === 'WorkflowRunNotCompletedError';\n  }\n}\n\n/**\n * Thrown when the Workflow runtime encounters an internal error.\n *\n * This error indicates an issue with workflow execution, such as\n * serialization failures, starting an invalid workflow function, or\n * other runtime problems.\n */\nexport class WorkflowRuntimeError extends WorkflowError {\n  constructor(message: string, options?: WorkflowErrorOptions) {\n    super(message, {\n      ...options,\n    });\n    this.name = 'WorkflowRuntimeError';\n  }\n\n  static is(value: unknown): value is WorkflowRuntimeError {\n    return isError(value) && value.name === 'WorkflowRuntimeError';\n  }\n}\n\n/**\n * Thrown when a step function is not registered in the current deployment.\n *\n * This is an infrastructure error \u2014 not a user code error. It typically means\n * something went wrong with the bundling/build tooling that caused the step\n * to not get built correctly.\n *\n * When this happens, the step fails (like a FatalError) and control is passed back\n * to the workflow function, which can optionally handle the failure gracefully.\n */\nexport class StepNotRegisteredError extends WorkflowRuntimeError {\n  stepName: string;\n\n  constructor(stepName: string) {\n    super(\n      `Step \"${stepName}\" is not registered in the current deployment. This usually indicates a build or bundling issue that caused the step to not be included in the deployment.`,\n      { slug: ERROR_SLUGS.STEP_NOT_REGISTERED }\n    );\n    this.name = 'StepNotRegisteredError';\n    this.stepName = stepName;\n  }\n\n  static is(value: unknown): value is StepNotRegisteredError {\n    return isError(value) && value.name === 'StepNotRegisteredError';\n  }\n}\n\n/**\n * Thrown when a workflow function is not registered in the current deployment.\n *\n * This is an infrastructure error \u2014 not a user code error. 
It typically means:\n * - A run was started against a deployment that does not have the workflow\n *   (e.g., the workflow was renamed or moved and a new run targeted the latest deployment)\n * - Something went wrong with the bundling/build tooling that caused the workflow\n *   to not get built correctly\n *\n * When this happens, the run fails with a `RUNTIME_ERROR` error code.\n */\nexport class WorkflowNotRegisteredError extends WorkflowRuntimeError {\n  workflowName: string;\n\n  constructor(workflowName: string) {\n    super(\n      `Workflow \"${workflowName}\" is not registered in the current deployment. This usually means a run was started against a deployment that does not have this workflow, or there was a build/bundling issue.`,\n      { slug: ERROR_SLUGS.WORKFLOW_NOT_REGISTERED }\n    );\n    this.name = 'WorkflowNotRegisteredError';\n    this.workflowName = workflowName;\n  }\n\n  static is(value: unknown): value is WorkflowNotRegisteredError {\n    return isError(value) && value.name === 'WorkflowNotRegisteredError';\n  }\n}\n\n/**\n * Thrown when performing operations on a workflow run that does not exist.\n *\n * This error occurs when you call methods on a run object (e.g. `run.status`,\n * `run.cancel()`, `run.returnValue`) but the underlying run ID does not match\n * any known workflow run. 
Note that `getRun(id)` itself is synchronous and will\n * not throw \u2014 this error is raised when subsequent operations discover the run\n * is missing.\n *\n * Use the static `WorkflowRunNotFoundError.is()` method for type-safe checking\n * in catch blocks.\n *\n * @example\n * ```ts\n * import { WorkflowRunNotFoundError } from \"workflow/internal/errors\";\n *\n * try {\n *   const status = await run.status;\n * } catch (error) {\n *   if (WorkflowRunNotFoundError.is(error)) {\n *     console.error(`Run ${error.runId} does not exist`);\n *   }\n * }\n * ```\n */\nexport class WorkflowRunNotFoundError extends WorkflowError {\n  runId: string;\n\n  constructor(runId: string) {\n    super(`Workflow run \"${runId}\" not found`, {});\n    this.name = 'WorkflowRunNotFoundError';\n    this.runId = runId;\n  }\n\n  static is(value: unknown): value is WorkflowRunNotFoundError {\n    return isError(value) && value.name === 'WorkflowRunNotFoundError';\n  }\n}\n\n/**\n * Thrown when a hook token is already in use by another active workflow run.\n *\n * This is a user error \u2014 it means the same custom token was passed to\n * `createHook` in two or more concurrent runs. 
Use a unique token per run\n * (or omit the token to let the runtime generate one automatically).\n */\nexport class HookConflictError extends WorkflowError {\n  token: string;\n\n  constructor(token: string) {\n    super(`Hook token \"${token}\" is already in use by another workflow`, {\n      slug: ERROR_SLUGS.HOOK_CONFLICT,\n    });\n    this.name = 'HookConflictError';\n    this.token = token;\n  }\n\n  static is(value: unknown): value is HookConflictError {\n    return isError(value) && value.name === 'HookConflictError';\n  }\n}\n\n/**\n * Thrown when calling `resumeHook()` or `resumeWebhook()` with a token that\n * does not match any active hook.\n *\n * Common causes:\n * - The hook has expired (past its TTL)\n * - The hook was already disposed after being consumed\n * - The workflow has not started yet, so the hook does not exist\n *\n * A common pattern is to catch this error and start a new workflow run when\n * the hook does not exist yet (the \"resume or start\" pattern).\n *\n * Use the static `HookNotFoundError.is()` method for type-safe checking in\n * catch blocks.\n *\n * @example\n * ```ts\n * import { HookNotFoundError } from \"workflow/internal/errors\";\n *\n * try {\n *   await resumeHook(token, payload);\n * } catch (error) {\n *   if (HookNotFoundError.is(error)) {\n *     // Hook doesn't exist \u2014 start a new workflow run instead\n *     await startWorkflow(\"myWorkflow\", payload);\n *   }\n * }\n * ```\n */\nexport class HookNotFoundError extends WorkflowError {\n  token: string;\n\n  constructor(token: string) {\n    super('Hook not found', {});\n    this.name = 'HookNotFoundError';\n    this.token = token;\n  }\n\n  static is(value: unknown): value is HookNotFoundError {\n    return isError(value) && value.name === 'HookNotFoundError';\n  }\n}\n\n/**\n * Thrown when an operation conflicts with the current state of an entity.\n * This includes attempts to modify an entity already in a terminal state,\n * create an entity that already 
exists, or any other 409-style conflict.\n *\n * The workflow runtime handles this error automatically. Users interacting\n * with world storage backends directly may encounter it.\n */\nexport class EntityConflictError extends WorkflowWorldError {\n  constructor(message: string) {\n    super(message);\n    this.name = 'EntityConflictError';\n  }\n\n  static is(value: unknown): value is EntityConflictError {\n    return isError(value) && value.name === 'EntityConflictError';\n  }\n}\n\n/**\n * Thrown when a run is no longer available \u2014 either because it has been\n * cleaned up, expired, or already reached a terminal state (completed/failed).\n *\n * The workflow runtime handles this error automatically. Users interacting\n * with world storage backends directly may encounter it.\n */\nexport class RunExpiredError extends WorkflowWorldError {\n  constructor(message: string) {\n    super(message);\n    this.name = 'RunExpiredError';\n  }\n\n  static is(value: unknown): value is RunExpiredError {\n    return isError(value) && value.name === 'RunExpiredError';\n  }\n}\n\n/**\n * Thrown when an operation cannot proceed because a required timestamp\n * (e.g. retryAfter) has not been reached yet.\n *\n * The workflow runtime handles this error automatically. 
Users interacting\n * with world storage backends directly may encounter it.\n *\n * @property retryAfter - Delay in seconds before the operation can be retried.\n */\nexport class TooEarlyError extends WorkflowWorldError {\n  constructor(message: string, options?: { retryAfter?: number }) {\n    super(message, { retryAfter: options?.retryAfter });\n    this.name = 'TooEarlyError';\n  }\n\n  static is(value: unknown): value is TooEarlyError {\n    return isError(value) && value.name === 'TooEarlyError';\n  }\n}\n\n/**\n * Thrown when a request is rate limited by the workflow backend.\n *\n * The workflow runtime handles this error automatically with retry logic.\n * Users interacting with world storage backends directly may encounter it\n * if retries are exhausted.\n *\n * @property retryAfter - Delay in seconds before the request can be retried.\n */\nexport class ThrottleError extends WorkflowWorldError {\n  retryAfter?: number;\n\n  constructor(message: string, options?: { retryAfter?: number }) {\n    super(message);\n    this.name = 'ThrottleError';\n    this.retryAfter = options?.retryAfter;\n  }\n\n  static is(value: unknown): value is ThrottleError {\n    return isError(value) && value.name === 'ThrottleError';\n  }\n}\n\n/**\n * Thrown when awaiting `run.returnValue` on a workflow run that was cancelled.\n *\n * This error indicates that the workflow was explicitly cancelled (via\n * `run.cancel()`) and will not produce a return value. 
You can check for\n * cancellation before awaiting the return value by inspecting `run.status`.\n *\n * Use the static `WorkflowRunCancelledError.is()` method for type-safe\n * checking in catch blocks.\n *\n * @example\n * ```ts\n * import { WorkflowRunCancelledError } from \"workflow/internal/errors\";\n *\n * try {\n *   const result = await run.returnValue;\n * } catch (error) {\n *   if (WorkflowRunCancelledError.is(error)) {\n *     console.log(`Run ${error.runId} was cancelled`);\n *   }\n * }\n * ```\n */\nexport class WorkflowRunCancelledError extends WorkflowError {\n  runId: string;\n\n  constructor(runId: string) {\n    super(`Workflow run \"${runId}\" cancelled`, {});\n    this.name = 'WorkflowRunCancelledError';\n    this.runId = runId;\n  }\n\n  static is(value: unknown): value is WorkflowRunCancelledError {\n    return isError(value) && value.name === 'WorkflowRunCancelledError';\n  }\n}\n\n/**\n * Thrown when attempting to operate on a workflow run that requires a newer World version.\n *\n * This error occurs when a run was created with a newer spec version than the\n * current World implementation supports. 
To resolve this, upgrade your\n * `workflow` packages to a version that supports the required spec version.\n *\n * Use the static `RunNotSupportedError.is()` method for type-safe checking in\n * catch blocks.\n *\n * @example\n * ```ts\n * import { RunNotSupportedError } from \"workflow/internal/errors\";\n *\n * try {\n *   const status = await run.status;\n * } catch (error) {\n *   if (RunNotSupportedError.is(error)) {\n *     console.error(\n *       `Run requires spec v${error.runSpecVersion}, ` +\n *       `but world supports v${error.worldSpecVersion}`\n *     );\n *   }\n * }\n * ```\n */\nexport class RunNotSupportedError extends WorkflowError {\n  readonly runSpecVersion: number;\n  readonly worldSpecVersion: number;\n\n  constructor(runSpecVersion: number, worldSpecVersion: number) {\n    super(\n      `Run requires spec version ${runSpecVersion}, but world supports version ${worldSpecVersion}. ` +\n        `Please upgrade 'workflow' package.`\n    );\n    this.name = 'RunNotSupportedError';\n    this.runSpecVersion = runSpecVersion;\n    this.worldSpecVersion = worldSpecVersion;\n  }\n\n  static is(value: unknown): value is RunNotSupportedError {\n    return isError(value) && value.name === 'RunNotSupportedError';\n  }\n}\n\n/**\n * A fatal error is an error that cannot be retried.\n * It will cause the step to fail and the error will\n * be bubbled up to the workflow logic.\n */\nexport class FatalError extends Error {\n  fatal = true;\n\n  constructor(message: string) {\n    super(message);\n    this.name = 'FatalError';\n  }\n\n  static is(value: unknown): value is FatalError {\n    return isError(value) && value.name === 'FatalError';\n  }\n}\n\nexport interface RetryableErrorOptions {\n  /**\n   * The number of milliseconds to wait before retrying the step.\n   * Can also be a duration string (e.g., \"5s\", \"2m\") or a Date object.\n   * If not provided, the step will be retried after 1 second (1000 milliseconds).\n   */\n  retryAfter?: number | 
StringValue | Date;\n}\n\n/**\n * An error that can happen during a step execution, allowing\n * for configuration of the retry behavior.\n */\nexport class RetryableError extends Error {\n  /**\n   * The Date when the step should be retried.\n   */\n  retryAfter: Date;\n\n  constructor(message: string, options: RetryableErrorOptions = {}) {\n    super(message);\n    this.name = 'RetryableError';\n\n    if (options.retryAfter !== undefined) {\n      this.retryAfter = parseDurationToDate(options.retryAfter);\n    } else {\n      // Default to 1 second (1000 milliseconds)\n      this.retryAfter = new Date(Date.now() + 1000);\n    }\n  }\n\n  static is(value: unknown): value is RetryableError {\n    return isError(value) && value.name === 'RetryableError';\n  }\n}\n\nexport const VERCEL_403_ERROR_MESSAGE =\n  'Your current vercel account does not have access to this resource. Use `vercel login` or `vercel switch` to ensure you are linked to the right account.';\n\nexport { RUN_ERROR_CODES, type RunErrorCode } from './error-codes.js';\n", "/**\n * Just the core utilities that are meant to be imported by user\n * steps/workflows. This allows the bundler to tree-shake and limit what goes\n * into the final user bundles. Logic for running/handling steps/workflows\n * should live in runtime. 
Eventually these might be separate packages\n * `workflow` and `workflow/runtime`?\n *\n * Everything here will get re-exported under the 'workflow' top level package.\n * This should be a minimal set of APIs so **do not anything here** unless it's\n * needed for userland workflow code.\n */\n\nexport {\n  FatalError,\n  RetryableError,\n  type RetryableErrorOptions,\n} from '@workflow/errors';\nexport {\n  createHook,\n  createWebhook,\n  type Hook,\n  type HookOptions,\n  type RequestWithResponse,\n  type Webhook,\n  type WebhookOptions,\n} from './create-hook.js';\nexport { defineHook, type TypedHook } from './define-hook.js';\nexport { sleep } from './sleep.js';\nexport {\n  getStepMetadata,\n  type StepMetadata,\n} from './step/get-step-metadata.js';\nexport {\n  getWorkflowMetadata,\n  type WorkflowMetadata,\n} from './step/get-workflow-metadata.js';\nexport {\n  getWritable,\n  type WorkflowWritableStreamOptions,\n} from './step/writable-stream.js';\n", "\n    // Built in steps\n    import 'workflow/internal/builtins';\n    // User steps\n    import '../../../../packages/workflow/dist/stdlib.js';\nimport './workflows/purchase-approval.ts';\n    // Serde files for cross-context class registration\n    \n    // API entrypoint\n    export { stepEntrypoint as POST } from 'workflow/runtime';"],
  "mappings": ";;;;;;;AAAA,SAAA,4BAAA;AASE,eAAW,kCAAA;AACX,SAAO,KAAK,YAAW;AACzB;AAFa;AAIb,eAAsB,0BAAuB;AAC3C,SAAA,KAAW,KAAA;;AADS;AAGtB,eAAC,0BAAA;AAED,SAAO,KAAK,KAAA;;AAFX;qBAIiB,mCAAG,+BAAA;AACrB,qBAAC,2BAAA,uBAAA;;;;ACrBD,SAAA,wBAAAA,6BAAA;AAaA,eAAsB,SAAkD,MAAA;AACtE,SAAA,WAAW,MAAA,GAAA,IAAA;;AADS;AAGtBC,sBAAC,gDAAA,KAAA;;;AChBD,SAAS,wBAAAC,6BAA4B;;;ACArC,SAAS,iBAAiB;AAC1B,SACE,gBACA,eACA,yBACD;AACD,SAAS,MAAiC,qBAAqB;AAC/D,SAAS,2BAA2B;AACpC,SACE,qBACA,4BACA,uBACD;;;ACgjBD,SAAM,uBAAsB;;;AChjB5B,SACE,YACA,qBAED;AACD,SACE,kBACA;AAOF,SAAS,aAA4B;AACrC,SAAS,uBAAa;AACtB,SACE,2BAEK;AACP,SACE,mBAAmB;;;AH9BrB,IAAM,iBAAiB,8BAAO,UAAU,YAAY,aAAW;AAC3D,QAAM,cAAc,KAAK;AAAA,IACrB,gBAAgB,UAAU,QAAQ,IAAI,QAAQ;AAAA,IAC9C,IAAI;AAAA,IACJ;AAAA,EACJ,CAAC;AACL,GANuB;AAOvB,IAAM,iBAAiB,8BAAO,UAAU,QAAQ,cAAY;AACxD,QAAM,GAAG,eAAe,OAAO;AAAA,IAC3B,OAAO;AAAA,MACH;AAAA,IACJ;AAAA,IACA,MAAM;AAAA,MACF;AAAA,MACA;AAAA,MACA,WAAW,oBAAI,KAAK;AAAA,IACxB;AAAA,EACJ,CAAC;AACD,SAAO;AAAA,IACH;AAAA,IACA;AAAA,IACA;AAAA,EACJ;AACJ,GAhBuB;AAiBvB,eAAO,iBAAwC,UAAU,QAAQ,WAAW,YAAY;AACpF,QAAM,IAAI,MAAM,0IAA0I;AAC9J;AAF8B;AAG9B,iBAAiB,aAAa;AAC9BC,sBAAqB,uDAAuD,cAAc;AAC1FA,sBAAqB,uDAAuD,cAAc;;;AIvBtF,SAA2B,sBAAY;",
  "names": ["registerStepFunction", "registerStepFunction", "registerStepFunction", "registerStepFunction"]
}
 diff --git a/tests/fixtures/workflow-skills/approval-expiry-escalation/.workflow-vitest/steps.mjs.debug.json b/tests/fixtures/workflow-skills/approval-expiry-escalation/.workflow-vitest/steps.mjs.debug.json new file mode 100644 index 0000000000..6f2fbb4e14 --- /dev/null +++ b/tests/fixtures/workflow-skills/approval-expiry-escalation/.workflow-vitest/steps.mjs.debug.json @@ -0,0 +1,10 @@ +{ + "stepFiles": [ + "/Users/johnlindquist/dev/workflow/packages/workflow/dist/stdlib.js", + "/Users/johnlindquist/dev/workflow/tests/fixtures/workflow-skills/approval-expiry-escalation/workflows/purchase-approval.ts" + ], + "workflowFiles": [ + "/Users/johnlindquist/dev/workflow/tests/fixtures/workflow-skills/approval-expiry-escalation/workflows/purchase-approval.ts" + ], + "serdeOnlyFiles": [] +} diff --git a/tests/fixtures/workflow-skills/approval-expiry-escalation/.workflow-vitest/workflows.mjs b/tests/fixtures/workflow-skills/approval-expiry-escalation/.workflow-vitest/workflows.mjs new file mode 100644 index 0000000000..75a6f304fd --- /dev/null +++ b/tests/fixtures/workflow-skills/approval-expiry-escalation/.workflow-vitest/workflows.mjs @@ -0,0 +1,212 @@ +// biome-ignore-all lint: generated file +/* eslint-disable */ +import { workflowEntrypoint } from 'workflow/runtime'; + +const workflowCode = `globalThis.__private_workflows = new Map(); +var __create = Object.create; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __getProtoOf = Object.getPrototypeOf; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __commonJS = (cb, mod) => function __require() { + return mod || (0, cb[__getOwnPropNames(cb)[0]])((mod = { exports: {} }).exports, mod), mod.exports; +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") 
{ + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps( + // If the importer is in node compatibility mode or this is not an ESM + // file that has been converted to a CommonJS file using a Babel- + // compatible transform (i.e. "__esModule" has not been set), then set + // "default" to the CommonJS "module.exports" for node compatibility. + isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target, + mod +)); + +// ../../../../node_modules/.pnpm/ms@2.1.3/node_modules/ms/index.js +var require_ms = __commonJS({ + "../../../../node_modules/.pnpm/ms@2.1.3/node_modules/ms/index.js"(exports, module2) { + var s = 1e3; + var m = s * 60; + var h = m * 60; + var d = h * 24; + var w = d * 7; + var y = d * 365.25; + module2.exports = function(val, options) { + options = options || {}; + var type = typeof val; + if (type === "string" && val.length > 0) { + return parse(val); + } else if (type === "number" && isFinite(val)) { + return options.long ? fmtLong(val) : fmtShort(val); + } + throw new Error("val is not a non-empty string or a valid number. 
val=" + JSON.stringify(val)); + }; + function parse(str) { + str = String(str); + if (str.length > 100) { + return; + } + var match = /^(-?(?:\\d+)?\\.?\\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?\$/i.exec(str); + if (!match) { + return; + } + var n = parseFloat(match[1]); + var type = (match[2] || "ms").toLowerCase(); + switch (type) { + case "years": + case "year": + case "yrs": + case "yr": + case "y": + return n * y; + case "weeks": + case "week": + case "w": + return n * w; + case "days": + case "day": + case "d": + return n * d; + case "hours": + case "hour": + case "hrs": + case "hr": + case "h": + return n * h; + case "minutes": + case "minute": + case "mins": + case "min": + case "m": + return n * m; + case "seconds": + case "second": + case "secs": + case "sec": + case "s": + return n * s; + case "milliseconds": + case "millisecond": + case "msecs": + case "msec": + case "ms": + return n; + default: + return void 0; + } + } + __name(parse, "parse"); + function fmtShort(ms2) { + var msAbs = Math.abs(ms2); + if (msAbs >= d) { + return Math.round(ms2 / d) + "d"; + } + if (msAbs >= h) { + return Math.round(ms2 / h) + "h"; + } + if (msAbs >= m) { + return Math.round(ms2 / m) + "m"; + } + if (msAbs >= s) { + return Math.round(ms2 / s) + "s"; + } + return ms2 + "ms"; + } + __name(fmtShort, "fmtShort"); + function fmtLong(ms2) { + var msAbs = Math.abs(ms2); + if (msAbs >= d) { + return plural(ms2, msAbs, d, "day"); + } + if (msAbs >= h) { + return plural(ms2, msAbs, h, "hour"); + } + if (msAbs >= m) { + return plural(ms2, msAbs, m, "minute"); + } + if (msAbs >= s) { + return plural(ms2, msAbs, s, "second"); + } + return ms2 + " ms"; + } + __name(fmtLong, "fmtLong"); + function plural(ms2, msAbs, n, name) { + var isPlural = msAbs >= n * 1.5; + return Math.round(ms2 / n) + " " + name + (isPlural ? 
"s" : ""); + } + __name(plural, "plural"); + } +}); + +// ../../../../packages/utils/dist/time.js +var import_ms = __toESM(require_ms(), 1); + +// ../../../../packages/core/dist/symbols.js +var WORKFLOW_CREATE_HOOK = /* @__PURE__ */ Symbol.for("WORKFLOW_CREATE_HOOK"); +var WORKFLOW_SLEEP = /* @__PURE__ */ Symbol.for("WORKFLOW_SLEEP"); + +// ../../../../packages/core/dist/sleep.js +async function sleep(param) { + const sleepFn = globalThis[WORKFLOW_SLEEP]; + if (!sleepFn) { + throw new Error("\`sleep()\` can only be called inside a workflow function"); + } + return sleepFn(param); +} +__name(sleep, "sleep"); + +// ../../../../packages/core/dist/workflow/create-hook.js +function createHook(options) { + const createHookFn = globalThis[WORKFLOW_CREATE_HOOK]; + if (!createHookFn) { + throw new Error("\`createHook()\` can only be called inside a workflow function"); + } + return createHookFn(options); +} +__name(createHook, "createHook"); + +// ../../../../packages/workflow/dist/stdlib.js +var fetch = globalThis[/* @__PURE__ */ Symbol.for("WORKFLOW_USE_STEP")]("step//./packages/workflow/dist/stdlib//fetch"); + +// workflows/purchase-approval.ts +var notifyApprover = globalThis[/* @__PURE__ */ Symbol.for("WORKFLOW_USE_STEP")]("step//./workflows/purchase-approval//notifyApprover"); +var recordDecision = globalThis[/* @__PURE__ */ Symbol.for("WORKFLOW_USE_STEP")]("step//./workflows/purchase-approval//recordDecision"); +async function purchaseApproval(poNumber, amount, managerId, directorId) { + await notifyApprover(poNumber, managerId, "approval-request"); + const managerHook = createHook(\`approval:po-\${poNumber}\`); + const managerTimeout = sleep("48h"); + const managerResult = await Promise.race([ + managerHook, + managerTimeout + ]); + if (managerResult !== void 0) { + return recordDecision(poNumber, managerResult.approved ? 
"approved" : "rejected", managerId); + } + await notifyApprover(poNumber, directorId, "escalation-request"); + const directorHook = createHook(\`escalation:po-\${poNumber}\`); + const directorTimeout = sleep("24h"); + const directorResult = await Promise.race([ + directorHook, + directorTimeout + ]); + if (directorResult !== void 0) { + return recordDecision(poNumber, directorResult.approved ? "approved" : "rejected", directorId); + } + await notifyApprover(poNumber, managerId, "auto-rejection-notice"); + return recordDecision(poNumber, "auto-rejected", "system"); +} +__name(purchaseApproval, "purchaseApproval"); +purchaseApproval.workflowId = "workflow//./workflows/purchase-approval//purchaseApproval"; +globalThis.__private_workflows.set("workflow//./workflows/purchase-approval//purchaseApproval", purchaseApproval); +//# sourceMappingURL=data:application/json;base64,{
  "version": 3,
  "sources": ["../../../../node_modules/.pnpm/ms@2.1.3/node_modules/ms/index.js", "../../../../packages/utils/src/time.ts", "../../../../packages/core/src/symbols.ts", "../../../../packages/core/src/sleep.ts", "../../../../packages/core/src/workflow/create-hook.ts", "../../../../packages/workflow/src/stdlib.ts", "workflows/purchase-approval.ts"],
  "sourcesContent": ["/**\n * Helpers.\n */ var s = 1000;\nvar m = s * 60;\nvar h = m * 60;\nvar d = h * 24;\nvar w = d * 7;\nvar y = d * 365.25;\n/**\n * Parse or format the given `val`.\n *\n * Options:\n *\n *  - `long` verbose formatting [false]\n *\n * @param {String|Number} val\n * @param {Object} [options]\n * @throws {Error} throw an error if val is not a non-empty string or a number\n * @return {String|Number}\n * @api public\n */ module.exports = function(val, options) {\n    options = options || {};\n    var type = typeof val;\n    if (type === 'string' && val.length > 0) {\n        return parse(val);\n    } else if (type === 'number' && isFinite(val)) {\n        return options.long ? fmtLong(val) : fmtShort(val);\n    }\n    throw new Error('val is not a non-empty string or a valid number. val=' + JSON.stringify(val));\n};\n/**\n * Parse the given `str` and return milliseconds.\n *\n * @param {String} str\n * @return {Number}\n * @api private\n */ function parse(str) {\n    str = String(str);\n    if (str.length > 100) {\n        return;\n    }\n    var match = /^(-?(?:\\d+)?\\.?\\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?$/i.exec(str);\n    if (!match) {\n        return;\n    }\n    var n = parseFloat(match[1]);\n    var type = (match[2] || 'ms').toLowerCase();\n    switch(type){\n        case 'years':\n        case 'year':\n        case 'yrs':\n        case 'yr':\n        case 'y':\n            return n * y;\n        case 'weeks':\n        case 'week':\n        case 'w':\n            return n * w;\n        case 'days':\n        case 'day':\n        case 'd':\n            return n * d;\n        case 'hours':\n        case 'hour':\n        case 'hrs':\n        case 'hr':\n        case 'h':\n            return n * h;\n        case 'minutes':\n        case 'minute':\n        case 'mins':\n        case 'min':\n        case 'm':\n            return n * m;\n        case 'seconds':\n        
case 'second':\n        case 'secs':\n        case 'sec':\n        case 's':\n            return n * s;\n        case 'milliseconds':\n        case 'millisecond':\n        case 'msecs':\n        case 'msec':\n        case 'ms':\n            return n;\n        default:\n            return undefined;\n    }\n}\n/**\n * Short format for `ms`.\n *\n * @param {Number} ms\n * @return {String}\n * @api private\n */ function fmtShort(ms) {\n    var msAbs = Math.abs(ms);\n    if (msAbs >= d) {\n        return Math.round(ms / d) + 'd';\n    }\n    if (msAbs >= h) {\n        return Math.round(ms / h) + 'h';\n    }\n    if (msAbs >= m) {\n        return Math.round(ms / m) + 'm';\n    }\n    if (msAbs >= s) {\n        return Math.round(ms / s) + 's';\n    }\n    return ms + 'ms';\n}\n/**\n * Long format for `ms`.\n *\n * @param {Number} ms\n * @return {String}\n * @api private\n */ function fmtLong(ms) {\n    var msAbs = Math.abs(ms);\n    if (msAbs >= d) {\n        return plural(ms, msAbs, d, 'day');\n    }\n    if (msAbs >= h) {\n        return plural(ms, msAbs, h, 'hour');\n    }\n    if (msAbs >= m) {\n        return plural(ms, msAbs, m, 'minute');\n    }\n    if (msAbs >= s) {\n        return plural(ms, msAbs, s, 'second');\n    }\n    return ms + ' ms';\n}\n/**\n * Pluralization helper.\n */ function plural(ms, msAbs, n, name) {\n    var isPlural = msAbs >= n * 1.5;\n    return Math.round(ms / n) + ' ' + name + (isPlural ? 's' : '');\n}\n", "import type { StringValue } from 'ms';\nimport ms from 'ms';\n\n/**\n * Parses a duration parameter (string, number, or Date) and returns a Date object\n * representing when the duration should elapse.\n *\n * - For strings: Parses duration strings like \"1s\", \"5m\", \"1h\", etc. 
using the `ms` library\n * - For numbers: Treats as milliseconds from now\n * - For Date objects: Returns the date directly (handles both Date instances and date-like objects from deserialization)\n *\n * @param param - The duration parameter (StringValue, Date, or number of milliseconds)\n * @returns A Date object representing when the duration should elapse\n * @throws {Error} If the parameter is invalid or cannot be parsed\n */\nexport function parseDurationToDate(param: StringValue | Date | number): Date {\n  if (typeof param === 'string') {\n    const durationMs = ms(param);\n    if (typeof durationMs !== 'number' || durationMs < 0) {\n      throw new Error(\n        `Invalid duration: \"${param}\". Expected a valid duration string like \"1s\", \"1m\", \"1h\", etc.`\n      );\n    }\n    return new Date(Date.now() + durationMs);\n  } else if (typeof param === 'number') {\n    if (param < 0 || !Number.isFinite(param)) {\n      throw new Error(\n        `Invalid duration: ${param}. Expected a non-negative finite number of milliseconds.`\n      );\n    }\n    return new Date(Date.now() + param);\n  } else if (\n    param instanceof Date ||\n    (param &&\n      typeof param === 'object' &&\n      typeof (param as any).getTime === 'function')\n  ) {\n    // Handle both Date instances and date-like objects (from deserialization)\n    return param instanceof Date ? param : new Date((param as any).getTime());\n  } else {\n    throw new Error(\n      `Invalid duration parameter. 
Expected a duration string, number (milliseconds), or Date object.`\n    );\n  }\n}\n", "export const WORKFLOW_USE_STEP = Symbol.for('WORKFLOW_USE_STEP');\nexport const WORKFLOW_CREATE_HOOK = Symbol.for('WORKFLOW_CREATE_HOOK');\nexport const WORKFLOW_SLEEP = Symbol.for('WORKFLOW_SLEEP');\nexport const WORKFLOW_CONTEXT = Symbol.for('WORKFLOW_CONTEXT');\nexport const WORKFLOW_GET_STREAM_ID = Symbol.for('WORKFLOW_GET_STREAM_ID');\nexport const STABLE_ULID = Symbol.for('WORKFLOW_STABLE_ULID');\nexport const STREAM_NAME_SYMBOL = Symbol.for('WORKFLOW_STREAM_NAME');\nexport const STREAM_TYPE_SYMBOL = Symbol.for('WORKFLOW_STREAM_TYPE');\nexport const BODY_INIT_SYMBOL = Symbol.for('BODY_INIT');\nexport const WEBHOOK_RESPONSE_WRITABLE = Symbol.for(\n  'WEBHOOK_RESPONSE_WRITABLE'\n);\n\n/**\n * Symbol used to store the class registry on globalThis in workflow mode.\n * This allows the deserializer to find classes by classId in the VM context.\n */\nexport const WORKFLOW_CLASS_REGISTRY = Symbol.for('workflow-class-registry');\n", "import type { StringValue } from 'ms';\nimport { WORKFLOW_SLEEP } from './symbols.js';\n\n/**\n * Sleep within a workflow for a given duration.\n *\n * This is a built-in runtime function that uses timer events in the event log.\n *\n * @param duration - The duration to sleep for, this is a string in the format\n * of `\"1000ms\"`, `\"1s\"`, `\"1m\"`, `\"1h\"`, or `\"1d\"`.\n * @overload\n * @returns A promise that resolves when the sleep is complete.\n */\nexport async function sleep(duration: StringValue): Promise<void>;\n\n/**\n * Sleep within a workflow until a specific date.\n *\n * This is a built-in runtime function that uses timer events in the event log.\n *\n * @param date - The date to sleep until, this must be a future date.\n * @overload\n * @returns A promise that resolves when the sleep is complete.\n */\nexport async function sleep(date: Date): Promise<void>;\n\n/**\n * Sleep within a workflow for a given duration in milliseconds.\n 
*\n * This is a built-in runtime function that uses timer events in the event log.\n *\n * @param durationMs - The duration to sleep for in milliseconds.\n * @overload\n * @returns A promise that resolves when the sleep is complete.\n */\nexport async function sleep(durationMs: number): Promise<void>;\n\nexport async function sleep(param: StringValue | Date | number): Promise<void> {\n  // Inside the workflow VM, the sleep function is stored in the globalThis object behind a symbol\n  const sleepFn = (globalThis as any)[WORKFLOW_SLEEP];\n  if (!sleepFn) {\n    throw new Error('`sleep()` can only be called inside a workflow function');\n  }\n  return sleepFn(param);\n}\n", "import type {\n  Hook,\n  HookOptions,\n  RequestWithResponse,\n  Webhook,\n  WebhookOptions,\n} from '../create-hook.js';\nimport { WORKFLOW_CREATE_HOOK } from '../symbols.js';\nimport { getWorkflowMetadata } from './get-workflow-metadata.js';\n\nexport function createHook<T = any>(options?: HookOptions): Hook<T> {\n  // Inside the workflow VM, the hook function is stored in the globalThis object behind a symbol\n  const createHookFn = (globalThis as any)[\n    WORKFLOW_CREATE_HOOK\n  ] as typeof createHook<T>;\n  if (!createHookFn) {\n    throw new Error(\n      '`createHook()` can only be called inside a workflow function'\n    );\n  }\n  return createHookFn(options);\n}\n\nexport function createWebhook(\n  options: WebhookOptions & { respondWith: 'manual' }\n): Webhook<RequestWithResponse>;\nexport function createWebhook(options?: WebhookOptions): Webhook<Request>;\nexport function createWebhook(\n  options?: WebhookOptions\n): Webhook<Request> | Webhook<RequestWithResponse> {\n  const { respondWith, token, ...rest } = (options ?? {}) as WebhookOptions & {\n    token?: string;\n  };\n\n  if (token !== undefined) {\n    throw new Error(\n      '`createWebhook()` does not accept a `token` option. Webhook tokens are always randomly generated. 
Use `createHook()` with `resumeHook()` for deterministic token patterns.'\n    );\n  }\n\n  let metadata: Pick<WebhookOptions, 'respondWith'> | undefined;\n  if (typeof respondWith !== 'undefined') {\n    metadata = { respondWith };\n  }\n\n  const hook = createHook({ ...rest, metadata, isWebhook: true }) as\n    | Webhook<Request>\n    | Webhook<RequestWithResponse>;\n\n  const { url } = getWorkflowMetadata();\n  hook.url = `${url}/.well-known/workflow/v1/webhook/${encodeURIComponent(hook.token)}`;\n\n  return hook;\n}\n", "/**\n * This is the \"standard library\" of steps that we make available to all workflow users.\n * The can be imported like so: `import { fetch } from 'workflow'`. and used in workflow.\n * The need to be exported directly in this package and cannot live in `core` to prevent\n * circular dependencies post-compilation.\n */\n\n/**\n * A hoisted `fetch()` function that is executed as a \"step\" function,\n * for use within workflow functions.\n *\n * @see https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API\n */\nexport async function fetch(...args: Parameters<typeof globalThis.fetch>) {\n  'use step';\n  return globalThis.fetch(...args);\n}\n", "import { createHook, sleep } from \"workflow\";\n/**__internal_workflows{\"workflows\":{\"workflows/purchase-approval.ts\":{\"default\":{\"workflowId\":\"workflow//./workflows/purchase-approval//purchaseApproval\"}}},\"steps\":{\"workflows/purchase-approval.ts\":{\"notifyApprover\":{\"stepId\":\"step//./workflows/purchase-approval//notifyApprover\"},\"recordDecision\":{\"stepId\":\"step//./workflows/purchase-approval//recordDecision\"}}}}*/;\nconst notifyApprover = globalThis[Symbol.for(\"WORKFLOW_USE_STEP\")](\"step//./workflows/purchase-approval//notifyApprover\");\nconst recordDecision = globalThis[Symbol.for(\"WORKFLOW_USE_STEP\")](\"step//./workflows/purchase-approval//recordDecision\");\nexport default async function purchaseApproval(poNumber, amount, managerId, directorId) {\n    // Step 1: 
Notify manager and wait for approval with 48h timeout\n    await notifyApprover(poNumber, managerId, \"approval-request\");\n    const managerHook = createHook(`approval:po-${poNumber}`);\n    const managerTimeout = sleep(\"48h\");\n    const managerResult = await Promise.race([\n        managerHook,\n        managerTimeout\n    ]);\n    if (managerResult !== undefined) {\n        // Manager responded\n        return recordDecision(poNumber, managerResult.approved ? \"approved\" : \"rejected\", managerId);\n    }\n    // Step 2: Manager timed out \u2014 escalate to director with 24h timeout\n    await notifyApprover(poNumber, directorId, \"escalation-request\");\n    const directorHook = createHook(`escalation:po-${poNumber}`);\n    const directorTimeout = sleep(\"24h\");\n    const directorResult = await Promise.race([\n        directorHook,\n        directorTimeout\n    ]);\n    if (directorResult !== undefined) {\n        // Director responded\n        return recordDecision(poNumber, directorResult.approved ? \"approved\" : \"rejected\", directorId);\n    }\n    // Step 3: Full timeout \u2014 auto-reject\n    await notifyApprover(poNumber, managerId, \"auto-rejection-notice\");\n    return recordDecision(poNumber, \"auto-rejected\", \"system\");\n}\npurchaseApproval.workflowId = \"workflow//./workflows/purchase-approval//purchaseApproval\";\nglobalThis.__private_workflows.set(\"workflow//./workflows/purchase-approval//purchaseApproval\", purchaseApproval);\n"],
  "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA,8EAAAA,SAAA;AAEI,QAAI,IAAI;AACZ,QAAI,IAAI,IAAI;AACZ,QAAI,IAAI,IAAI;AACZ,QAAI,IAAI,IAAI;AACZ,QAAI,IAAI,IAAI;AACZ,QAAI,IAAI,IAAI;AAaR,IAAAA,QAAO,UAAU,SAAS,KAAK,SAAS;AACxC,gBAAU,WAAW,CAAC;AACtB,UAAI,OAAO,OAAO;AAClB,UAAI,SAAS,YAAY,IAAI,SAAS,GAAG;AACrC,eAAO,MAAM,GAAG;AAAA,MACpB,WAAW,SAAS,YAAY,SAAS,GAAG,GAAG;AAC3C,eAAO,QAAQ,OAAO,QAAQ,GAAG,IAAI,SAAS,GAAG;AAAA,MACrD;AACA,YAAM,IAAI,MAAM,0DAA0D,KAAK,UAAU,GAAG,CAAC;AAAA,IACjG;AAOI,aAAS,MAAM,KAAK;AACpB,YAAM,OAAO,GAAG;AAChB,UAAI,IAAI,SAAS,KAAK;AAClB;AAAA,MACJ;AACA,UAAI,QAAQ,mIAAmI,KAAK,GAAG;AACvJ,UAAI,CAAC,OAAO;AACR;AAAA,MACJ;AACA,UAAI,IAAI,WAAW,MAAM,CAAC,CAAC;AAC3B,UAAI,QAAQ,MAAM,CAAC,KAAK,MAAM,YAAY;AAC1C,cAAO,MAAK;AAAA,QACR,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK;AACD,iBAAO,IAAI;AAAA,QACf,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK;AACD,iBAAO,IAAI;AAAA,QACf,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK;AACD,iBAAO,IAAI;AAAA,QACf,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK;AACD,iBAAO,IAAI;AAAA,QACf,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK;AACD,iBAAO,IAAI;AAAA,QACf,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK;AACD,iBAAO,IAAI;AAAA,QACf,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK;AACD,iBAAO;AAAA,QACX;AACI,iBAAO;AAAA,MACf;AAAA,IACJ;AArDa;AA4DT,aAAS,SAASC,KAAI;AACtB,UAAI,QAAQ,KAAK,IAAIA,GAAE;AACvB,UAAI,SAAS,GAAG;AACZ,eAAO,KAAK,MAAMA,MAAK,CAAC,IAAI;AAAA,MAChC;AACA,UAAI,SAAS,GAAG;AACZ,eAAO,KAAK,MAAMA,MAAK,CAAC,IAAI;AAAA,MAChC;AACA,UAAI,SAAS,GAAG;AACZ,eAAO,KAAK,MAAMA,MAAK,CAAC,IAAI;AAAA,MAChC;AACA,UAAI,SAAS,GAAG;AACZ,eAAO,KAAK,MAAMA,MAAK,CAAC,IAAI;AAAA,MAChC;AACA,aAAOA,MAAK;AAAA,IAChB;AAfa;AAsBT,aAAS,QAAQA,KAAI;AACrB,UAAI,QAAQ,KAAK,IAAIA,GAAE;AACvB,UAAI,SAAS,GAAG;AACZ,eAAO,OAAOA,KAAI,OAAO,GAAG,KAAK;AAAA,MACrC;AACA,UAAI,SAAS,GAAG;AACZ,eAAO,OAAOA,KAAI,OAAO,GAAG,MAAM;AAAA,MACtC;AACA,UAAI,SAAS,GAAG;AACZ,eAAO,OAAOA,KAAI,OAAO,GAAG,QAAQ;AAAA,MACxC;AACA,UAAI,SAAS,GAAG;AACZ,eAAO,OAAOA,KAAI,OAAO,GAAG,QAA
Q;AAAA,MACxC;AACA,aAAOA,MAAK;AAAA,IAChB;AAfa;AAkBT,aAAS,OAAOA,KAAI,OAAO,GAAG,MAAM;AACpC,UAAI,WAAW,SAAS,IAAI;AAC5B,aAAO,KAAK,MAAMA,MAAK,CAAC,IAAI,MAAM,QAAQ,WAAW,MAAM;AAAA,IAC/D;AAHa;AAAA;AAAA;;;ACvIb,gBAAe;;;ACAR,IAAM,uBAAuB,uBAAO,IAAI,sBAAsB;AAC9D,IAAM,iBAAiB,uBAAO,IAAI,gBAAgB;;;ACmCzD,eAAsB,MAAM,OAAkC;AAE5D,QAAM,UAAW,WAAmB,cAAc;AAClD,MAAI,CAAC,SAAS;AACZ,UAAM,IAAI,MAAM,yDAAyD;EAC3E;AACA,SAAO,QAAQ,KAAK;AACtB;AAPsB;;;AC3BhB,SAAU,WAAoB,SAAqB;AAEvD,QAAM,eAAgB,WACpB,oBAAoB;AAEtB,MAAI,CAAC,cAAc;AACjB,UAAM,IAAI,MACR,8DAA8D;EAElE;AACA,SAAO,aAAa,OAAO;AAC7B;AAXgB;;;ACEb,IAAA,QAAA,WAAA,uBAAA,IAAA,mBAAA,CAAA,EAAA,8CAAA;;;ACVH,IAAM,iBAAiB,WAAW,uBAAO,IAAI,mBAAmB,CAAC,EAAE,qDAAqD;AACxH,IAAM,iBAAiB,WAAW,uBAAO,IAAI,mBAAmB,CAAC,EAAE,qDAAqD;AACxH,eAAO,iBAAwC,UAAU,QAAQ,WAAW,YAAY;AAEpF,QAAM,eAAe,UAAU,WAAW,kBAAkB;AAC5D,QAAM,cAAc,WAAW,eAAe,QAAQ,EAAE;AACxD,QAAM,iBAAiB,MAAM,KAAK;AAClC,QAAM,gBAAgB,MAAM,QAAQ,KAAK;AAAA,IACrC;AAAA,IACA;AAAA,EACJ,CAAC;AACD,MAAI,kBAAkB,QAAW;AAE7B,WAAO,eAAe,UAAU,cAAc,WAAW,aAAa,YAAY,SAAS;AAAA,EAC/F;AAEA,QAAM,eAAe,UAAU,YAAY,oBAAoB;AAC/D,QAAM,eAAe,WAAW,iBAAiB,QAAQ,EAAE;AAC3D,QAAM,kBAAkB,MAAM,KAAK;AACnC,QAAM,iBAAiB,MAAM,QAAQ,KAAK;AAAA,IACtC;AAAA,IACA;AAAA,EACJ,CAAC;AACD,MAAI,mBAAmB,QAAW;AAE9B,WAAO,eAAe,UAAU,eAAe,WAAW,aAAa,YAAY,UAAU;AAAA,EACjG;AAEA,QAAM,eAAe,UAAU,WAAW,uBAAuB;AACjE,SAAO,eAAe,UAAU,iBAAiB,QAAQ;AAC7D;AA5B8B;AA6B9B,iBAAiB,aAAa;AAC9B,WAAW,oBAAoB,IAAI,6DAA6D,gBAAgB;",
  "names": ["module", "ms"]
}
 +`; + +export const POST = workflowEntrypoint(workflowCode); diff --git a/tests/fixtures/workflow-skills/approval-expiry-escalation/.workflow-vitest/workflows.mjs.debug.json b/tests/fixtures/workflow-skills/approval-expiry-escalation/.workflow-vitest/workflows.mjs.debug.json new file mode 100644 index 0000000000..d8db52fdf5 --- /dev/null +++ b/tests/fixtures/workflow-skills/approval-expiry-escalation/.workflow-vitest/workflows.mjs.debug.json @@ -0,0 +1,6 @@ +{ + "workflowFiles": [ + "/Users/johnlindquist/dev/workflow/tests/fixtures/workflow-skills/approval-expiry-escalation/workflows/purchase-approval.ts" + ], + "serdeOnlyFiles": [] +} diff --git a/tests/fixtures/workflow-skills/compensation-saga/.workflow-vitest/steps.mjs b/tests/fixtures/workflow-skills/compensation-saga/.workflow-vitest/steps.mjs new file mode 100644 index 0000000000..99ca0c7234 --- /dev/null +++ b/tests/fixtures/workflow-skills/compensation-saga/.workflow-vitest/steps.mjs @@ -0,0 +1,151 @@ +// biome-ignore-all lint: generated file +/* eslint-disable */ + +var __defProp = Object.defineProperty; +var __name = (target, value) => + __defProp(target, 'name', { value, configurable: true }); + +// ../../../../packages/workflow/dist/internal/builtins.js +import { registerStepFunction } from 'workflow/internal/private'; +async function __builtin_response_array_buffer() { + return this.arrayBuffer(); +} +__name(__builtin_response_array_buffer, '__builtin_response_array_buffer'); +async function __builtin_response_json() { + return this.json(); +} +__name(__builtin_response_json, '__builtin_response_json'); +async function __builtin_response_text() { + return this.text(); +} +__name(__builtin_response_text, '__builtin_response_text'); +registerStepFunction( + '__builtin_response_array_buffer', + __builtin_response_array_buffer +); +registerStepFunction('__builtin_response_json', __builtin_response_json); +registerStepFunction('__builtin_response_text', __builtin_response_text); + +// 
../../../../packages/workflow/dist/stdlib.js +import { registerStepFunction as registerStepFunction2 } from 'workflow/internal/private'; +async function fetch(...args) { + return globalThis.fetch(...args); +} +__name(fetch, 'fetch'); +registerStepFunction2('step//./packages/workflow/dist/stdlib//fetch', fetch); + +// workflows/order-saga.ts +import { registerStepFunction as registerStepFunction3 } from 'workflow/internal/private'; + +// ../../../../packages/utils/dist/index.js +import { pluralize } from '../../../../../packages/utils/dist/pluralize.js'; +import { + parseClassName, + parseStepName, + parseWorkflowName, +} from '../../../../../packages/utils/dist/parse-name.js'; +import { + once, + withResolvers, +} from '../../../../../packages/utils/dist/promise.js'; +import { parseDurationToDate } from '../../../../../packages/utils/dist/time.js'; +import { + isVercelWorldTarget, + resolveWorkflowTargetWorld, + usesVercelWorld, +} from '../../../../../packages/utils/dist/world-target.js'; + +// ../../../../packages/errors/dist/index.js +import { RUN_ERROR_CODES } from '../../../../../packages/errors/dist/error-codes.js'; + +// ../../../../packages/core/dist/index.js +import { + createHook, + createWebhook, +} from '../../../../../packages/core/dist/create-hook.js'; +import { defineHook } from '../../../../../packages/core/dist/define-hook.js'; +import { sleep } from '../../../../../packages/core/dist/sleep.js'; +import { getStepMetadata } from '../../../../../packages/core/dist/step/get-step-metadata.js'; +import { getWorkflowMetadata } from '../../../../../packages/core/dist/step/get-workflow-metadata.js'; +import { getWritable } from '../../../../../packages/core/dist/step/writable-stream.js'; + +// workflows/order-saga.ts +var reserveInventory = /* @__PURE__ */ __name(async (orderId, items) => { + const reservation = await warehouse.reserve({ + idempotencyKey: `inventory:${orderId}`, + items, + }); + return reservation; +}, 'reserveInventory'); +var 
chargePayment = /* @__PURE__ */ __name(async (orderId, amount) => { + const result = await paymentProvider.charge({ + idempotencyKey: `payment:${orderId}`, + amount, + }); + return result; +}, 'chargePayment'); +var bookShipment = /* @__PURE__ */ __name(async (orderId, address) => { + const shipment = await carrier.book({ + idempotencyKey: `shipment:${orderId}`, + address, + }); + return shipment; +}, 'bookShipment'); +var refundPayment = /* @__PURE__ */ __name(async (orderId, chargeId) => { + await paymentProvider.refund({ + idempotencyKey: `refund:${orderId}`, + chargeId, + }); +}, 'refundPayment'); +var releaseInventory = /* @__PURE__ */ __name( + async (orderId, reservationId) => { + await warehouse.release({ + idempotencyKey: `release:${orderId}`, + reservationId, + }); + }, + 'releaseInventory' +); +var sendConfirmation = /* @__PURE__ */ __name(async (orderId, email) => { + await emailService.send({ + idempotencyKey: `confirmation:${orderId}`, + to: email, + template: 'order-confirmed', + }); +}, 'sendConfirmation'); +async function orderSaga(orderId, amount, items, address, email) { + throw new Error( + 'You attempted to execute workflow orderSaga function directly. 
To start a workflow, use start(orderSaga) from workflow/api' + ); +} +__name(orderSaga, 'orderSaga'); +orderSaga.workflowId = 'workflow//./workflows/order-saga//orderSaga'; +registerStepFunction3( + 'step//./workflows/order-saga//reserveInventory', + reserveInventory +); +registerStepFunction3( + 'step//./workflows/order-saga//chargePayment', + chargePayment +); +registerStepFunction3( + 'step//./workflows/order-saga//bookShipment', + bookShipment +); +registerStepFunction3( + 'step//./workflows/order-saga//refundPayment', + refundPayment +); +registerStepFunction3( + 'step//./workflows/order-saga//releaseInventory', + releaseInventory +); +registerStepFunction3( + 'step//./workflows/order-saga//sendConfirmation', + sendConfirmation +); + +// virtual-entry.js +import { stepEntrypoint } from 'workflow/runtime'; +export { stepEntrypoint as POST }; +//# sourceMappingURL=data:application/json;base64,{
  "version": 3,
  "sources": ["../../../../../packages/workflow/src/internal/builtins.ts", "../../../../../packages/workflow/src/stdlib.ts", "../workflows/order-saga.ts", "../../../../../packages/utils/src/index.ts", "../../../../../packages/errors/src/index.ts", "../../../../../packages/core/src/index.ts", "../virtual-entry.js"],
  "sourcesContent": ["/**\n * These are the built-in steps that are \"automatically available\" in the workflow scope. They are\n * similar to \"stdlib\" except that are not meant to be imported by users, but are instead \"just available\"\n * alongside user defined steps. They are used internally by the runtime\n */\n\nexport async function __builtin_response_array_buffer(\n  this: Request | Response\n) {\n  'use step';\n  return this.arrayBuffer();\n}\n\nexport async function __builtin_response_json(this: Request | Response) {\n  'use step';\n  return this.json();\n}\n\nexport async function __builtin_response_text(this: Request | Response) {\n  'use step';\n  return this.text();\n}\n", "/**\n * This is the \"standard library\" of steps that we make available to all workflow users.\n * The can be imported like so: `import { fetch } from 'workflow'`. and used in workflow.\n * The need to be exported directly in this package and cannot live in `core` to prevent\n * circular dependencies post-compilation.\n */\n\n/**\n * A hoisted `fetch()` function that is executed as a \"step\" function,\n * for use within workflow functions.\n *\n * @see https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API\n */\nexport async function fetch(...args: Parameters<typeof globalThis.fetch>) {\n  'use step';\n  return globalThis.fetch(...args);\n}\n", "import { registerStepFunction } from \"workflow/internal/private\";\nimport { FatalError } from 
\"workflow\";\n/**__internal_workflows{\"workflows\":{\"workflows/order-saga.ts\":{\"default\":{\"workflowId\":\"workflow//./workflows/order-saga//orderSaga\"}}},\"steps\":{\"workflows/order-saga.ts\":{\"bookShipment\":{\"stepId\":\"step//./workflows/order-saga//bookShipment\"},\"chargePayment\":{\"stepId\":\"step//./workflows/order-saga//chargePayment\"},\"refundPayment\":{\"stepId\":\"step//./workflows/order-saga//refundPayment\"},\"releaseInventory\":{\"stepId\":\"step//./workflows/order-saga//releaseInventory\"},\"reserveInventory\":{\"stepId\":\"step//./workflows/order-saga//reserveInventory\"},\"sendConfirmation\":{\"stepId\":\"step//./workflows/order-saga//sendConfirmation\"}}}}*/;\nconst reserveInventory = async (orderId, items)=>{\n    const reservation = await warehouse.reserve({\n        idempotencyKey: `inventory:${orderId}`,\n        items\n    });\n    return reservation;\n};\nconst chargePayment = async (orderId, amount)=>{\n    const result = await paymentProvider.charge({\n        idempotencyKey: `payment:${orderId}`,\n        amount\n    });\n    return result;\n};\nconst bookShipment = async (orderId, address)=>{\n    const shipment = await carrier.book({\n        idempotencyKey: `shipment:${orderId}`,\n        address\n    });\n    return shipment;\n};\nconst refundPayment = async (orderId, chargeId)=>{\n    await paymentProvider.refund({\n        idempotencyKey: `refund:${orderId}`,\n        chargeId\n    });\n};\nconst releaseInventory = async (orderId, reservationId)=>{\n    await warehouse.release({\n        idempotencyKey: `release:${orderId}`,\n        reservationId\n    });\n};\nconst sendConfirmation = async (orderId, email)=>{\n    await emailService.send({\n        idempotencyKey: `confirmation:${orderId}`,\n        to: email,\n        template: \"order-confirmed\"\n    });\n};\nexport default async function orderSaga(orderId, amount, items, address, email) {\n    throw new Error(\"You attempted to execute workflow orderSaga function 
directly. To start a workflow, use start(orderSaga) from workflow/api\");\n}\norderSaga.workflowId = \"workflow//./workflows/order-saga//orderSaga\";\nregisterStepFunction(\"step//./workflows/order-saga//reserveInventory\", reserveInventory);\nregisterStepFunction(\"step//./workflows/order-saga//chargePayment\", chargePayment);\nregisterStepFunction(\"step//./workflows/order-saga//bookShipment\", bookShipment);\nregisterStepFunction(\"step//./workflows/order-saga//refundPayment\", refundPayment);\nregisterStepFunction(\"step//./workflows/order-saga//releaseInventory\", releaseInventory);\nregisterStepFunction(\"step//./workflows/order-saga//sendConfirmation\", sendConfirmation);\n", "export { pluralize } from './pluralize.js';\nexport {\n  parseClassName,\n  parseStepName,\n  parseWorkflowName,\n} from './parse-name.js';\nexport { once, type PromiseWithResolvers, withResolvers } from './promise.js';\nexport { parseDurationToDate } from './time.js';\nexport {\n  isVercelWorldTarget,\n  resolveWorkflowTargetWorld,\n  usesVercelWorld,\n} from './world-target.js';\n", "import { parseDurationToDate } from '@workflow/utils';\nimport type { StructuredError } from '@workflow/world';\nimport type { StringValue } from 'ms';\n\nconst BASE_URL = 'https://useworkflow.dev/err';\n\n/**\n * @internal\n * Check if a value is an Error without relying on Node.js utilities.\n * This is needed for error classes that can be used in VM contexts where\n * Node.js imports are not available.\n */\nfunction isError(value: unknown): value is { name: string; message: string } {\n  return (\n    typeof value === 'object' &&\n    value !== null &&\n    'name' in value &&\n    'message' in value\n  );\n}\n\n/**\n * @internal\n * All the slugs of the errors used for documentation links.\n */\nexport const ERROR_SLUGS = {\n  NODE_JS_MODULE_IN_WORKFLOW: 'node-js-module-in-workflow',\n  START_INVALID_WORKFLOW_FUNCTION: 'start-invalid-workflow-function',\n  SERIALIZATION_FAILED: 
'serialization-failed',\n  WEBHOOK_INVALID_RESPOND_WITH_VALUE: 'webhook-invalid-respond-with-value',\n  WEBHOOK_RESPONSE_NOT_SENT: 'webhook-response-not-sent',\n  FETCH_IN_WORKFLOW_FUNCTION: 'fetch-in-workflow',\n  TIMEOUT_FUNCTIONS_IN_WORKFLOW: 'timeout-in-workflow',\n  HOOK_CONFLICT: 'hook-conflict',\n  CORRUPTED_EVENT_LOG: 'corrupted-event-log',\n  STEP_NOT_REGISTERED: 'step-not-registered',\n  WORKFLOW_NOT_REGISTERED: 'workflow-not-registered',\n} as const;\n\ntype ErrorSlug = (typeof ERROR_SLUGS)[keyof typeof ERROR_SLUGS];\n\ninterface WorkflowErrorOptions extends ErrorOptions {\n  /**\n   * The slug of the error. This will be used to generate a link to the error documentation.\n   */\n  slug?: ErrorSlug;\n}\n\n/**\n * The base class for all Workflow-related errors.\n *\n * This error is thrown by the Workflow DevKit when internal operations fail.\n * You can use this class with `instanceof` to catch any Workflow DevKit error.\n *\n * @example\n * ```ts\n * try {\n *   await getRun(runId);\n * } catch (error) {\n *   if (error instanceof WorkflowError) {\n *     console.error('Workflow DevKit error:', error.message);\n *   }\n * }\n * ```\n */\nexport class WorkflowError extends Error {\n  readonly cause?: unknown;\n\n  constructor(message: string, options?: WorkflowErrorOptions) {\n    const msgDocs = options?.slug\n      ? `${message}\\n\\nLearn more: ${BASE_URL}/${options.slug}`\n      : message;\n    super(msgDocs, { cause: options?.cause });\n    this.cause = options?.cause;\n\n    if (options?.cause instanceof Error) {\n      this.stack = `${this.stack}\\nCaused by: ${options.cause.stack}`;\n    }\n  }\n\n  static is(value: unknown): value is WorkflowError {\n    return isError(value) && value.name === 'WorkflowError';\n  }\n}\n\n/**\n * Thrown when a world (storage backend) operation fails unexpectedly.\n *\n * This is the catch-all error for world implementations. 
Specific,\n * well-known failure modes have dedicated error types (e.g.\n * EntityConflictError, RunExpiredError, ThrottleError). This error\n * covers everything else \u2014 validation failures, missing entities\n * without a dedicated type, or unexpected HTTP errors from world-vercel.\n */\nexport class WorkflowWorldError extends WorkflowError {\n  status?: number;\n  code?: string;\n  url?: string;\n  /** Retry-After value in seconds, present on 429 and 425 responses */\n  retryAfter?: number;\n\n  constructor(\n    message: string,\n    options?: {\n      status?: number;\n      url?: string;\n      code?: string;\n      retryAfter?: number;\n      cause?: unknown;\n    }\n  ) {\n    super(message, {\n      cause: options?.cause,\n    });\n    this.name = 'WorkflowWorldError';\n    this.status = options?.status;\n    this.code = options?.code;\n    this.url = options?.url;\n    this.retryAfter = options?.retryAfter;\n  }\n\n  static is(value: unknown): value is WorkflowWorldError {\n    return isError(value) && value.name === 'WorkflowWorldError';\n  }\n}\n\n/**\n * Thrown when a workflow run fails during execution.\n *\n * This error indicates that the workflow encountered a fatal error and cannot\n * continue. It is thrown when awaiting `run.returnValue` on a run whose status\n * is `'failed'`. 
The `cause` property contains the underlying error with its\n * message, stack trace, and optional error code.\n *\n * Use the static `WorkflowRunFailedError.is()` method for type-safe checking\n * in catch blocks.\n *\n * @example\n * ```ts\n * import { WorkflowRunFailedError } from \"workflow/internal/errors\";\n *\n * try {\n *   const result = await run.returnValue;\n * } catch (error) {\n *   if (WorkflowRunFailedError.is(error)) {\n *     console.error(`Run ${error.runId} failed:`, error.cause.message);\n *   }\n * }\n * ```\n */\nexport class WorkflowRunFailedError extends WorkflowError {\n  runId: string;\n  declare cause: Error & { code?: string };\n\n  constructor(runId: string, error: StructuredError) {\n    // Create a proper Error instance from the StructuredError to set as cause\n    // NOTE: custom error types do not get serialized/deserialized. Everything is an Error\n    const causeError = new Error(error.message);\n    if (error.stack) {\n      causeError.stack = error.stack;\n    }\n    if (error.code) {\n      (causeError as any).code = error.code;\n    }\n\n    super(`Workflow run \"${runId}\" failed: ${error.message}`, {\n      cause: causeError,\n    });\n    this.name = 'WorkflowRunFailedError';\n    this.runId = runId;\n  }\n\n  static is(value: unknown): value is WorkflowRunFailedError {\n    return isError(value) && value.name === 'WorkflowRunFailedError';\n  }\n}\n\n/**\n * Thrown when attempting to get results from an incomplete workflow run.\n *\n * This error occurs when you try to access the result of a workflow\n * that is still running or hasn't completed yet.\n */\nexport class WorkflowRunNotCompletedError extends WorkflowError {\n  runId: string;\n  status: string;\n\n  constructor(runId: string, status: string) {\n    super(`Workflow run \"${runId}\" has not completed`, {});\n    this.name = 'WorkflowRunNotCompletedError';\n    this.runId = runId;\n    this.status = status;\n  }\n\n  static is(value: unknown): value is 
WorkflowRunNotCompletedError {\n    return isError(value) && value.name === 'WorkflowRunNotCompletedError';\n  }\n}\n\n/**\n * Thrown when the Workflow runtime encounters an internal error.\n *\n * This error indicates an issue with workflow execution, such as\n * serialization failures, starting an invalid workflow function, or\n * other runtime problems.\n */\nexport class WorkflowRuntimeError extends WorkflowError {\n  constructor(message: string, options?: WorkflowErrorOptions) {\n    super(message, {\n      ...options,\n    });\n    this.name = 'WorkflowRuntimeError';\n  }\n\n  static is(value: unknown): value is WorkflowRuntimeError {\n    return isError(value) && value.name === 'WorkflowRuntimeError';\n  }\n}\n\n/**\n * Thrown when a step function is not registered in the current deployment.\n *\n * This is an infrastructure error \u2014 not a user code error. It typically means\n * something went wrong with the bundling/build tooling that caused the step\n * to not get built correctly.\n *\n * When this happens, the step fails (like a FatalError) and control is passed back\n * to the workflow function, which can optionally handle the failure gracefully.\n */\nexport class StepNotRegisteredError extends WorkflowRuntimeError {\n  stepName: string;\n\n  constructor(stepName: string) {\n    super(\n      `Step \"${stepName}\" is not registered in the current deployment. This usually indicates a build or bundling issue that caused the step to not be included in the deployment.`,\n      { slug: ERROR_SLUGS.STEP_NOT_REGISTERED }\n    );\n    this.name = 'StepNotRegisteredError';\n    this.stepName = stepName;\n  }\n\n  static is(value: unknown): value is StepNotRegisteredError {\n    return isError(value) && value.name === 'StepNotRegisteredError';\n  }\n}\n\n/**\n * Thrown when a workflow function is not registered in the current deployment.\n *\n * This is an infrastructure error \u2014 not a user code error. 
It typically means:\n * - A run was started against a deployment that does not have the workflow\n *   (e.g., the workflow was renamed or moved and a new run targeted the latest deployment)\n * - Something went wrong with the bundling/build tooling that caused the workflow\n *   to not get built correctly\n *\n * When this happens, the run fails with a `RUNTIME_ERROR` error code.\n */\nexport class WorkflowNotRegisteredError extends WorkflowRuntimeError {\n  workflowName: string;\n\n  constructor(workflowName: string) {\n    super(\n      `Workflow \"${workflowName}\" is not registered in the current deployment. This usually means a run was started against a deployment that does not have this workflow, or there was a build/bundling issue.`,\n      { slug: ERROR_SLUGS.WORKFLOW_NOT_REGISTERED }\n    );\n    this.name = 'WorkflowNotRegisteredError';\n    this.workflowName = workflowName;\n  }\n\n  static is(value: unknown): value is WorkflowNotRegisteredError {\n    return isError(value) && value.name === 'WorkflowNotRegisteredError';\n  }\n}\n\n/**\n * Thrown when performing operations on a workflow run that does not exist.\n *\n * This error occurs when you call methods on a run object (e.g. `run.status`,\n * `run.cancel()`, `run.returnValue`) but the underlying run ID does not match\n * any known workflow run. 
Note that `getRun(id)` itself is synchronous and will\n * not throw \u2014 this error is raised when subsequent operations discover the run\n * is missing.\n *\n * Use the static `WorkflowRunNotFoundError.is()` method for type-safe checking\n * in catch blocks.\n *\n * @example\n * ```ts\n * import { WorkflowRunNotFoundError } from \"workflow/internal/errors\";\n *\n * try {\n *   const status = await run.status;\n * } catch (error) {\n *   if (WorkflowRunNotFoundError.is(error)) {\n *     console.error(`Run ${error.runId} does not exist`);\n *   }\n * }\n * ```\n */\nexport class WorkflowRunNotFoundError extends WorkflowError {\n  runId: string;\n\n  constructor(runId: string) {\n    super(`Workflow run \"${runId}\" not found`, {});\n    this.name = 'WorkflowRunNotFoundError';\n    this.runId = runId;\n  }\n\n  static is(value: unknown): value is WorkflowRunNotFoundError {\n    return isError(value) && value.name === 'WorkflowRunNotFoundError';\n  }\n}\n\n/**\n * Thrown when a hook token is already in use by another active workflow run.\n *\n * This is a user error \u2014 it means the same custom token was passed to\n * `createHook` in two or more concurrent runs. 
Use a unique token per run\n * (or omit the token to let the runtime generate one automatically).\n */\nexport class HookConflictError extends WorkflowError {\n  token: string;\n\n  constructor(token: string) {\n    super(`Hook token \"${token}\" is already in use by another workflow`, {\n      slug: ERROR_SLUGS.HOOK_CONFLICT,\n    });\n    this.name = 'HookConflictError';\n    this.token = token;\n  }\n\n  static is(value: unknown): value is HookConflictError {\n    return isError(value) && value.name === 'HookConflictError';\n  }\n}\n\n/**\n * Thrown when calling `resumeHook()` or `resumeWebhook()` with a token that\n * does not match any active hook.\n *\n * Common causes:\n * - The hook has expired (past its TTL)\n * - The hook was already disposed after being consumed\n * - The workflow has not started yet, so the hook does not exist\n *\n * A common pattern is to catch this error and start a new workflow run when\n * the hook does not exist yet (the \"resume or start\" pattern).\n *\n * Use the static `HookNotFoundError.is()` method for type-safe checking in\n * catch blocks.\n *\n * @example\n * ```ts\n * import { HookNotFoundError } from \"workflow/internal/errors\";\n *\n * try {\n *   await resumeHook(token, payload);\n * } catch (error) {\n *   if (HookNotFoundError.is(error)) {\n *     // Hook doesn't exist \u2014 start a new workflow run instead\n *     await startWorkflow(\"myWorkflow\", payload);\n *   }\n * }\n * ```\n */\nexport class HookNotFoundError extends WorkflowError {\n  token: string;\n\n  constructor(token: string) {\n    super('Hook not found', {});\n    this.name = 'HookNotFoundError';\n    this.token = token;\n  }\n\n  static is(value: unknown): value is HookNotFoundError {\n    return isError(value) && value.name === 'HookNotFoundError';\n  }\n}\n\n/**\n * Thrown when an operation conflicts with the current state of an entity.\n * This includes attempts to modify an entity already in a terminal state,\n * create an entity that already 
exists, or any other 409-style conflict.\n *\n * The workflow runtime handles this error automatically. Users interacting\n * with world storage backends directly may encounter it.\n */\nexport class EntityConflictError extends WorkflowWorldError {\n  constructor(message: string) {\n    super(message);\n    this.name = 'EntityConflictError';\n  }\n\n  static is(value: unknown): value is EntityConflictError {\n    return isError(value) && value.name === 'EntityConflictError';\n  }\n}\n\n/**\n * Thrown when a run is no longer available \u2014 either because it has been\n * cleaned up, expired, or already reached a terminal state (completed/failed).\n *\n * The workflow runtime handles this error automatically. Users interacting\n * with world storage backends directly may encounter it.\n */\nexport class RunExpiredError extends WorkflowWorldError {\n  constructor(message: string) {\n    super(message);\n    this.name = 'RunExpiredError';\n  }\n\n  static is(value: unknown): value is RunExpiredError {\n    return isError(value) && value.name === 'RunExpiredError';\n  }\n}\n\n/**\n * Thrown when an operation cannot proceed because a required timestamp\n * (e.g. retryAfter) has not been reached yet.\n *\n * The workflow runtime handles this error automatically. 
Users interacting\n * with world storage backends directly may encounter it.\n *\n * @property retryAfter - Delay in seconds before the operation can be retried.\n */\nexport class TooEarlyError extends WorkflowWorldError {\n  constructor(message: string, options?: { retryAfter?: number }) {\n    super(message, { retryAfter: options?.retryAfter });\n    this.name = 'TooEarlyError';\n  }\n\n  static is(value: unknown): value is TooEarlyError {\n    return isError(value) && value.name === 'TooEarlyError';\n  }\n}\n\n/**\n * Thrown when a request is rate limited by the workflow backend.\n *\n * The workflow runtime handles this error automatically with retry logic.\n * Users interacting with world storage backends directly may encounter it\n * if retries are exhausted.\n *\n * @property retryAfter - Delay in seconds before the request can be retried.\n */\nexport class ThrottleError extends WorkflowWorldError {\n  retryAfter?: number;\n\n  constructor(message: string, options?: { retryAfter?: number }) {\n    super(message);\n    this.name = 'ThrottleError';\n    this.retryAfter = options?.retryAfter;\n  }\n\n  static is(value: unknown): value is ThrottleError {\n    return isError(value) && value.name === 'ThrottleError';\n  }\n}\n\n/**\n * Thrown when awaiting `run.returnValue` on a workflow run that was cancelled.\n *\n * This error indicates that the workflow was explicitly cancelled (via\n * `run.cancel()`) and will not produce a return value. 
You can check for\n * cancellation before awaiting the return value by inspecting `run.status`.\n *\n * Use the static `WorkflowRunCancelledError.is()` method for type-safe\n * checking in catch blocks.\n *\n * @example\n * ```ts\n * import { WorkflowRunCancelledError } from \"workflow/internal/errors\";\n *\n * try {\n *   const result = await run.returnValue;\n * } catch (error) {\n *   if (WorkflowRunCancelledError.is(error)) {\n *     console.log(`Run ${error.runId} was cancelled`);\n *   }\n * }\n * ```\n */\nexport class WorkflowRunCancelledError extends WorkflowError {\n  runId: string;\n\n  constructor(runId: string) {\n    super(`Workflow run \"${runId}\" cancelled`, {});\n    this.name = 'WorkflowRunCancelledError';\n    this.runId = runId;\n  }\n\n  static is(value: unknown): value is WorkflowRunCancelledError {\n    return isError(value) && value.name === 'WorkflowRunCancelledError';\n  }\n}\n\n/**\n * Thrown when attempting to operate on a workflow run that requires a newer World version.\n *\n * This error occurs when a run was created with a newer spec version than the\n * current World implementation supports. 
To resolve this, upgrade your\n * `workflow` packages to a version that supports the required spec version.\n *\n * Use the static `RunNotSupportedError.is()` method for type-safe checking in\n * catch blocks.\n *\n * @example\n * ```ts\n * import { RunNotSupportedError } from \"workflow/internal/errors\";\n *\n * try {\n *   const status = await run.status;\n * } catch (error) {\n *   if (RunNotSupportedError.is(error)) {\n *     console.error(\n *       `Run requires spec v${error.runSpecVersion}, ` +\n *       `but world supports v${error.worldSpecVersion}`\n *     );\n *   }\n * }\n * ```\n */\nexport class RunNotSupportedError extends WorkflowError {\n  readonly runSpecVersion: number;\n  readonly worldSpecVersion: number;\n\n  constructor(runSpecVersion: number, worldSpecVersion: number) {\n    super(\n      `Run requires spec version ${runSpecVersion}, but world supports version ${worldSpecVersion}. ` +\n        `Please upgrade 'workflow' package.`\n    );\n    this.name = 'RunNotSupportedError';\n    this.runSpecVersion = runSpecVersion;\n    this.worldSpecVersion = worldSpecVersion;\n  }\n\n  static is(value: unknown): value is RunNotSupportedError {\n    return isError(value) && value.name === 'RunNotSupportedError';\n  }\n}\n\n/**\n * A fatal error is an error that cannot be retried.\n * It will cause the step to fail and the error will\n * be bubbled up to the workflow logic.\n */\nexport class FatalError extends Error {\n  fatal = true;\n\n  constructor(message: string) {\n    super(message);\n    this.name = 'FatalError';\n  }\n\n  static is(value: unknown): value is FatalError {\n    return isError(value) && value.name === 'FatalError';\n  }\n}\n\nexport interface RetryableErrorOptions {\n  /**\n   * The number of milliseconds to wait before retrying the step.\n   * Can also be a duration string (e.g., \"5s\", \"2m\") or a Date object.\n   * If not provided, the step will be retried after 1 second (1000 milliseconds).\n   */\n  retryAfter?: number | 
StringValue | Date;\n}\n\n/**\n * An error that can happen during a step execution, allowing\n * for configuration of the retry behavior.\n */\nexport class RetryableError extends Error {\n  /**\n   * The Date when the step should be retried.\n   */\n  retryAfter: Date;\n\n  constructor(message: string, options: RetryableErrorOptions = {}) {\n    super(message);\n    this.name = 'RetryableError';\n\n    if (options.retryAfter !== undefined) {\n      this.retryAfter = parseDurationToDate(options.retryAfter);\n    } else {\n      // Default to 1 second (1000 milliseconds)\n      this.retryAfter = new Date(Date.now() + 1000);\n    }\n  }\n\n  static is(value: unknown): value is RetryableError {\n    return isError(value) && value.name === 'RetryableError';\n  }\n}\n\nexport const VERCEL_403_ERROR_MESSAGE =\n  'Your current vercel account does not have access to this resource. Use `vercel login` or `vercel switch` to ensure you are linked to the right account.';\n\nexport { RUN_ERROR_CODES, type RunErrorCode } from './error-codes.js';\n", "/**\n * Just the core utilities that are meant to be imported by user\n * steps/workflows. This allows the bundler to tree-shake and limit what goes\n * into the final user bundles. Logic for running/handling steps/workflows\n * should live in runtime. 
Eventually these might be separate packages\n * `workflow` and `workflow/runtime`?\n *\n * Everything here will get re-exported under the 'workflow' top level package.\n * This should be a minimal set of APIs so **do not anything here** unless it's\n * needed for userland workflow code.\n */\n\nexport {\n  FatalError,\n  RetryableError,\n  type RetryableErrorOptions,\n} from '@workflow/errors';\nexport {\n  createHook,\n  createWebhook,\n  type Hook,\n  type HookOptions,\n  type RequestWithResponse,\n  type Webhook,\n  type WebhookOptions,\n} from './create-hook.js';\nexport { defineHook, type TypedHook } from './define-hook.js';\nexport { sleep } from './sleep.js';\nexport {\n  getStepMetadata,\n  type StepMetadata,\n} from './step/get-step-metadata.js';\nexport {\n  getWorkflowMetadata,\n  type WorkflowMetadata,\n} from './step/get-workflow-metadata.js';\nexport {\n  getWritable,\n  type WorkflowWritableStreamOptions,\n} from './step/writable-stream.js';\n", "\n    // Built in steps\n    import 'workflow/internal/builtins';\n    // User steps\n    import '../../../../packages/workflow/dist/stdlib.js';\nimport './workflows/order-saga.ts';\n    // Serde files for cross-context class registration\n    \n    // API entrypoint\n    export { stepEntrypoint as POST } from 'workflow/runtime';"],
  "mappings": ";;;;;;;AAAA,SAAA,4BAAA;AASE,eAAW,kCAAA;AACX,SAAO,KAAK,YAAW;AACzB;AAFa;AAIb,eAAsB,0BAAuB;AAC3C,SAAA,KAAW,KAAA;;AADS;AAGtB,eAAC,0BAAA;AAED,SAAO,KAAK,KAAA;;AAFX;qBAIiB,mCAAG,+BAAA;AACrB,qBAAC,2BAAA,uBAAA;;;;ACrBD,SAAA,wBAAAA,6BAAA;AAaA,eAAsB,SAAkD,MAAA;AACtE,SAAA,WAAW,MAAA,GAAA,IAAA;;AADS;AAGtBC,sBAAC,gDAAA,KAAA;;;AChBD,SAAS,wBAAAC,6BAA4B;;;ACArC,SAAS,iBAAiB;AAC1B,SACE,gBACA,eACA,yBACD;AACD,SAAS,MAAiC,qBAAqB;AAC/D,SAAS,2BAA2B;AACpC,SACE,qBACA,4BACA,uBACD;;;ACgjBD,SAAM,uBAAsB;;;AChjB5B,SACE,YACA,qBAED;AACD,SACE,kBACA;AAOF,SAAS,aAA4B;AACrC,SAAS,uBAAa;AACtB,SACE,2BAEK;AACP,SACE,mBAAmB;;;AH9BrB,IAAM,mBAAmB,8BAAO,SAAS,UAAQ;AAC7C,QAAM,cAAc,MAAM,UAAU,QAAQ;AAAA,IACxC,gBAAgB,aAAa,OAAO;AAAA,IACpC;AAAA,EACJ,CAAC;AACD,SAAO;AACX,GANyB;AAOzB,IAAM,gBAAgB,8BAAO,SAAS,WAAS;AAC3C,QAAM,SAAS,MAAM,gBAAgB,OAAO;AAAA,IACxC,gBAAgB,WAAW,OAAO;AAAA,IAClC;AAAA,EACJ,CAAC;AACD,SAAO;AACX,GANsB;AAOtB,IAAM,eAAe,8BAAO,SAAS,YAAU;AAC3C,QAAM,WAAW,MAAM,QAAQ,KAAK;AAAA,IAChC,gBAAgB,YAAY,OAAO;AAAA,IACnC;AAAA,EACJ,CAAC;AACD,SAAO;AACX,GANqB;AAOrB,IAAM,gBAAgB,8BAAO,SAAS,aAAW;AAC7C,QAAM,gBAAgB,OAAO;AAAA,IACzB,gBAAgB,UAAU,OAAO;AAAA,IACjC;AAAA,EACJ,CAAC;AACL,GALsB;AAMtB,IAAM,mBAAmB,8BAAO,SAAS,kBAAgB;AACrD,QAAM,UAAU,QAAQ;AAAA,IACpB,gBAAgB,WAAW,OAAO;AAAA,IAClC;AAAA,EACJ,CAAC;AACL,GALyB;AAMzB,IAAM,mBAAmB,8BAAO,SAAS,UAAQ;AAC7C,QAAM,aAAa,KAAK;AAAA,IACpB,gBAAgB,gBAAgB,OAAO;AAAA,IACvC,IAAI;AAAA,IACJ,UAAU;AAAA,EACd,CAAC;AACL,GANyB;AAOzB,eAAO,UAAiC,SAAS,QAAQ,OAAO,SAAS,OAAO;AAC5E,QAAM,IAAI,MAAM,4HAA4H;AAChJ;AAF8B;AAG9B,UAAU,aAAa;AACvBC,sBAAqB,kDAAkD,gBAAgB;AACvFA,sBAAqB,+CAA+C,aAAa;AACjFA,sBAAqB,8CAA8C,YAAY;AAC/EA,sBAAqB,+CAA+C,aAAa;AACjFA,sBAAqB,kDAAkD,gBAAgB;AACvFA,sBAAqB,kDAAkD,gBAAgB;;;AI3CnF,SAA2B,sBAAY;",
  "names": ["registerStepFunction", "registerStepFunction", "registerStepFunction", "registerStepFunction"]
}
 diff --git a/tests/fixtures/workflow-skills/compensation-saga/.workflow-vitest/steps.mjs.debug.json b/tests/fixtures/workflow-skills/compensation-saga/.workflow-vitest/steps.mjs.debug.json new file mode 100644 index 0000000000..986d37bbeb --- /dev/null +++ b/tests/fixtures/workflow-skills/compensation-saga/.workflow-vitest/steps.mjs.debug.json @@ -0,0 +1,10 @@ +{ + "stepFiles": [ + "/Users/johnlindquist/dev/workflow/packages/workflow/dist/stdlib.js", + "/Users/johnlindquist/dev/workflow/tests/fixtures/workflow-skills/compensation-saga/workflows/order-saga.ts" + ], + "workflowFiles": [ + "/Users/johnlindquist/dev/workflow/tests/fixtures/workflow-skills/compensation-saga/workflows/order-saga.ts" + ], + "serdeOnlyFiles": [] +} diff --git a/tests/fixtures/workflow-skills/compensation-saga/.workflow-vitest/workflows.mjs b/tests/fixtures/workflow-skills/compensation-saga/.workflow-vitest/workflows.mjs new file mode 100644 index 0000000000..77069faf88 --- /dev/null +++ b/tests/fixtures/workflow-skills/compensation-saga/.workflow-vitest/workflows.mjs @@ -0,0 +1,215 @@ +// biome-ignore-all lint: generated file +/* eslint-disable */ +import { workflowEntrypoint } from 'workflow/runtime'; + +const workflowCode = `globalThis.__private_workflows = new Map(); +var __create = Object.create; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __getProtoOf = Object.getPrototypeOf; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __commonJS = (cb, mod) => function __require() { + return mod || (0, cb[__getOwnPropNames(cb)[0]])((mod = { exports: {} }).exports, mod), mod.exports; +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key 
!== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps( + // If the importer is in node compatibility mode or this is not an ESM + // file that has been converted to a CommonJS file using a Babel- + // compatible transform (i.e. "__esModule" has not been set), then set + // "default" to the CommonJS "module.exports" for node compatibility. + isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target, + mod +)); + +// ../../../../node_modules/.pnpm/ms@2.1.3/node_modules/ms/index.js +var require_ms = __commonJS({ + "../../../../node_modules/.pnpm/ms@2.1.3/node_modules/ms/index.js"(exports, module2) { + var s = 1e3; + var m = s * 60; + var h = m * 60; + var d = h * 24; + var w = d * 7; + var y = d * 365.25; + module2.exports = function(val, options) { + options = options || {}; + var type = typeof val; + if (type === "string" && val.length > 0) { + return parse(val); + } else if (type === "number" && isFinite(val)) { + return options.long ? fmtLong(val) : fmtShort(val); + } + throw new Error("val is not a non-empty string or a valid number. 
val=" + JSON.stringify(val)); + }; + function parse(str) { + str = String(str); + if (str.length > 100) { + return; + } + var match = /^(-?(?:\\d+)?\\.?\\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?\$/i.exec(str); + if (!match) { + return; + } + var n = parseFloat(match[1]); + var type = (match[2] || "ms").toLowerCase(); + switch (type) { + case "years": + case "year": + case "yrs": + case "yr": + case "y": + return n * y; + case "weeks": + case "week": + case "w": + return n * w; + case "days": + case "day": + case "d": + return n * d; + case "hours": + case "hour": + case "hrs": + case "hr": + case "h": + return n * h; + case "minutes": + case "minute": + case "mins": + case "min": + case "m": + return n * m; + case "seconds": + case "second": + case "secs": + case "sec": + case "s": + return n * s; + case "milliseconds": + case "millisecond": + case "msecs": + case "msec": + case "ms": + return n; + default: + return void 0; + } + } + __name(parse, "parse"); + function fmtShort(ms2) { + var msAbs = Math.abs(ms2); + if (msAbs >= d) { + return Math.round(ms2 / d) + "d"; + } + if (msAbs >= h) { + return Math.round(ms2 / h) + "h"; + } + if (msAbs >= m) { + return Math.round(ms2 / m) + "m"; + } + if (msAbs >= s) { + return Math.round(ms2 / s) + "s"; + } + return ms2 + "ms"; + } + __name(fmtShort, "fmtShort"); + function fmtLong(ms2) { + var msAbs = Math.abs(ms2); + if (msAbs >= d) { + return plural(ms2, msAbs, d, "day"); + } + if (msAbs >= h) { + return plural(ms2, msAbs, h, "hour"); + } + if (msAbs >= m) { + return plural(ms2, msAbs, m, "minute"); + } + if (msAbs >= s) { + return plural(ms2, msAbs, s, "second"); + } + return ms2 + " ms"; + } + __name(fmtLong, "fmtLong"); + function plural(ms2, msAbs, n, name) { + var isPlural = msAbs >= n * 1.5; + return Math.round(ms2 / n) + " " + name + (isPlural ? 
"s" : ""); + } + __name(plural, "plural"); + } +}); + +// ../../../../packages/utils/dist/time.js +var import_ms = __toESM(require_ms(), 1); + +// ../../../../packages/errors/dist/index.js +function isError(value) { + return typeof value === "object" && value !== null && "name" in value && "message" in value; +} +__name(isError, "isError"); +var FatalError = class extends Error { + static { + __name(this, "FatalError"); + } + fatal = true; + constructor(message) { + super(message); + this.name = "FatalError"; + } + static is(value) { + return isError(value) && value.name === "FatalError"; + } +}; + +// ../../../../packages/workflow/dist/stdlib.js +var fetch = globalThis[/* @__PURE__ */ Symbol.for("WORKFLOW_USE_STEP")]("step//./packages/workflow/dist/stdlib//fetch"); + +// workflows/order-saga.ts +var reserveInventory = globalThis[/* @__PURE__ */ Symbol.for("WORKFLOW_USE_STEP")]("step//./workflows/order-saga//reserveInventory"); +var chargePayment = globalThis[/* @__PURE__ */ Symbol.for("WORKFLOW_USE_STEP")]("step//./workflows/order-saga//chargePayment"); +var bookShipment = globalThis[/* @__PURE__ */ Symbol.for("WORKFLOW_USE_STEP")]("step//./workflows/order-saga//bookShipment"); +var refundPayment = globalThis[/* @__PURE__ */ Symbol.for("WORKFLOW_USE_STEP")]("step//./workflows/order-saga//refundPayment"); +var releaseInventory = globalThis[/* @__PURE__ */ Symbol.for("WORKFLOW_USE_STEP")]("step//./workflows/order-saga//releaseInventory"); +var sendConfirmation = globalThis[/* @__PURE__ */ Symbol.for("WORKFLOW_USE_STEP")]("step//./workflows/order-saga//sendConfirmation"); +async function orderSaga(orderId, amount, items, address, email) { + const reservation = await reserveInventory(orderId, items); + let charge; + try { + charge = await chargePayment(orderId, amount); + } catch (error) { + if (error instanceof FatalError) { + await releaseInventory(orderId, reservation.id); + throw error; + } + throw error; + } + try { + await bookShipment(orderId, address); + } 
catch (error) { + if (error instanceof FatalError) { + await refundPayment(orderId, charge.id); + await releaseInventory(orderId, reservation.id); + throw error; + } + throw error; + } + await sendConfirmation(orderId, email); + return { + orderId, + status: "fulfilled" + }; +} +__name(orderSaga, "orderSaga"); +orderSaga.workflowId = "workflow//./workflows/order-saga//orderSaga"; +globalThis.__private_workflows.set("workflow//./workflows/order-saga//orderSaga", orderSaga); +//# sourceMappingURL=data:application/json;base64,{
  "version": 3,
  "sources": ["../../../../node_modules/.pnpm/ms@2.1.3/node_modules/ms/index.js", "../../../../packages/utils/src/time.ts", "../../../../packages/errors/src/index.ts", "../../../../packages/workflow/src/stdlib.ts", "workflows/order-saga.ts"],
  "sourcesContent": ["/**\n * Helpers.\n */ var s = 1000;\nvar m = s * 60;\nvar h = m * 60;\nvar d = h * 24;\nvar w = d * 7;\nvar y = d * 365.25;\n/**\n * Parse or format the given `val`.\n *\n * Options:\n *\n *  - `long` verbose formatting [false]\n *\n * @param {String|Number} val\n * @param {Object} [options]\n * @throws {Error} throw an error if val is not a non-empty string or a number\n * @return {String|Number}\n * @api public\n */ module.exports = function(val, options) {\n    options = options || {};\n    var type = typeof val;\n    if (type === 'string' && val.length > 0) {\n        return parse(val);\n    } else if (type === 'number' && isFinite(val)) {\n        return options.long ? fmtLong(val) : fmtShort(val);\n    }\n    throw new Error('val is not a non-empty string or a valid number. val=' + JSON.stringify(val));\n};\n/**\n * Parse the given `str` and return milliseconds.\n *\n * @param {String} str\n * @return {Number}\n * @api private\n */ function parse(str) {\n    str = String(str);\n    if (str.length > 100) {\n        return;\n    }\n    var match = /^(-?(?:\\d+)?\\.?\\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?$/i.exec(str);\n    if (!match) {\n        return;\n    }\n    var n = parseFloat(match[1]);\n    var type = (match[2] || 'ms').toLowerCase();\n    switch(type){\n        case 'years':\n        case 'year':\n        case 'yrs':\n        case 'yr':\n        case 'y':\n            return n * y;\n        case 'weeks':\n        case 'week':\n        case 'w':\n            return n * w;\n        case 'days':\n        case 'day':\n        case 'd':\n            return n * d;\n        case 'hours':\n        case 'hour':\n        case 'hrs':\n        case 'hr':\n        case 'h':\n            return n * h;\n        case 'minutes':\n        case 'minute':\n        case 'mins':\n        case 'min':\n        case 'm':\n            return n * m;\n        case 'seconds':\n        
case 'second':\n        case 'secs':\n        case 'sec':\n        case 's':\n            return n * s;\n        case 'milliseconds':\n        case 'millisecond':\n        case 'msecs':\n        case 'msec':\n        case 'ms':\n            return n;\n        default:\n            return undefined;\n    }\n}\n/**\n * Short format for `ms`.\n *\n * @param {Number} ms\n * @return {String}\n * @api private\n */ function fmtShort(ms) {\n    var msAbs = Math.abs(ms);\n    if (msAbs >= d) {\n        return Math.round(ms / d) + 'd';\n    }\n    if (msAbs >= h) {\n        return Math.round(ms / h) + 'h';\n    }\n    if (msAbs >= m) {\n        return Math.round(ms / m) + 'm';\n    }\n    if (msAbs >= s) {\n        return Math.round(ms / s) + 's';\n    }\n    return ms + 'ms';\n}\n/**\n * Long format for `ms`.\n *\n * @param {Number} ms\n * @return {String}\n * @api private\n */ function fmtLong(ms) {\n    var msAbs = Math.abs(ms);\n    if (msAbs >= d) {\n        return plural(ms, msAbs, d, 'day');\n    }\n    if (msAbs >= h) {\n        return plural(ms, msAbs, h, 'hour');\n    }\n    if (msAbs >= m) {\n        return plural(ms, msAbs, m, 'minute');\n    }\n    if (msAbs >= s) {\n        return plural(ms, msAbs, s, 'second');\n    }\n    return ms + ' ms';\n}\n/**\n * Pluralization helper.\n */ function plural(ms, msAbs, n, name) {\n    var isPlural = msAbs >= n * 1.5;\n    return Math.round(ms / n) + ' ' + name + (isPlural ? 's' : '');\n}\n", "import type { StringValue } from 'ms';\nimport ms from 'ms';\n\n/**\n * Parses a duration parameter (string, number, or Date) and returns a Date object\n * representing when the duration should elapse.\n *\n * - For strings: Parses duration strings like \"1s\", \"5m\", \"1h\", etc. 
using the `ms` library\n * - For numbers: Treats as milliseconds from now\n * - For Date objects: Returns the date directly (handles both Date instances and date-like objects from deserialization)\n *\n * @param param - The duration parameter (StringValue, Date, or number of milliseconds)\n * @returns A Date object representing when the duration should elapse\n * @throws {Error} If the parameter is invalid or cannot be parsed\n */\nexport function parseDurationToDate(param: StringValue | Date | number): Date {\n  if (typeof param === 'string') {\n    const durationMs = ms(param);\n    if (typeof durationMs !== 'number' || durationMs < 0) {\n      throw new Error(\n        `Invalid duration: \"${param}\". Expected a valid duration string like \"1s\", \"1m\", \"1h\", etc.`\n      );\n    }\n    return new Date(Date.now() + durationMs);\n  } else if (typeof param === 'number') {\n    if (param < 0 || !Number.isFinite(param)) {\n      throw new Error(\n        `Invalid duration: ${param}. Expected a non-negative finite number of milliseconds.`\n      );\n    }\n    return new Date(Date.now() + param);\n  } else if (\n    param instanceof Date ||\n    (param &&\n      typeof param === 'object' &&\n      typeof (param as any).getTime === 'function')\n  ) {\n    // Handle both Date instances and date-like objects (from deserialization)\n    return param instanceof Date ? param : new Date((param as any).getTime());\n  } else {\n    throw new Error(\n      `Invalid duration parameter. 
Expected a duration string, number (milliseconds), or Date object.`\n    );\n  }\n}\n", "import { parseDurationToDate } from '@workflow/utils';\nimport type { StructuredError } from '@workflow/world';\nimport type { StringValue } from 'ms';\n\nconst BASE_URL = 'https://useworkflow.dev/err';\n\n/**\n * @internal\n * Check if a value is an Error without relying on Node.js utilities.\n * This is needed for error classes that can be used in VM contexts where\n * Node.js imports are not available.\n */\nfunction isError(value: unknown): value is { name: string; message: string } {\n  return (\n    typeof value === 'object' &&\n    value !== null &&\n    'name' in value &&\n    'message' in value\n  );\n}\n\n/**\n * @internal\n * All the slugs of the errors used for documentation links.\n */\nexport const ERROR_SLUGS = {\n  NODE_JS_MODULE_IN_WORKFLOW: 'node-js-module-in-workflow',\n  START_INVALID_WORKFLOW_FUNCTION: 'start-invalid-workflow-function',\n  SERIALIZATION_FAILED: 'serialization-failed',\n  WEBHOOK_INVALID_RESPOND_WITH_VALUE: 'webhook-invalid-respond-with-value',\n  WEBHOOK_RESPONSE_NOT_SENT: 'webhook-response-not-sent',\n  FETCH_IN_WORKFLOW_FUNCTION: 'fetch-in-workflow',\n  TIMEOUT_FUNCTIONS_IN_WORKFLOW: 'timeout-in-workflow',\n  HOOK_CONFLICT: 'hook-conflict',\n  CORRUPTED_EVENT_LOG: 'corrupted-event-log',\n  STEP_NOT_REGISTERED: 'step-not-registered',\n  WORKFLOW_NOT_REGISTERED: 'workflow-not-registered',\n} as const;\n\ntype ErrorSlug = (typeof ERROR_SLUGS)[keyof typeof ERROR_SLUGS];\n\ninterface WorkflowErrorOptions extends ErrorOptions {\n  /**\n   * The slug of the error. 
This will be used to generate a link to the error documentation.\n   */\n  slug?: ErrorSlug;\n}\n\n/**\n * The base class for all Workflow-related errors.\n *\n * This error is thrown by the Workflow DevKit when internal operations fail.\n * You can use this class with `instanceof` to catch any Workflow DevKit error.\n *\n * @example\n * ```ts\n * try {\n *   await getRun(runId);\n * } catch (error) {\n *   if (error instanceof WorkflowError) {\n *     console.error('Workflow DevKit error:', error.message);\n *   }\n * }\n * ```\n */\nexport class WorkflowError extends Error {\n  readonly cause?: unknown;\n\n  constructor(message: string, options?: WorkflowErrorOptions) {\n    const msgDocs = options?.slug\n      ? `${message}\\n\\nLearn more: ${BASE_URL}/${options.slug}`\n      : message;\n    super(msgDocs, { cause: options?.cause });\n    this.cause = options?.cause;\n\n    if (options?.cause instanceof Error) {\n      this.stack = `${this.stack}\\nCaused by: ${options.cause.stack}`;\n    }\n  }\n\n  static is(value: unknown): value is WorkflowError {\n    return isError(value) && value.name === 'WorkflowError';\n  }\n}\n\n/**\n * Thrown when a world (storage backend) operation fails unexpectedly.\n *\n * This is the catch-all error for world implementations. Specific,\n * well-known failure modes have dedicated error types (e.g.\n * EntityConflictError, RunExpiredError, ThrottleError). 
This error\n * covers everything else \u2014 validation failures, missing entities\n * without a dedicated type, or unexpected HTTP errors from world-vercel.\n */\nexport class WorkflowWorldError extends WorkflowError {\n  status?: number;\n  code?: string;\n  url?: string;\n  /** Retry-After value in seconds, present on 429 and 425 responses */\n  retryAfter?: number;\n\n  constructor(\n    message: string,\n    options?: {\n      status?: number;\n      url?: string;\n      code?: string;\n      retryAfter?: number;\n      cause?: unknown;\n    }\n  ) {\n    super(message, {\n      cause: options?.cause,\n    });\n    this.name = 'WorkflowWorldError';\n    this.status = options?.status;\n    this.code = options?.code;\n    this.url = options?.url;\n    this.retryAfter = options?.retryAfter;\n  }\n\n  static is(value: unknown): value is WorkflowWorldError {\n    return isError(value) && value.name === 'WorkflowWorldError';\n  }\n}\n\n/**\n * Thrown when a workflow run fails during execution.\n *\n * This error indicates that the workflow encountered a fatal error and cannot\n * continue. It is thrown when awaiting `run.returnValue` on a run whose status\n * is `'failed'`. 
The `cause` property contains the underlying error with its\n * message, stack trace, and optional error code.\n *\n * Use the static `WorkflowRunFailedError.is()` method for type-safe checking\n * in catch blocks.\n *\n * @example\n * ```ts\n * import { WorkflowRunFailedError } from \"workflow/internal/errors\";\n *\n * try {\n *   const result = await run.returnValue;\n * } catch (error) {\n *   if (WorkflowRunFailedError.is(error)) {\n *     console.error(`Run ${error.runId} failed:`, error.cause.message);\n *   }\n * }\n * ```\n */\nexport class WorkflowRunFailedError extends WorkflowError {\n  runId: string;\n  declare cause: Error & { code?: string };\n\n  constructor(runId: string, error: StructuredError) {\n    // Create a proper Error instance from the StructuredError to set as cause\n    // NOTE: custom error types do not get serialized/deserialized. Everything is an Error\n    const causeError = new Error(error.message);\n    if (error.stack) {\n      causeError.stack = error.stack;\n    }\n    if (error.code) {\n      (causeError as any).code = error.code;\n    }\n\n    super(`Workflow run \"${runId}\" failed: ${error.message}`, {\n      cause: causeError,\n    });\n    this.name = 'WorkflowRunFailedError';\n    this.runId = runId;\n  }\n\n  static is(value: unknown): value is WorkflowRunFailedError {\n    return isError(value) && value.name === 'WorkflowRunFailedError';\n  }\n}\n\n/**\n * Thrown when attempting to get results from an incomplete workflow run.\n *\n * This error occurs when you try to access the result of a workflow\n * that is still running or hasn't completed yet.\n */\nexport class WorkflowRunNotCompletedError extends WorkflowError {\n  runId: string;\n  status: string;\n\n  constructor(runId: string, status: string) {\n    super(`Workflow run \"${runId}\" has not completed`, {});\n    this.name = 'WorkflowRunNotCompletedError';\n    this.runId = runId;\n    this.status = status;\n  }\n\n  static is(value: unknown): value is 
WorkflowRunNotCompletedError {\n    return isError(value) && value.name === 'WorkflowRunNotCompletedError';\n  }\n}\n\n/**\n * Thrown when the Workflow runtime encounters an internal error.\n *\n * This error indicates an issue with workflow execution, such as\n * serialization failures, starting an invalid workflow function, or\n * other runtime problems.\n */\nexport class WorkflowRuntimeError extends WorkflowError {\n  constructor(message: string, options?: WorkflowErrorOptions) {\n    super(message, {\n      ...options,\n    });\n    this.name = 'WorkflowRuntimeError';\n  }\n\n  static is(value: unknown): value is WorkflowRuntimeError {\n    return isError(value) && value.name === 'WorkflowRuntimeError';\n  }\n}\n\n/**\n * Thrown when a step function is not registered in the current deployment.\n *\n * This is an infrastructure error \u2014 not a user code error. It typically means\n * something went wrong with the bundling/build tooling that caused the step\n * to not get built correctly.\n *\n * When this happens, the step fails (like a FatalError) and control is passed back\n * to the workflow function, which can optionally handle the failure gracefully.\n */\nexport class StepNotRegisteredError extends WorkflowRuntimeError {\n  stepName: string;\n\n  constructor(stepName: string) {\n    super(\n      `Step \"${stepName}\" is not registered in the current deployment. This usually indicates a build or bundling issue that caused the step to not be included in the deployment.`,\n      { slug: ERROR_SLUGS.STEP_NOT_REGISTERED }\n    );\n    this.name = 'StepNotRegisteredError';\n    this.stepName = stepName;\n  }\n\n  static is(value: unknown): value is StepNotRegisteredError {\n    return isError(value) && value.name === 'StepNotRegisteredError';\n  }\n}\n\n/**\n * Thrown when a workflow function is not registered in the current deployment.\n *\n * This is an infrastructure error \u2014 not a user code error. 
It typically means:\n * - A run was started against a deployment that does not have the workflow\n *   (e.g., the workflow was renamed or moved and a new run targeted the latest deployment)\n * - Something went wrong with the bundling/build tooling that caused the workflow\n *   to not get built correctly\n *\n * When this happens, the run fails with a `RUNTIME_ERROR` error code.\n */\nexport class WorkflowNotRegisteredError extends WorkflowRuntimeError {\n  workflowName: string;\n\n  constructor(workflowName: string) {\n    super(\n      `Workflow \"${workflowName}\" is not registered in the current deployment. This usually means a run was started against a deployment that does not have this workflow, or there was a build/bundling issue.`,\n      { slug: ERROR_SLUGS.WORKFLOW_NOT_REGISTERED }\n    );\n    this.name = 'WorkflowNotRegisteredError';\n    this.workflowName = workflowName;\n  }\n\n  static is(value: unknown): value is WorkflowNotRegisteredError {\n    return isError(value) && value.name === 'WorkflowNotRegisteredError';\n  }\n}\n\n/**\n * Thrown when performing operations on a workflow run that does not exist.\n *\n * This error occurs when you call methods on a run object (e.g. `run.status`,\n * `run.cancel()`, `run.returnValue`) but the underlying run ID does not match\n * any known workflow run. 
Note that `getRun(id)` itself is synchronous and will\n * not throw \u2014 this error is raised when subsequent operations discover the run\n * is missing.\n *\n * Use the static `WorkflowRunNotFoundError.is()` method for type-safe checking\n * in catch blocks.\n *\n * @example\n * ```ts\n * import { WorkflowRunNotFoundError } from \"workflow/internal/errors\";\n *\n * try {\n *   const status = await run.status;\n * } catch (error) {\n *   if (WorkflowRunNotFoundError.is(error)) {\n *     console.error(`Run ${error.runId} does not exist`);\n *   }\n * }\n * ```\n */\nexport class WorkflowRunNotFoundError extends WorkflowError {\n  runId: string;\n\n  constructor(runId: string) {\n    super(`Workflow run \"${runId}\" not found`, {});\n    this.name = 'WorkflowRunNotFoundError';\n    this.runId = runId;\n  }\n\n  static is(value: unknown): value is WorkflowRunNotFoundError {\n    return isError(value) && value.name === 'WorkflowRunNotFoundError';\n  }\n}\n\n/**\n * Thrown when a hook token is already in use by another active workflow run.\n *\n * This is a user error \u2014 it means the same custom token was passed to\n * `createHook` in two or more concurrent runs. 
Use a unique token per run\n * (or omit the token to let the runtime generate one automatically).\n */\nexport class HookConflictError extends WorkflowError {\n  token: string;\n\n  constructor(token: string) {\n    super(`Hook token \"${token}\" is already in use by another workflow`, {\n      slug: ERROR_SLUGS.HOOK_CONFLICT,\n    });\n    this.name = 'HookConflictError';\n    this.token = token;\n  }\n\n  static is(value: unknown): value is HookConflictError {\n    return isError(value) && value.name === 'HookConflictError';\n  }\n}\n\n/**\n * Thrown when calling `resumeHook()` or `resumeWebhook()` with a token that\n * does not match any active hook.\n *\n * Common causes:\n * - The hook has expired (past its TTL)\n * - The hook was already disposed after being consumed\n * - The workflow has not started yet, so the hook does not exist\n *\n * A common pattern is to catch this error and start a new workflow run when\n * the hook does not exist yet (the \"resume or start\" pattern).\n *\n * Use the static `HookNotFoundError.is()` method for type-safe checking in\n * catch blocks.\n *\n * @example\n * ```ts\n * import { HookNotFoundError } from \"workflow/internal/errors\";\n *\n * try {\n *   await resumeHook(token, payload);\n * } catch (error) {\n *   if (HookNotFoundError.is(error)) {\n *     // Hook doesn't exist \u2014 start a new workflow run instead\n *     await startWorkflow(\"myWorkflow\", payload);\n *   }\n * }\n * ```\n */\nexport class HookNotFoundError extends WorkflowError {\n  token: string;\n\n  constructor(token: string) {\n    super('Hook not found', {});\n    this.name = 'HookNotFoundError';\n    this.token = token;\n  }\n\n  static is(value: unknown): value is HookNotFoundError {\n    return isError(value) && value.name === 'HookNotFoundError';\n  }\n}\n\n/**\n * Thrown when an operation conflicts with the current state of an entity.\n * This includes attempts to modify an entity already in a terminal state,\n * create an entity that already 
exists, or any other 409-style conflict.\n *\n * The workflow runtime handles this error automatically. Users interacting\n * with world storage backends directly may encounter it.\n */\nexport class EntityConflictError extends WorkflowWorldError {\n  constructor(message: string) {\n    super(message);\n    this.name = 'EntityConflictError';\n  }\n\n  static is(value: unknown): value is EntityConflictError {\n    return isError(value) && value.name === 'EntityConflictError';\n  }\n}\n\n/**\n * Thrown when a run is no longer available \u2014 either because it has been\n * cleaned up, expired, or already reached a terminal state (completed/failed).\n *\n * The workflow runtime handles this error automatically. Users interacting\n * with world storage backends directly may encounter it.\n */\nexport class RunExpiredError extends WorkflowWorldError {\n  constructor(message: string) {\n    super(message);\n    this.name = 'RunExpiredError';\n  }\n\n  static is(value: unknown): value is RunExpiredError {\n    return isError(value) && value.name === 'RunExpiredError';\n  }\n}\n\n/**\n * Thrown when an operation cannot proceed because a required timestamp\n * (e.g. retryAfter) has not been reached yet.\n *\n * The workflow runtime handles this error automatically. 
Users interacting\n * with world storage backends directly may encounter it.\n *\n * @property retryAfter - Delay in seconds before the operation can be retried.\n */\nexport class TooEarlyError extends WorkflowWorldError {\n  constructor(message: string, options?: { retryAfter?: number }) {\n    super(message, { retryAfter: options?.retryAfter });\n    this.name = 'TooEarlyError';\n  }\n\n  static is(value: unknown): value is TooEarlyError {\n    return isError(value) && value.name === 'TooEarlyError';\n  }\n}\n\n/**\n * Thrown when a request is rate limited by the workflow backend.\n *\n * The workflow runtime handles this error automatically with retry logic.\n * Users interacting with world storage backends directly may encounter it\n * if retries are exhausted.\n *\n * @property retryAfter - Delay in seconds before the request can be retried.\n */\nexport class ThrottleError extends WorkflowWorldError {\n  retryAfter?: number;\n\n  constructor(message: string, options?: { retryAfter?: number }) {\n    super(message);\n    this.name = 'ThrottleError';\n    this.retryAfter = options?.retryAfter;\n  }\n\n  static is(value: unknown): value is ThrottleError {\n    return isError(value) && value.name === 'ThrottleError';\n  }\n}\n\n/**\n * Thrown when awaiting `run.returnValue` on a workflow run that was cancelled.\n *\n * This error indicates that the workflow was explicitly cancelled (via\n * `run.cancel()`) and will not produce a return value. 
You can check for\n * cancellation before awaiting the return value by inspecting `run.status`.\n *\n * Use the static `WorkflowRunCancelledError.is()` method for type-safe\n * checking in catch blocks.\n *\n * @example\n * ```ts\n * import { WorkflowRunCancelledError } from \"workflow/internal/errors\";\n *\n * try {\n *   const result = await run.returnValue;\n * } catch (error) {\n *   if (WorkflowRunCancelledError.is(error)) {\n *     console.log(`Run ${error.runId} was cancelled`);\n *   }\n * }\n * ```\n */\nexport class WorkflowRunCancelledError extends WorkflowError {\n  runId: string;\n\n  constructor(runId: string) {\n    super(`Workflow run \"${runId}\" cancelled`, {});\n    this.name = 'WorkflowRunCancelledError';\n    this.runId = runId;\n  }\n\n  static is(value: unknown): value is WorkflowRunCancelledError {\n    return isError(value) && value.name === 'WorkflowRunCancelledError';\n  }\n}\n\n/**\n * Thrown when attempting to operate on a workflow run that requires a newer World version.\n *\n * This error occurs when a run was created with a newer spec version than the\n * current World implementation supports. 
To resolve this, upgrade your\n * `workflow` packages to a version that supports the required spec version.\n *\n * Use the static `RunNotSupportedError.is()` method for type-safe checking in\n * catch blocks.\n *\n * @example\n * ```ts\n * import { RunNotSupportedError } from \"workflow/internal/errors\";\n *\n * try {\n *   const status = await run.status;\n * } catch (error) {\n *   if (RunNotSupportedError.is(error)) {\n *     console.error(\n *       `Run requires spec v${error.runSpecVersion}, ` +\n *       `but world supports v${error.worldSpecVersion}`\n *     );\n *   }\n * }\n * ```\n */\nexport class RunNotSupportedError extends WorkflowError {\n  readonly runSpecVersion: number;\n  readonly worldSpecVersion: number;\n\n  constructor(runSpecVersion: number, worldSpecVersion: number) {\n    super(\n      `Run requires spec version ${runSpecVersion}, but world supports version ${worldSpecVersion}. ` +\n        `Please upgrade 'workflow' package.`\n    );\n    this.name = 'RunNotSupportedError';\n    this.runSpecVersion = runSpecVersion;\n    this.worldSpecVersion = worldSpecVersion;\n  }\n\n  static is(value: unknown): value is RunNotSupportedError {\n    return isError(value) && value.name === 'RunNotSupportedError';\n  }\n}\n\n/**\n * A fatal error is an error that cannot be retried.\n * It will cause the step to fail and the error will\n * be bubbled up to the workflow logic.\n */\nexport class FatalError extends Error {\n  fatal = true;\n\n  constructor(message: string) {\n    super(message);\n    this.name = 'FatalError';\n  }\n\n  static is(value: unknown): value is FatalError {\n    return isError(value) && value.name === 'FatalError';\n  }\n}\n\nexport interface RetryableErrorOptions {\n  /**\n   * The number of milliseconds to wait before retrying the step.\n   * Can also be a duration string (e.g., \"5s\", \"2m\") or a Date object.\n   * If not provided, the step will be retried after 1 second (1000 milliseconds).\n   */\n  retryAfter?: number | 
StringValue | Date;\n}\n\n/**\n * An error that can happen during a step execution, allowing\n * for configuration of the retry behavior.\n */\nexport class RetryableError extends Error {\n  /**\n   * The Date when the step should be retried.\n   */\n  retryAfter: Date;\n\n  constructor(message: string, options: RetryableErrorOptions = {}) {\n    super(message);\n    this.name = 'RetryableError';\n\n    if (options.retryAfter !== undefined) {\n      this.retryAfter = parseDurationToDate(options.retryAfter);\n    } else {\n      // Default to 1 second (1000 milliseconds)\n      this.retryAfter = new Date(Date.now() + 1000);\n    }\n  }\n\n  static is(value: unknown): value is RetryableError {\n    return isError(value) && value.name === 'RetryableError';\n  }\n}\n\nexport const VERCEL_403_ERROR_MESSAGE =\n  'Your current vercel account does not have access to this resource. Use `vercel login` or `vercel switch` to ensure you are linked to the right account.';\n\nexport { RUN_ERROR_CODES, type RunErrorCode } from './error-codes.js';\n", "/**\n * This is the \"standard library\" of steps that we make available to all workflow users.\n * The can be imported like so: `import { fetch } from 'workflow'`. 
and used in workflow.\n * The need to be exported directly in this package and cannot live in `core` to prevent\n * circular dependencies post-compilation.\n */\n\n/**\n * A hoisted `fetch()` function that is executed as a \"step\" function,\n * for use within workflow functions.\n *\n * @see https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API\n */\nexport async function fetch(...args: Parameters<typeof globalThis.fetch>) {\n  'use step';\n  return globalThis.fetch(...args);\n}\n", "import { FatalError } from \"workflow\";\n/**__internal_workflows{\"workflows\":{\"workflows/order-saga.ts\":{\"default\":{\"workflowId\":\"workflow//./workflows/order-saga//orderSaga\"}}},\"steps\":{\"workflows/order-saga.ts\":{\"bookShipment\":{\"stepId\":\"step//./workflows/order-saga//bookShipment\"},\"chargePayment\":{\"stepId\":\"step//./workflows/order-saga//chargePayment\"},\"refundPayment\":{\"stepId\":\"step//./workflows/order-saga//refundPayment\"},\"releaseInventory\":{\"stepId\":\"step//./workflows/order-saga//releaseInventory\"},\"reserveInventory\":{\"stepId\":\"step//./workflows/order-saga//reserveInventory\"},\"sendConfirmation\":{\"stepId\":\"step//./workflows/order-saga//sendConfirmation\"}}}}*/;\nconst reserveInventory = globalThis[Symbol.for(\"WORKFLOW_USE_STEP\")](\"step//./workflows/order-saga//reserveInventory\");\nconst chargePayment = globalThis[Symbol.for(\"WORKFLOW_USE_STEP\")](\"step//./workflows/order-saga//chargePayment\");\nconst bookShipment = globalThis[Symbol.for(\"WORKFLOW_USE_STEP\")](\"step//./workflows/order-saga//bookShipment\");\nconst refundPayment = globalThis[Symbol.for(\"WORKFLOW_USE_STEP\")](\"step//./workflows/order-saga//refundPayment\");\nconst releaseInventory = globalThis[Symbol.for(\"WORKFLOW_USE_STEP\")](\"step//./workflows/order-saga//releaseInventory\");\nconst sendConfirmation = globalThis[Symbol.for(\"WORKFLOW_USE_STEP\")](\"step//./workflows/order-saga//sendConfirmation\");\nexport default async function orderSaga(orderId, 
amount, items, address, email) {\n    // Forward step 1: Reserve inventory\n    const reservation = await reserveInventory(orderId, items);\n    // Forward step 2: Charge payment\n    let charge;\n    try {\n        charge = await chargePayment(orderId, amount);\n    } catch (error) {\n        // Compensate: release inventory\n        if (error instanceof FatalError) {\n            await releaseInventory(orderId, reservation.id);\n            throw error;\n        }\n        throw error;\n    }\n    // Forward step 3: Book shipment\n    try {\n        await bookShipment(orderId, address);\n    } catch (error) {\n        // Compensate in reverse order: refund payment, then release inventory\n        if (error instanceof FatalError) {\n            await refundPayment(orderId, charge.id);\n            await releaseInventory(orderId, reservation.id);\n            throw error;\n        }\n        throw error;\n    }\n    // All forward steps succeeded\n    await sendConfirmation(orderId, email);\n    return {\n        orderId,\n        status: \"fulfilled\"\n    };\n}\norderSaga.workflowId = \"workflow//./workflows/order-saga//orderSaga\";\nglobalThis.__private_workflows.set(\"workflow//./workflows/order-saga//orderSaga\", orderSaga);\n"],
  "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA,8EAAAA,SAAA;AAEI,QAAI,IAAI;AACZ,QAAI,IAAI,IAAI;AACZ,QAAI,IAAI,IAAI;AACZ,QAAI,IAAI,IAAI;AACZ,QAAI,IAAI,IAAI;AACZ,QAAI,IAAI,IAAI;AAaR,IAAAA,QAAO,UAAU,SAAS,KAAK,SAAS;AACxC,gBAAU,WAAW,CAAC;AACtB,UAAI,OAAO,OAAO;AAClB,UAAI,SAAS,YAAY,IAAI,SAAS,GAAG;AACrC,eAAO,MAAM,GAAG;AAAA,MACpB,WAAW,SAAS,YAAY,SAAS,GAAG,GAAG;AAC3C,eAAO,QAAQ,OAAO,QAAQ,GAAG,IAAI,SAAS,GAAG;AAAA,MACrD;AACA,YAAM,IAAI,MAAM,0DAA0D,KAAK,UAAU,GAAG,CAAC;AAAA,IACjG;AAOI,aAAS,MAAM,KAAK;AACpB,YAAM,OAAO,GAAG;AAChB,UAAI,IAAI,SAAS,KAAK;AAClB;AAAA,MACJ;AACA,UAAI,QAAQ,mIAAmI,KAAK,GAAG;AACvJ,UAAI,CAAC,OAAO;AACR;AAAA,MACJ;AACA,UAAI,IAAI,WAAW,MAAM,CAAC,CAAC;AAC3B,UAAI,QAAQ,MAAM,CAAC,KAAK,MAAM,YAAY;AAC1C,cAAO,MAAK;AAAA,QACR,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK;AACD,iBAAO,IAAI;AAAA,QACf,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK;AACD,iBAAO,IAAI;AAAA,QACf,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK;AACD,iBAAO,IAAI;AAAA,QACf,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK;AACD,iBAAO,IAAI;AAAA,QACf,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK;AACD,iBAAO,IAAI;AAAA,QACf,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK;AACD,iBAAO,IAAI;AAAA,QACf,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK;AACD,iBAAO;AAAA,QACX;AACI,iBAAO;AAAA,MACf;AAAA,IACJ;AArDa;AA4DT,aAAS,SAASC,KAAI;AACtB,UAAI,QAAQ,KAAK,IAAIA,GAAE;AACvB,UAAI,SAAS,GAAG;AACZ,eAAO,KAAK,MAAMA,MAAK,CAAC,IAAI;AAAA,MAChC;AACA,UAAI,SAAS,GAAG;AACZ,eAAO,KAAK,MAAMA,MAAK,CAAC,IAAI;AAAA,MAChC;AACA,UAAI,SAAS,GAAG;AACZ,eAAO,KAAK,MAAMA,MAAK,CAAC,IAAI;AAAA,MAChC;AACA,UAAI,SAAS,GAAG;AACZ,eAAO,KAAK,MAAMA,MAAK,CAAC,IAAI;AAAA,MAChC;AACA,aAAOA,MAAK;AAAA,IAChB;AAfa;AAsBT,aAAS,QAAQA,KAAI;AACrB,UAAI,QAAQ,KAAK,IAAIA,GAAE;AACvB,UAAI,SAAS,GAAG;AACZ,eAAO,OAAOA,KAAI,OAAO,GAAG,KAAK;AAAA,MACrC;AACA,UAAI,SAAS,GAAG;AACZ,eAAO,OAAOA,KAAI,OAAO,GAAG,MAAM;AAAA,MACtC;AACA,UAAI,SAAS,GAAG;AACZ,eAAO,OAAOA,KAAI,OAAO,GAAG,QAAQ;AAAA,MACxC;AACA,UAAI,SAAS,GAAG;AACZ,eAAO,OAAOA,KAAI,OAAO,GAAG,QAA
Q;AAAA,MACxC;AACA,aAAOA,MAAK;AAAA,IAChB;AAfa;AAkBT,aAAS,OAAOA,KAAI,OAAO,GAAG,MAAM;AACpC,UAAI,WAAW,SAAS,IAAI;AAC5B,aAAO,KAAK,MAAMA,MAAK,CAAC,IAAI,MAAM,QAAQ,WAAW,MAAM;AAAA,IAC/D;AAHa;AAAA;AAAA;;;ACvIb,gBAAe;;;ACUZ,SAAA,QAAA,OAAA;AACH,SAAS,OAAQ,UAAc,YAAA,UAAA,QAAA,UAAA,SAAA,aAAA;;AAD5B;AAggBQ,IAAA,aAAA,cAAuB,MAAA;EA3gBlC,OA2gBkC;;;EACvB,QAAA;EAET,YAAY,SAAA;AACV,UACE,OAAA;SACE,OAAA;;SAGJ,GAAK,OAAA;AACL,WAAK,QAAA,KAAA,KAAmB,MAAA,SAAgB;EAC1C;;;;AC1gBC,IAAA,QAAA,WAAA,uBAAA,IAAA,mBAAA,CAAA,EAAA,8CAAA;;;ACVH,IAAM,mBAAmB,WAAW,uBAAO,IAAI,mBAAmB,CAAC,EAAE,gDAAgD;AACrH,IAAM,gBAAgB,WAAW,uBAAO,IAAI,mBAAmB,CAAC,EAAE,6CAA6C;AAC/G,IAAM,eAAe,WAAW,uBAAO,IAAI,mBAAmB,CAAC,EAAE,4CAA4C;AAC7G,IAAM,gBAAgB,WAAW,uBAAO,IAAI,mBAAmB,CAAC,EAAE,6CAA6C;AAC/G,IAAM,mBAAmB,WAAW,uBAAO,IAAI,mBAAmB,CAAC,EAAE,gDAAgD;AACrH,IAAM,mBAAmB,WAAW,uBAAO,IAAI,mBAAmB,CAAC,EAAE,gDAAgD;AACrH,eAAO,UAAiC,SAAS,QAAQ,OAAO,SAAS,OAAO;AAE5E,QAAM,cAAc,MAAM,iBAAiB,SAAS,KAAK;AAEzD,MAAI;AACJ,MAAI;AACA,aAAS,MAAM,cAAc,SAAS,MAAM;AAAA,EAChD,SAAS,OAAO;AAEZ,QAAI,iBAAiB,YAAY;AAC7B,YAAM,iBAAiB,SAAS,YAAY,EAAE;AAC9C,YAAM;AAAA,IACV;AACA,UAAM;AAAA,EACV;AAEA,MAAI;AACA,UAAM,aAAa,SAAS,OAAO;AAAA,EACvC,SAAS,OAAO;AAEZ,QAAI,iBAAiB,YAAY;AAC7B,YAAM,cAAc,SAAS,OAAO,EAAE;AACtC,YAAM,iBAAiB,SAAS,YAAY,EAAE;AAC9C,YAAM;AAAA,IACV;AACA,UAAM;AAAA,EACV;AAEA,QAAM,iBAAiB,SAAS,KAAK;AACrC,SAAO;AAAA,IACH;AAAA,IACA,QAAQ;AAAA,EACZ;AACJ;AAjC8B;AAkC9B,UAAU,aAAa;AACvB,WAAW,oBAAoB,IAAI,+CAA+C,SAAS;",
  "names": ["module", "ms"]
}
 +`; + +export const POST = workflowEntrypoint(workflowCode); diff --git a/tests/fixtures/workflow-skills/compensation-saga/.workflow-vitest/workflows.mjs.debug.json b/tests/fixtures/workflow-skills/compensation-saga/.workflow-vitest/workflows.mjs.debug.json new file mode 100644 index 0000000000..68a993ff54 --- /dev/null +++ b/tests/fixtures/workflow-skills/compensation-saga/.workflow-vitest/workflows.mjs.debug.json @@ -0,0 +1,6 @@ +{ + "workflowFiles": [ + "/Users/johnlindquist/dev/workflow/tests/fixtures/workflow-skills/compensation-saga/workflows/order-saga.ts" + ], + "serdeOnlyFiles": [] +} diff --git a/tests/fixtures/workflow-skills/duplicate-webhook-order/.workflow-vitest/steps.mjs b/tests/fixtures/workflow-skills/duplicate-webhook-order/.workflow-vitest/steps.mjs new file mode 100644 index 0000000000..f636c13757 --- /dev/null +++ b/tests/fixtures/workflow-skills/duplicate-webhook-order/.workflow-vitest/steps.mjs @@ -0,0 +1,164 @@ +// biome-ignore-all lint: generated file +/* eslint-disable */ + +var __defProp = Object.defineProperty; +var __name = (target, value) => + __defProp(target, 'name', { value, configurable: true }); + +// ../../../../packages/workflow/dist/internal/builtins.js +import { registerStepFunction } from 'workflow/internal/private'; +async function __builtin_response_array_buffer() { + return this.arrayBuffer(); +} +__name(__builtin_response_array_buffer, '__builtin_response_array_buffer'); +async function __builtin_response_json() { + return this.json(); +} +__name(__builtin_response_json, '__builtin_response_json'); +async function __builtin_response_text() { + return this.text(); +} +__name(__builtin_response_text, '__builtin_response_text'); +registerStepFunction( + '__builtin_response_array_buffer', + __builtin_response_array_buffer +); +registerStepFunction('__builtin_response_json', __builtin_response_json); +registerStepFunction('__builtin_response_text', __builtin_response_text); + +// ../../../../packages/workflow/dist/stdlib.js 
+import { registerStepFunction as registerStepFunction2 } from 'workflow/internal/private'; +async function fetch(...args) { + return globalThis.fetch(...args); +} +__name(fetch, 'fetch'); +registerStepFunction2('step//./packages/workflow/dist/stdlib//fetch', fetch); + +// workflows/shopify-order.ts +import { registerStepFunction as registerStepFunction3 } from 'workflow/internal/private'; + +// ../../../../packages/utils/dist/index.js +import { pluralize } from '../../../../../packages/utils/dist/pluralize.js'; +import { + parseClassName, + parseStepName, + parseWorkflowName, +} from '../../../../../packages/utils/dist/parse-name.js'; +import { + once, + withResolvers, +} from '../../../../../packages/utils/dist/promise.js'; +import { parseDurationToDate } from '../../../../../packages/utils/dist/time.js'; +import { + isVercelWorldTarget, + resolveWorkflowTargetWorld, + usesVercelWorld, +} from '../../../../../packages/utils/dist/world-target.js'; + +// ../../../../packages/errors/dist/index.js +import { RUN_ERROR_CODES } from '../../../../../packages/errors/dist/error-codes.js'; +function isError(value) { + return ( + typeof value === 'object' && + value !== null && + 'name' in value && + 'message' in value + ); +} +__name(isError, 'isError'); +var FatalError = class extends Error { + static { + __name(this, 'FatalError'); + } + fatal = true; + constructor(message) { + super(message); + this.name = 'FatalError'; + } + static is(value) { + return isError(value) && value.name === 'FatalError'; + } +}; + +// ../../../../packages/core/dist/index.js +import { + createHook, + createWebhook, +} from '../../../../../packages/core/dist/create-hook.js'; +import { defineHook } from '../../../../../packages/core/dist/define-hook.js'; +import { sleep } from '../../../../../packages/core/dist/sleep.js'; +import { getStepMetadata } from '../../../../../packages/core/dist/step/get-step-metadata.js'; +import { getWorkflowMetadata } from 
'../../../../../packages/core/dist/step/get-workflow-metadata.js'; +import { getWritable } from '../../../../../packages/core/dist/step/writable-stream.js'; + +// workflows/shopify-order.ts +var checkDuplicate = /* @__PURE__ */ __name(async (orderId) => { + const existing = await db.orders.findUnique({ + where: { + shopifyId: orderId, + }, + }); + if (existing?.status === 'completed') { + throw new FatalError(`Order ${orderId} already processed`); + } + return existing; +}, 'checkDuplicate'); +var chargePayment = /* @__PURE__ */ __name(async (orderId, amount) => { + const result = await paymentProvider.charge({ + idempotencyKey: `payment:${orderId}`, + amount, + }); + return result; +}, 'chargePayment'); +var reserveInventory = /* @__PURE__ */ __name(async (orderId, items) => { + const reservation = await warehouse.reserve({ + idempotencyKey: `inventory:${orderId}`, + items, + }); + return reservation; +}, 'reserveInventory'); +var refundPayment = /* @__PURE__ */ __name(async (orderId, chargeId) => { + await paymentProvider.refund({ + idempotencyKey: `refund:${orderId}`, + chargeId, + }); +}, 'refundPayment'); +var sendConfirmation = /* @__PURE__ */ __name(async (orderId, email) => { + await emailService.send({ + idempotencyKey: `confirmation:${orderId}`, + to: email, + template: 'order-confirmed', + }); +}, 'sendConfirmation'); +async function shopifyOrder(orderId, amount, items, email) { + throw new Error( + 'You attempted to execute workflow shopifyOrder function directly. 
To start a workflow, use start(shopifyOrder) from workflow/api' + ); +} +__name(shopifyOrder, 'shopifyOrder'); +shopifyOrder.workflowId = 'workflow//./workflows/shopify-order//shopifyOrder'; +registerStepFunction3( + 'step//./workflows/shopify-order//checkDuplicate', + checkDuplicate +); +registerStepFunction3( + 'step//./workflows/shopify-order//chargePayment', + chargePayment +); +registerStepFunction3( + 'step//./workflows/shopify-order//reserveInventory', + reserveInventory +); +registerStepFunction3( + 'step//./workflows/shopify-order//refundPayment', + refundPayment +); +registerStepFunction3( + 'step//./workflows/shopify-order//sendConfirmation', + sendConfirmation +); + +// virtual-entry.js +import { stepEntrypoint } from 'workflow/runtime'; +export { stepEntrypoint as POST }; +//# sourceMappingURL=data:application/json;base64,{
  "version": 3,
  "sources": ["../../../../../packages/workflow/src/internal/builtins.ts", "../../../../../packages/workflow/src/stdlib.ts", "../workflows/shopify-order.ts", "../../../../../packages/utils/src/index.ts", "../../../../../packages/errors/src/index.ts", "../../../../../packages/core/src/index.ts", "../virtual-entry.js"],
  "sourcesContent": ["/**\n * These are the built-in steps that are \"automatically available\" in the workflow scope. They are\n * similar to \"stdlib\" except that are not meant to be imported by users, but are instead \"just available\"\n * alongside user defined steps. They are used internally by the runtime\n */\n\nexport async function __builtin_response_array_buffer(\n  this: Request | Response\n) {\n  'use step';\n  return this.arrayBuffer();\n}\n\nexport async function __builtin_response_json(this: Request | Response) {\n  'use step';\n  return this.json();\n}\n\nexport async function __builtin_response_text(this: Request | Response) {\n  'use step';\n  return this.text();\n}\n", "/**\n * This is the \"standard library\" of steps that we make available to all workflow users.\n * The can be imported like so: `import { fetch } from 'workflow'`. and used in workflow.\n * The need to be exported directly in this package and cannot live in `core` to prevent\n * circular dependencies post-compilation.\n */\n\n/**\n * A hoisted `fetch()` function that is executed as a \"step\" function,\n * for use within workflow functions.\n *\n * @see https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API\n */\nexport async function fetch(...args: Parameters<typeof globalThis.fetch>) {\n  'use step';\n  return globalThis.fetch(...args);\n}\n", "import { registerStepFunction } from \"workflow/internal/private\";\nimport { FatalError } from 
\"workflow\";\n/**__internal_workflows{\"workflows\":{\"workflows/shopify-order.ts\":{\"default\":{\"workflowId\":\"workflow//./workflows/shopify-order//shopifyOrder\"}}},\"steps\":{\"workflows/shopify-order.ts\":{\"chargePayment\":{\"stepId\":\"step//./workflows/shopify-order//chargePayment\"},\"checkDuplicate\":{\"stepId\":\"step//./workflows/shopify-order//checkDuplicate\"},\"refundPayment\":{\"stepId\":\"step//./workflows/shopify-order//refundPayment\"},\"reserveInventory\":{\"stepId\":\"step//./workflows/shopify-order//reserveInventory\"},\"sendConfirmation\":{\"stepId\":\"step//./workflows/shopify-order//sendConfirmation\"}}}}*/;\nconst checkDuplicate = async (orderId)=>{\n    const existing = await db.orders.findUnique({\n        where: {\n            shopifyId: orderId\n        }\n    });\n    if (existing?.status === \"completed\") {\n        throw new FatalError(`Order ${orderId} already processed`);\n    }\n    return existing;\n};\nconst chargePayment = async (orderId, amount)=>{\n    const result = await paymentProvider.charge({\n        idempotencyKey: `payment:${orderId}`,\n        amount\n    });\n    return result;\n};\nconst reserveInventory = async (orderId, items)=>{\n    const reservation = await warehouse.reserve({\n        idempotencyKey: `inventory:${orderId}`,\n        items\n    });\n    return reservation;\n};\nconst refundPayment = async (orderId, chargeId)=>{\n    await paymentProvider.refund({\n        idempotencyKey: `refund:${orderId}`,\n        chargeId\n    });\n};\nconst sendConfirmation = async (orderId, email)=>{\n    await emailService.send({\n        idempotencyKey: `confirmation:${orderId}`,\n        to: email,\n        template: \"order-confirmed\"\n    });\n};\nexport default async function shopifyOrder(orderId, amount, items, email) {\n    throw new Error(\"You attempted to execute workflow shopifyOrder function directly. 
To start a workflow, use start(shopifyOrder) from workflow/api\");\n}\nshopifyOrder.workflowId = \"workflow//./workflows/shopify-order//shopifyOrder\";\nregisterStepFunction(\"step//./workflows/shopify-order//checkDuplicate\", checkDuplicate);\nregisterStepFunction(\"step//./workflows/shopify-order//chargePayment\", chargePayment);\nregisterStepFunction(\"step//./workflows/shopify-order//reserveInventory\", reserveInventory);\nregisterStepFunction(\"step//./workflows/shopify-order//refundPayment\", refundPayment);\nregisterStepFunction(\"step//./workflows/shopify-order//sendConfirmation\", sendConfirmation);\n", "export { pluralize } from './pluralize.js';\nexport {\n  parseClassName,\n  parseStepName,\n  parseWorkflowName,\n} from './parse-name.js';\nexport { once, type PromiseWithResolvers, withResolvers } from './promise.js';\nexport { parseDurationToDate } from './time.js';\nexport {\n  isVercelWorldTarget,\n  resolveWorkflowTargetWorld,\n  usesVercelWorld,\n} from './world-target.js';\n", "import { parseDurationToDate } from '@workflow/utils';\nimport type { StructuredError } from '@workflow/world';\nimport type { StringValue } from 'ms';\n\nconst BASE_URL = 'https://useworkflow.dev/err';\n\n/**\n * @internal\n * Check if a value is an Error without relying on Node.js utilities.\n * This is needed for error classes that can be used in VM contexts where\n * Node.js imports are not available.\n */\nfunction isError(value: unknown): value is { name: string; message: string } {\n  return (\n    typeof value === 'object' &&\n    value !== null &&\n    'name' in value &&\n    'message' in value\n  );\n}\n\n/**\n * @internal\n * All the slugs of the errors used for documentation links.\n */\nexport const ERROR_SLUGS = {\n  NODE_JS_MODULE_IN_WORKFLOW: 'node-js-module-in-workflow',\n  START_INVALID_WORKFLOW_FUNCTION: 'start-invalid-workflow-function',\n  SERIALIZATION_FAILED: 'serialization-failed',\n  WEBHOOK_INVALID_RESPOND_WITH_VALUE: 
'webhook-invalid-respond-with-value',\n  WEBHOOK_RESPONSE_NOT_SENT: 'webhook-response-not-sent',\n  FETCH_IN_WORKFLOW_FUNCTION: 'fetch-in-workflow',\n  TIMEOUT_FUNCTIONS_IN_WORKFLOW: 'timeout-in-workflow',\n  HOOK_CONFLICT: 'hook-conflict',\n  CORRUPTED_EVENT_LOG: 'corrupted-event-log',\n  STEP_NOT_REGISTERED: 'step-not-registered',\n  WORKFLOW_NOT_REGISTERED: 'workflow-not-registered',\n} as const;\n\ntype ErrorSlug = (typeof ERROR_SLUGS)[keyof typeof ERROR_SLUGS];\n\ninterface WorkflowErrorOptions extends ErrorOptions {\n  /**\n   * The slug of the error. This will be used to generate a link to the error documentation.\n   */\n  slug?: ErrorSlug;\n}\n\n/**\n * The base class for all Workflow-related errors.\n *\n * This error is thrown by the Workflow DevKit when internal operations fail.\n * You can use this class with `instanceof` to catch any Workflow DevKit error.\n *\n * @example\n * ```ts\n * try {\n *   await getRun(runId);\n * } catch (error) {\n *   if (error instanceof WorkflowError) {\n *     console.error('Workflow DevKit error:', error.message);\n *   }\n * }\n * ```\n */\nexport class WorkflowError extends Error {\n  readonly cause?: unknown;\n\n  constructor(message: string, options?: WorkflowErrorOptions) {\n    const msgDocs = options?.slug\n      ? `${message}\\n\\nLearn more: ${BASE_URL}/${options.slug}`\n      : message;\n    super(msgDocs, { cause: options?.cause });\n    this.cause = options?.cause;\n\n    if (options?.cause instanceof Error) {\n      this.stack = `${this.stack}\\nCaused by: ${options.cause.stack}`;\n    }\n  }\n\n  static is(value: unknown): value is WorkflowError {\n    return isError(value) && value.name === 'WorkflowError';\n  }\n}\n\n/**\n * Thrown when a world (storage backend) operation fails unexpectedly.\n *\n * This is the catch-all error for world implementations. Specific,\n * well-known failure modes have dedicated error types (e.g.\n * EntityConflictError, RunExpiredError, ThrottleError). 
This error\n * covers everything else \u2014 validation failures, missing entities\n * without a dedicated type, or unexpected HTTP errors from world-vercel.\n */\nexport class WorkflowWorldError extends WorkflowError {\n  status?: number;\n  code?: string;\n  url?: string;\n  /** Retry-After value in seconds, present on 429 and 425 responses */\n  retryAfter?: number;\n\n  constructor(\n    message: string,\n    options?: {\n      status?: number;\n      url?: string;\n      code?: string;\n      retryAfter?: number;\n      cause?: unknown;\n    }\n  ) {\n    super(message, {\n      cause: options?.cause,\n    });\n    this.name = 'WorkflowWorldError';\n    this.status = options?.status;\n    this.code = options?.code;\n    this.url = options?.url;\n    this.retryAfter = options?.retryAfter;\n  }\n\n  static is(value: unknown): value is WorkflowWorldError {\n    return isError(value) && value.name === 'WorkflowWorldError';\n  }\n}\n\n/**\n * Thrown when a workflow run fails during execution.\n *\n * This error indicates that the workflow encountered a fatal error and cannot\n * continue. It is thrown when awaiting `run.returnValue` on a run whose status\n * is `'failed'`. 
The `cause` property contains the underlying error with its\n * message, stack trace, and optional error code.\n *\n * Use the static `WorkflowRunFailedError.is()` method for type-safe checking\n * in catch blocks.\n *\n * @example\n * ```ts\n * import { WorkflowRunFailedError } from \"workflow/internal/errors\";\n *\n * try {\n *   const result = await run.returnValue;\n * } catch (error) {\n *   if (WorkflowRunFailedError.is(error)) {\n *     console.error(`Run ${error.runId} failed:`, error.cause.message);\n *   }\n * }\n * ```\n */\nexport class WorkflowRunFailedError extends WorkflowError {\n  runId: string;\n  declare cause: Error & { code?: string };\n\n  constructor(runId: string, error: StructuredError) {\n    // Create a proper Error instance from the StructuredError to set as cause\n    // NOTE: custom error types do not get serialized/deserialized. Everything is an Error\n    const causeError = new Error(error.message);\n    if (error.stack) {\n      causeError.stack = error.stack;\n    }\n    if (error.code) {\n      (causeError as any).code = error.code;\n    }\n\n    super(`Workflow run \"${runId}\" failed: ${error.message}`, {\n      cause: causeError,\n    });\n    this.name = 'WorkflowRunFailedError';\n    this.runId = runId;\n  }\n\n  static is(value: unknown): value is WorkflowRunFailedError {\n    return isError(value) && value.name === 'WorkflowRunFailedError';\n  }\n}\n\n/**\n * Thrown when attempting to get results from an incomplete workflow run.\n *\n * This error occurs when you try to access the result of a workflow\n * that is still running or hasn't completed yet.\n */\nexport class WorkflowRunNotCompletedError extends WorkflowError {\n  runId: string;\n  status: string;\n\n  constructor(runId: string, status: string) {\n    super(`Workflow run \"${runId}\" has not completed`, {});\n    this.name = 'WorkflowRunNotCompletedError';\n    this.runId = runId;\n    this.status = status;\n  }\n\n  static is(value: unknown): value is 
WorkflowRunNotCompletedError {\n    return isError(value) && value.name === 'WorkflowRunNotCompletedError';\n  }\n}\n\n/**\n * Thrown when the Workflow runtime encounters an internal error.\n *\n * This error indicates an issue with workflow execution, such as\n * serialization failures, starting an invalid workflow function, or\n * other runtime problems.\n */\nexport class WorkflowRuntimeError extends WorkflowError {\n  constructor(message: string, options?: WorkflowErrorOptions) {\n    super(message, {\n      ...options,\n    });\n    this.name = 'WorkflowRuntimeError';\n  }\n\n  static is(value: unknown): value is WorkflowRuntimeError {\n    return isError(value) && value.name === 'WorkflowRuntimeError';\n  }\n}\n\n/**\n * Thrown when a step function is not registered in the current deployment.\n *\n * This is an infrastructure error \u2014 not a user code error. It typically means\n * something went wrong with the bundling/build tooling that caused the step\n * to not get built correctly.\n *\n * When this happens, the step fails (like a FatalError) and control is passed back\n * to the workflow function, which can optionally handle the failure gracefully.\n */\nexport class StepNotRegisteredError extends WorkflowRuntimeError {\n  stepName: string;\n\n  constructor(stepName: string) {\n    super(\n      `Step \"${stepName}\" is not registered in the current deployment. This usually indicates a build or bundling issue that caused the step to not be included in the deployment.`,\n      { slug: ERROR_SLUGS.STEP_NOT_REGISTERED }\n    );\n    this.name = 'StepNotRegisteredError';\n    this.stepName = stepName;\n  }\n\n  static is(value: unknown): value is StepNotRegisteredError {\n    return isError(value) && value.name === 'StepNotRegisteredError';\n  }\n}\n\n/**\n * Thrown when a workflow function is not registered in the current deployment.\n *\n * This is an infrastructure error \u2014 not a user code error. 
It typically means:\n * - A run was started against a deployment that does not have the workflow\n *   (e.g., the workflow was renamed or moved and a new run targeted the latest deployment)\n * - Something went wrong with the bundling/build tooling that caused the workflow\n *   to not get built correctly\n *\n * When this happens, the run fails with a `RUNTIME_ERROR` error code.\n */\nexport class WorkflowNotRegisteredError extends WorkflowRuntimeError {\n  workflowName: string;\n\n  constructor(workflowName: string) {\n    super(\n      `Workflow \"${workflowName}\" is not registered in the current deployment. This usually means a run was started against a deployment that does not have this workflow, or there was a build/bundling issue.`,\n      { slug: ERROR_SLUGS.WORKFLOW_NOT_REGISTERED }\n    );\n    this.name = 'WorkflowNotRegisteredError';\n    this.workflowName = workflowName;\n  }\n\n  static is(value: unknown): value is WorkflowNotRegisteredError {\n    return isError(value) && value.name === 'WorkflowNotRegisteredError';\n  }\n}\n\n/**\n * Thrown when performing operations on a workflow run that does not exist.\n *\n * This error occurs when you call methods on a run object (e.g. `run.status`,\n * `run.cancel()`, `run.returnValue`) but the underlying run ID does not match\n * any known workflow run. 
Note that `getRun(id)` itself is synchronous and will\n * not throw \u2014 this error is raised when subsequent operations discover the run\n * is missing.\n *\n * Use the static `WorkflowRunNotFoundError.is()` method for type-safe checking\n * in catch blocks.\n *\n * @example\n * ```ts\n * import { WorkflowRunNotFoundError } from \"workflow/internal/errors\";\n *\n * try {\n *   const status = await run.status;\n * } catch (error) {\n *   if (WorkflowRunNotFoundError.is(error)) {\n *     console.error(`Run ${error.runId} does not exist`);\n *   }\n * }\n * ```\n */\nexport class WorkflowRunNotFoundError extends WorkflowError {\n  runId: string;\n\n  constructor(runId: string) {\n    super(`Workflow run \"${runId}\" not found`, {});\n    this.name = 'WorkflowRunNotFoundError';\n    this.runId = runId;\n  }\n\n  static is(value: unknown): value is WorkflowRunNotFoundError {\n    return isError(value) && value.name === 'WorkflowRunNotFoundError';\n  }\n}\n\n/**\n * Thrown when a hook token is already in use by another active workflow run.\n *\n * This is a user error \u2014 it means the same custom token was passed to\n * `createHook` in two or more concurrent runs. 
Use a unique token per run\n * (or omit the token to let the runtime generate one automatically).\n */\nexport class HookConflictError extends WorkflowError {\n  token: string;\n\n  constructor(token: string) {\n    super(`Hook token \"${token}\" is already in use by another workflow`, {\n      slug: ERROR_SLUGS.HOOK_CONFLICT,\n    });\n    this.name = 'HookConflictError';\n    this.token = token;\n  }\n\n  static is(value: unknown): value is HookConflictError {\n    return isError(value) && value.name === 'HookConflictError';\n  }\n}\n\n/**\n * Thrown when calling `resumeHook()` or `resumeWebhook()` with a token that\n * does not match any active hook.\n *\n * Common causes:\n * - The hook has expired (past its TTL)\n * - The hook was already disposed after being consumed\n * - The workflow has not started yet, so the hook does not exist\n *\n * A common pattern is to catch this error and start a new workflow run when\n * the hook does not exist yet (the \"resume or start\" pattern).\n *\n * Use the static `HookNotFoundError.is()` method for type-safe checking in\n * catch blocks.\n *\n * @example\n * ```ts\n * import { HookNotFoundError } from \"workflow/internal/errors\";\n *\n * try {\n *   await resumeHook(token, payload);\n * } catch (error) {\n *   if (HookNotFoundError.is(error)) {\n *     // Hook doesn't exist \u2014 start a new workflow run instead\n *     await startWorkflow(\"myWorkflow\", payload);\n *   }\n * }\n * ```\n */\nexport class HookNotFoundError extends WorkflowError {\n  token: string;\n\n  constructor(token: string) {\n    super('Hook not found', {});\n    this.name = 'HookNotFoundError';\n    this.token = token;\n  }\n\n  static is(value: unknown): value is HookNotFoundError {\n    return isError(value) && value.name === 'HookNotFoundError';\n  }\n}\n\n/**\n * Thrown when an operation conflicts with the current state of an entity.\n * This includes attempts to modify an entity already in a terminal state,\n * create an entity that already 
exists, or any other 409-style conflict.\n *\n * The workflow runtime handles this error automatically. Users interacting\n * with world storage backends directly may encounter it.\n */\nexport class EntityConflictError extends WorkflowWorldError {\n  constructor(message: string) {\n    super(message);\n    this.name = 'EntityConflictError';\n  }\n\n  static is(value: unknown): value is EntityConflictError {\n    return isError(value) && value.name === 'EntityConflictError';\n  }\n}\n\n/**\n * Thrown when a run is no longer available \u2014 either because it has been\n * cleaned up, expired, or already reached a terminal state (completed/failed).\n *\n * The workflow runtime handles this error automatically. Users interacting\n * with world storage backends directly may encounter it.\n */\nexport class RunExpiredError extends WorkflowWorldError {\n  constructor(message: string) {\n    super(message);\n    this.name = 'RunExpiredError';\n  }\n\n  static is(value: unknown): value is RunExpiredError {\n    return isError(value) && value.name === 'RunExpiredError';\n  }\n}\n\n/**\n * Thrown when an operation cannot proceed because a required timestamp\n * (e.g. retryAfter) has not been reached yet.\n *\n * The workflow runtime handles this error automatically. 
Users interacting\n * with world storage backends directly may encounter it.\n *\n * @property retryAfter - Delay in seconds before the operation can be retried.\n */\nexport class TooEarlyError extends WorkflowWorldError {\n  constructor(message: string, options?: { retryAfter?: number }) {\n    super(message, { retryAfter: options?.retryAfter });\n    this.name = 'TooEarlyError';\n  }\n\n  static is(value: unknown): value is TooEarlyError {\n    return isError(value) && value.name === 'TooEarlyError';\n  }\n}\n\n/**\n * Thrown when a request is rate limited by the workflow backend.\n *\n * The workflow runtime handles this error automatically with retry logic.\n * Users interacting with world storage backends directly may encounter it\n * if retries are exhausted.\n *\n * @property retryAfter - Delay in seconds before the request can be retried.\n */\nexport class ThrottleError extends WorkflowWorldError {\n  retryAfter?: number;\n\n  constructor(message: string, options?: { retryAfter?: number }) {\n    super(message);\n    this.name = 'ThrottleError';\n    this.retryAfter = options?.retryAfter;\n  }\n\n  static is(value: unknown): value is ThrottleError {\n    return isError(value) && value.name === 'ThrottleError';\n  }\n}\n\n/**\n * Thrown when awaiting `run.returnValue` on a workflow run that was cancelled.\n *\n * This error indicates that the workflow was explicitly cancelled (via\n * `run.cancel()`) and will not produce a return value. 
You can check for\n * cancellation before awaiting the return value by inspecting `run.status`.\n *\n * Use the static `WorkflowRunCancelledError.is()` method for type-safe\n * checking in catch blocks.\n *\n * @example\n * ```ts\n * import { WorkflowRunCancelledError } from \"workflow/internal/errors\";\n *\n * try {\n *   const result = await run.returnValue;\n * } catch (error) {\n *   if (WorkflowRunCancelledError.is(error)) {\n *     console.log(`Run ${error.runId} was cancelled`);\n *   }\n * }\n * ```\n */\nexport class WorkflowRunCancelledError extends WorkflowError {\n  runId: string;\n\n  constructor(runId: string) {\n    super(`Workflow run \"${runId}\" cancelled`, {});\n    this.name = 'WorkflowRunCancelledError';\n    this.runId = runId;\n  }\n\n  static is(value: unknown): value is WorkflowRunCancelledError {\n    return isError(value) && value.name === 'WorkflowRunCancelledError';\n  }\n}\n\n/**\n * Thrown when attempting to operate on a workflow run that requires a newer World version.\n *\n * This error occurs when a run was created with a newer spec version than the\n * current World implementation supports. 
To resolve this, upgrade your\n * `workflow` packages to a version that supports the required spec version.\n *\n * Use the static `RunNotSupportedError.is()` method for type-safe checking in\n * catch blocks.\n *\n * @example\n * ```ts\n * import { RunNotSupportedError } from \"workflow/internal/errors\";\n *\n * try {\n *   const status = await run.status;\n * } catch (error) {\n *   if (RunNotSupportedError.is(error)) {\n *     console.error(\n *       `Run requires spec v${error.runSpecVersion}, ` +\n *       `but world supports v${error.worldSpecVersion}`\n *     );\n *   }\n * }\n * ```\n */\nexport class RunNotSupportedError extends WorkflowError {\n  readonly runSpecVersion: number;\n  readonly worldSpecVersion: number;\n\n  constructor(runSpecVersion: number, worldSpecVersion: number) {\n    super(\n      `Run requires spec version ${runSpecVersion}, but world supports version ${worldSpecVersion}. ` +\n        `Please upgrade 'workflow' package.`\n    );\n    this.name = 'RunNotSupportedError';\n    this.runSpecVersion = runSpecVersion;\n    this.worldSpecVersion = worldSpecVersion;\n  }\n\n  static is(value: unknown): value is RunNotSupportedError {\n    return isError(value) && value.name === 'RunNotSupportedError';\n  }\n}\n\n/**\n * A fatal error is an error that cannot be retried.\n * It will cause the step to fail and the error will\n * be bubbled up to the workflow logic.\n */\nexport class FatalError extends Error {\n  fatal = true;\n\n  constructor(message: string) {\n    super(message);\n    this.name = 'FatalError';\n  }\n\n  static is(value: unknown): value is FatalError {\n    return isError(value) && value.name === 'FatalError';\n  }\n}\n\nexport interface RetryableErrorOptions {\n  /**\n   * The number of milliseconds to wait before retrying the step.\n   * Can also be a duration string (e.g., \"5s\", \"2m\") or a Date object.\n   * If not provided, the step will be retried after 1 second (1000 milliseconds).\n   */\n  retryAfter?: number | 
StringValue | Date;\n}\n\n/**\n * An error that can happen during a step execution, allowing\n * for configuration of the retry behavior.\n */\nexport class RetryableError extends Error {\n  /**\n   * The Date when the step should be retried.\n   */\n  retryAfter: Date;\n\n  constructor(message: string, options: RetryableErrorOptions = {}) {\n    super(message);\n    this.name = 'RetryableError';\n\n    if (options.retryAfter !== undefined) {\n      this.retryAfter = parseDurationToDate(options.retryAfter);\n    } else {\n      // Default to 1 second (1000 milliseconds)\n      this.retryAfter = new Date(Date.now() + 1000);\n    }\n  }\n\n  static is(value: unknown): value is RetryableError {\n    return isError(value) && value.name === 'RetryableError';\n  }\n}\n\nexport const VERCEL_403_ERROR_MESSAGE =\n  'Your current vercel account does not have access to this resource. Use `vercel login` or `vercel switch` to ensure you are linked to the right account.';\n\nexport { RUN_ERROR_CODES, type RunErrorCode } from './error-codes.js';\n", "/**\n * Just the core utilities that are meant to be imported by user\n * steps/workflows. This allows the bundler to tree-shake and limit what goes\n * into the final user bundles. Logic for running/handling steps/workflows\n * should live in runtime. 
Eventually these might be separate packages\n * `workflow` and `workflow/runtime`?\n *\n * Everything here will get re-exported under the 'workflow' top level package.\n * This should be a minimal set of APIs so **do not anything here** unless it's\n * needed for userland workflow code.\n */\n\nexport {\n  FatalError,\n  RetryableError,\n  type RetryableErrorOptions,\n} from '@workflow/errors';\nexport {\n  createHook,\n  createWebhook,\n  type Hook,\n  type HookOptions,\n  type RequestWithResponse,\n  type Webhook,\n  type WebhookOptions,\n} from './create-hook.js';\nexport { defineHook, type TypedHook } from './define-hook.js';\nexport { sleep } from './sleep.js';\nexport {\n  getStepMetadata,\n  type StepMetadata,\n} from './step/get-step-metadata.js';\nexport {\n  getWorkflowMetadata,\n  type WorkflowMetadata,\n} from './step/get-workflow-metadata.js';\nexport {\n  getWritable,\n  type WorkflowWritableStreamOptions,\n} from './step/writable-stream.js';\n", "\n    // Built in steps\n    import 'workflow/internal/builtins';\n    // User steps\n    import '../../../../packages/workflow/dist/stdlib.js';\nimport './workflows/shopify-order.ts';\n    // Serde files for cross-context class registration\n    \n    // API entrypoint\n    export { stepEntrypoint as POST } from 'workflow/runtime';"],
  "mappings": ";;;;;;;AAAA,SAAA,4BAAA;AASE,eAAW,kCAAA;AACX,SAAO,KAAK,YAAW;AACzB;AAFa;AAIb,eAAsB,0BAAuB;AAC3C,SAAA,KAAW,KAAA;;AADS;AAGtB,eAAC,0BAAA;AAED,SAAO,KAAK,KAAA;;AAFX;qBAIiB,mCAAG,+BAAA;AACrB,qBAAC,2BAAA,uBAAA;;;;ACrBD,SAAA,wBAAAA,6BAAA;AAaA,eAAsB,SAAkD,MAAA;AACtE,SAAA,WAAW,MAAA,GAAA,IAAA;;AADS;AAGtBC,sBAAC,gDAAA,KAAA;;;AChBD,SAAS,wBAAAC,6BAA4B;;;ACArC,SAAS,iBAAiB;AAC1B,SACE,gBACA,eACA,yBACD;AACD,SAAS,MAAiC,qBAAqB;AAC/D,SAAS,2BAA2B;AACpC,SACE,qBACA,4BACA,uBACD;;;ACgjBD,SAAM,uBAAsB;AAjjBzB,SAAA,QAAA,OAAA;AACH,SAAS,OAAQ,UAAc,YAAA,UAAA,QAAA,UAAA,SAAA,aAAA;;AAD5B;AAggBQ,IAAA,aAAA,cAAuB,MAAA;EA3gBlC,OA2gBkC;;;EACvB,QAAA;EAET,YAAY,SAAA;AACV,UACE,OAAA;SACE,OAAA;;SAGJ,GAAK,OAAA;AACL,WAAK,QAAA,KAAA,KAAmB,MAAA,SAAgB;EAC1C;;;;AC1gBF,SACE,YACA,qBAED;AACD,SACE,kBACA;AAOF,SAAS,aAA4B;AACrC,SAAS,uBAAa;AACtB,SACE,2BAEK;AACP,SACE,mBAAmB;;;AH9BrB,IAAM,iBAAiB,8BAAO,YAAU;AACpC,QAAM,WAAW,MAAM,GAAG,OAAO,WAAW;AAAA,IACxC,OAAO;AAAA,MACH,WAAW;AAAA,IACf;AAAA,EACJ,CAAC;AACD,MAAI,UAAU,WAAW,aAAa;AAClC,UAAM,IAAI,WAAW,SAAS,OAAO,oBAAoB;AAAA,EAC7D;AACA,SAAO;AACX,GAVuB;AAWvB,IAAM,gBAAgB,8BAAO,SAAS,WAAS;AAC3C,QAAM,SAAS,MAAM,gBAAgB,OAAO;AAAA,IACxC,gBAAgB,WAAW,OAAO;AAAA,IAClC;AAAA,EACJ,CAAC;AACD,SAAO;AACX,GANsB;AAOtB,IAAM,mBAAmB,8BAAO,SAAS,UAAQ;AAC7C,QAAM,cAAc,MAAM,UAAU,QAAQ;AAAA,IACxC,gBAAgB,aAAa,OAAO;AAAA,IACpC;AAAA,EACJ,CAAC;AACD,SAAO;AACX,GANyB;AAOzB,IAAM,gBAAgB,8BAAO,SAAS,aAAW;AAC7C,QAAM,gBAAgB,OAAO;AAAA,IACzB,gBAAgB,UAAU,OAAO;AAAA,IACjC;AAAA,EACJ,CAAC;AACL,GALsB;AAMtB,IAAM,mBAAmB,8BAAO,SAAS,UAAQ;AAC7C,QAAM,aAAa,KAAK;AAAA,IACpB,gBAAgB,gBAAgB,OAAO;AAAA,IACvC,IAAI;AAAA,IACJ,UAAU;AAAA,EACd,CAAC;AACL,GANyB;AAOzB,eAAO,aAAoC,SAAS,QAAQ,OAAO,OAAO;AACtE,QAAM,IAAI,MAAM,kIAAkI;AACtJ;AAF8B;AAG9B,aAAa,aAAa;AAC1BC,sBAAqB,mDAAmD,cAAc;AACtFA,sBAAqB,kDAAkD,aAAa;AACpFA,sBAAqB,qDAAqD,gBAAgB;AAC1FA,sBAAqB,kDAAkD,aAAa;AACpFA,sBAAqB,qDAAqD,gBAAgB;;;AIxCtF,SAA2B,sBAAY;",
  "names": ["registerStepFunction", "registerStepFunction", "registerStepFunction", "registerStepFunction"]
}
 diff --git a/tests/fixtures/workflow-skills/duplicate-webhook-order/.workflow-vitest/steps.mjs.debug.json b/tests/fixtures/workflow-skills/duplicate-webhook-order/.workflow-vitest/steps.mjs.debug.json new file mode 100644 index 0000000000..42f8415fa5 --- /dev/null +++ b/tests/fixtures/workflow-skills/duplicate-webhook-order/.workflow-vitest/steps.mjs.debug.json @@ -0,0 +1,10 @@ +{ + "stepFiles": [ + "/Users/johnlindquist/dev/workflow/packages/workflow/dist/stdlib.js", + "/Users/johnlindquist/dev/workflow/tests/fixtures/workflow-skills/duplicate-webhook-order/workflows/shopify-order.ts" + ], + "workflowFiles": [ + "/Users/johnlindquist/dev/workflow/tests/fixtures/workflow-skills/duplicate-webhook-order/workflows/shopify-order.ts" + ], + "serdeOnlyFiles": [] +} diff --git a/tests/fixtures/workflow-skills/duplicate-webhook-order/.workflow-vitest/workflows.mjs b/tests/fixtures/workflow-skills/duplicate-webhook-order/.workflow-vitest/workflows.mjs new file mode 100644 index 0000000000..1ed6007aa7 --- /dev/null +++ b/tests/fixtures/workflow-skills/duplicate-webhook-order/.workflow-vitest/workflows.mjs @@ -0,0 +1,204 @@ +// biome-ignore-all lint: generated file +/* eslint-disable */ +import { workflowEntrypoint } from 'workflow/runtime'; + +const workflowCode = `globalThis.__private_workflows = new Map(); +var __create = Object.create; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __getProtoOf = Object.getPrototypeOf; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __commonJS = (cb, mod) => function __require() { + return mod || (0, cb[__getOwnPropNames(cb)[0]])((mod = { exports: {} }).exports, mod), mod.exports; +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of 
__getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps( + // If the importer is in node compatibility mode or this is not an ESM + // file that has been converted to a CommonJS file using a Babel- + // compatible transform (i.e. "__esModule" has not been set), then set + // "default" to the CommonJS "module.exports" for node compatibility. + isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target, + mod +)); + +// ../../../../node_modules/.pnpm/ms@2.1.3/node_modules/ms/index.js +var require_ms = __commonJS({ + "../../../../node_modules/.pnpm/ms@2.1.3/node_modules/ms/index.js"(exports, module2) { + var s = 1e3; + var m = s * 60; + var h = m * 60; + var d = h * 24; + var w = d * 7; + var y = d * 365.25; + module2.exports = function(val, options) { + options = options || {}; + var type = typeof val; + if (type === "string" && val.length > 0) { + return parse(val); + } else if (type === "number" && isFinite(val)) { + return options.long ? fmtLong(val) : fmtShort(val); + } + throw new Error("val is not a non-empty string or a valid number. 
val=" + JSON.stringify(val)); + }; + function parse(str) { + str = String(str); + if (str.length > 100) { + return; + } + var match = /^(-?(?:\\d+)?\\.?\\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?\$/i.exec(str); + if (!match) { + return; + } + var n = parseFloat(match[1]); + var type = (match[2] || "ms").toLowerCase(); + switch (type) { + case "years": + case "year": + case "yrs": + case "yr": + case "y": + return n * y; + case "weeks": + case "week": + case "w": + return n * w; + case "days": + case "day": + case "d": + return n * d; + case "hours": + case "hour": + case "hrs": + case "hr": + case "h": + return n * h; + case "minutes": + case "minute": + case "mins": + case "min": + case "m": + return n * m; + case "seconds": + case "second": + case "secs": + case "sec": + case "s": + return n * s; + case "milliseconds": + case "millisecond": + case "msecs": + case "msec": + case "ms": + return n; + default: + return void 0; + } + } + __name(parse, "parse"); + function fmtShort(ms2) { + var msAbs = Math.abs(ms2); + if (msAbs >= d) { + return Math.round(ms2 / d) + "d"; + } + if (msAbs >= h) { + return Math.round(ms2 / h) + "h"; + } + if (msAbs >= m) { + return Math.round(ms2 / m) + "m"; + } + if (msAbs >= s) { + return Math.round(ms2 / s) + "s"; + } + return ms2 + "ms"; + } + __name(fmtShort, "fmtShort"); + function fmtLong(ms2) { + var msAbs = Math.abs(ms2); + if (msAbs >= d) { + return plural(ms2, msAbs, d, "day"); + } + if (msAbs >= h) { + return plural(ms2, msAbs, h, "hour"); + } + if (msAbs >= m) { + return plural(ms2, msAbs, m, "minute"); + } + if (msAbs >= s) { + return plural(ms2, msAbs, s, "second"); + } + return ms2 + " ms"; + } + __name(fmtLong, "fmtLong"); + function plural(ms2, msAbs, n, name) { + var isPlural = msAbs >= n * 1.5; + return Math.round(ms2 / n) + " " + name + (isPlural ? 
"s" : ""); + } + __name(plural, "plural"); + } +}); + +// ../../../../packages/utils/dist/time.js +var import_ms = __toESM(require_ms(), 1); + +// ../../../../packages/errors/dist/index.js +function isError(value) { + return typeof value === "object" && value !== null && "name" in value && "message" in value; +} +__name(isError, "isError"); +var FatalError = class extends Error { + static { + __name(this, "FatalError"); + } + fatal = true; + constructor(message) { + super(message); + this.name = "FatalError"; + } + static is(value) { + return isError(value) && value.name === "FatalError"; + } +}; + +// ../../../../packages/workflow/dist/stdlib.js +var fetch = globalThis[/* @__PURE__ */ Symbol.for("WORKFLOW_USE_STEP")]("step//./packages/workflow/dist/stdlib//fetch"); + +// workflows/shopify-order.ts +var checkDuplicate = globalThis[/* @__PURE__ */ Symbol.for("WORKFLOW_USE_STEP")]("step//./workflows/shopify-order//checkDuplicate"); +var chargePayment = globalThis[/* @__PURE__ */ Symbol.for("WORKFLOW_USE_STEP")]("step//./workflows/shopify-order//chargePayment"); +var reserveInventory = globalThis[/* @__PURE__ */ Symbol.for("WORKFLOW_USE_STEP")]("step//./workflows/shopify-order//reserveInventory"); +var refundPayment = globalThis[/* @__PURE__ */ Symbol.for("WORKFLOW_USE_STEP")]("step//./workflows/shopify-order//refundPayment"); +var sendConfirmation = globalThis[/* @__PURE__ */ Symbol.for("WORKFLOW_USE_STEP")]("step//./workflows/shopify-order//sendConfirmation"); +async function shopifyOrder(orderId, amount, items, email) { + await checkDuplicate(orderId); + const charge = await chargePayment(orderId, amount); + try { + await reserveInventory(orderId, items); + } catch (error) { + if (error instanceof FatalError) { + await refundPayment(orderId, charge.id); + throw error; + } + throw error; + } + await sendConfirmation(orderId, email); + return { + orderId, + status: "fulfilled" + }; +} +__name(shopifyOrder, "shopifyOrder"); +shopifyOrder.workflowId = 
"workflow//./workflows/shopify-order//shopifyOrder"; +globalThis.__private_workflows.set("workflow//./workflows/shopify-order//shopifyOrder", shopifyOrder); +//# sourceMappingURL=data:application/json;base64,{
  "version": 3,
  "sources": ["../../../../node_modules/.pnpm/ms@2.1.3/node_modules/ms/index.js", "../../../../packages/utils/src/time.ts", "../../../../packages/errors/src/index.ts", "../../../../packages/workflow/src/stdlib.ts", "workflows/shopify-order.ts"],
  "sourcesContent": ["/**\n * Helpers.\n */ var s = 1000;\nvar m = s * 60;\nvar h = m * 60;\nvar d = h * 24;\nvar w = d * 7;\nvar y = d * 365.25;\n/**\n * Parse or format the given `val`.\n *\n * Options:\n *\n *  - `long` verbose formatting [false]\n *\n * @param {String|Number} val\n * @param {Object} [options]\n * @throws {Error} throw an error if val is not a non-empty string or a number\n * @return {String|Number}\n * @api public\n */ module.exports = function(val, options) {\n    options = options || {};\n    var type = typeof val;\n    if (type === 'string' && val.length > 0) {\n        return parse(val);\n    } else if (type === 'number' && isFinite(val)) {\n        return options.long ? fmtLong(val) : fmtShort(val);\n    }\n    throw new Error('val is not a non-empty string or a valid number. val=' + JSON.stringify(val));\n};\n/**\n * Parse the given `str` and return milliseconds.\n *\n * @param {String} str\n * @return {Number}\n * @api private\n */ function parse(str) {\n    str = String(str);\n    if (str.length > 100) {\n        return;\n    }\n    var match = /^(-?(?:\\d+)?\\.?\\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?$/i.exec(str);\n    if (!match) {\n        return;\n    }\n    var n = parseFloat(match[1]);\n    var type = (match[2] || 'ms').toLowerCase();\n    switch(type){\n        case 'years':\n        case 'year':\n        case 'yrs':\n        case 'yr':\n        case 'y':\n            return n * y;\n        case 'weeks':\n        case 'week':\n        case 'w':\n            return n * w;\n        case 'days':\n        case 'day':\n        case 'd':\n            return n * d;\n        case 'hours':\n        case 'hour':\n        case 'hrs':\n        case 'hr':\n        case 'h':\n            return n * h;\n        case 'minutes':\n        case 'minute':\n        case 'mins':\n        case 'min':\n        case 'm':\n            return n * m;\n        case 'seconds':\n        
case 'second':\n        case 'secs':\n        case 'sec':\n        case 's':\n            return n * s;\n        case 'milliseconds':\n        case 'millisecond':\n        case 'msecs':\n        case 'msec':\n        case 'ms':\n            return n;\n        default:\n            return undefined;\n    }\n}\n/**\n * Short format for `ms`.\n *\n * @param {Number} ms\n * @return {String}\n * @api private\n */ function fmtShort(ms) {\n    var msAbs = Math.abs(ms);\n    if (msAbs >= d) {\n        return Math.round(ms / d) + 'd';\n    }\n    if (msAbs >= h) {\n        return Math.round(ms / h) + 'h';\n    }\n    if (msAbs >= m) {\n        return Math.round(ms / m) + 'm';\n    }\n    if (msAbs >= s) {\n        return Math.round(ms / s) + 's';\n    }\n    return ms + 'ms';\n}\n/**\n * Long format for `ms`.\n *\n * @param {Number} ms\n * @return {String}\n * @api private\n */ function fmtLong(ms) {\n    var msAbs = Math.abs(ms);\n    if (msAbs >= d) {\n        return plural(ms, msAbs, d, 'day');\n    }\n    if (msAbs >= h) {\n        return plural(ms, msAbs, h, 'hour');\n    }\n    if (msAbs >= m) {\n        return plural(ms, msAbs, m, 'minute');\n    }\n    if (msAbs >= s) {\n        return plural(ms, msAbs, s, 'second');\n    }\n    return ms + ' ms';\n}\n/**\n * Pluralization helper.\n */ function plural(ms, msAbs, n, name) {\n    var isPlural = msAbs >= n * 1.5;\n    return Math.round(ms / n) + ' ' + name + (isPlural ? 's' : '');\n}\n", "import type { StringValue } from 'ms';\nimport ms from 'ms';\n\n/**\n * Parses a duration parameter (string, number, or Date) and returns a Date object\n * representing when the duration should elapse.\n *\n * - For strings: Parses duration strings like \"1s\", \"5m\", \"1h\", etc. 
using the `ms` library\n * - For numbers: Treats as milliseconds from now\n * - For Date objects: Returns the date directly (handles both Date instances and date-like objects from deserialization)\n *\n * @param param - The duration parameter (StringValue, Date, or number of milliseconds)\n * @returns A Date object representing when the duration should elapse\n * @throws {Error} If the parameter is invalid or cannot be parsed\n */\nexport function parseDurationToDate(param: StringValue | Date | number): Date {\n  if (typeof param === 'string') {\n    const durationMs = ms(param);\n    if (typeof durationMs !== 'number' || durationMs < 0) {\n      throw new Error(\n        `Invalid duration: \"${param}\". Expected a valid duration string like \"1s\", \"1m\", \"1h\", etc.`\n      );\n    }\n    return new Date(Date.now() + durationMs);\n  } else if (typeof param === 'number') {\n    if (param < 0 || !Number.isFinite(param)) {\n      throw new Error(\n        `Invalid duration: ${param}. Expected a non-negative finite number of milliseconds.`\n      );\n    }\n    return new Date(Date.now() + param);\n  } else if (\n    param instanceof Date ||\n    (param &&\n      typeof param === 'object' &&\n      typeof (param as any).getTime === 'function')\n  ) {\n    // Handle both Date instances and date-like objects (from deserialization)\n    return param instanceof Date ? param : new Date((param as any).getTime());\n  } else {\n    throw new Error(\n      `Invalid duration parameter. 
Expected a duration string, number (milliseconds), or Date object.`\n    );\n  }\n}\n", "import { parseDurationToDate } from '@workflow/utils';\nimport type { StructuredError } from '@workflow/world';\nimport type { StringValue } from 'ms';\n\nconst BASE_URL = 'https://useworkflow.dev/err';\n\n/**\n * @internal\n * Check if a value is an Error without relying on Node.js utilities.\n * This is needed for error classes that can be used in VM contexts where\n * Node.js imports are not available.\n */\nfunction isError(value: unknown): value is { name: string; message: string } {\n  return (\n    typeof value === 'object' &&\n    value !== null &&\n    'name' in value &&\n    'message' in value\n  );\n}\n\n/**\n * @internal\n * All the slugs of the errors used for documentation links.\n */\nexport const ERROR_SLUGS = {\n  NODE_JS_MODULE_IN_WORKFLOW: 'node-js-module-in-workflow',\n  START_INVALID_WORKFLOW_FUNCTION: 'start-invalid-workflow-function',\n  SERIALIZATION_FAILED: 'serialization-failed',\n  WEBHOOK_INVALID_RESPOND_WITH_VALUE: 'webhook-invalid-respond-with-value',\n  WEBHOOK_RESPONSE_NOT_SENT: 'webhook-response-not-sent',\n  FETCH_IN_WORKFLOW_FUNCTION: 'fetch-in-workflow',\n  TIMEOUT_FUNCTIONS_IN_WORKFLOW: 'timeout-in-workflow',\n  HOOK_CONFLICT: 'hook-conflict',\n  CORRUPTED_EVENT_LOG: 'corrupted-event-log',\n  STEP_NOT_REGISTERED: 'step-not-registered',\n  WORKFLOW_NOT_REGISTERED: 'workflow-not-registered',\n} as const;\n\ntype ErrorSlug = (typeof ERROR_SLUGS)[keyof typeof ERROR_SLUGS];\n\ninterface WorkflowErrorOptions extends ErrorOptions {\n  /**\n   * The slug of the error. 
This will be used to generate a link to the error documentation.\n   */\n  slug?: ErrorSlug;\n}\n\n/**\n * The base class for all Workflow-related errors.\n *\n * This error is thrown by the Workflow DevKit when internal operations fail.\n * You can use this class with `instanceof` to catch any Workflow DevKit error.\n *\n * @example\n * ```ts\n * try {\n *   await getRun(runId);\n * } catch (error) {\n *   if (error instanceof WorkflowError) {\n *     console.error('Workflow DevKit error:', error.message);\n *   }\n * }\n * ```\n */\nexport class WorkflowError extends Error {\n  readonly cause?: unknown;\n\n  constructor(message: string, options?: WorkflowErrorOptions) {\n    const msgDocs = options?.slug\n      ? `${message}\\n\\nLearn more: ${BASE_URL}/${options.slug}`\n      : message;\n    super(msgDocs, { cause: options?.cause });\n    this.cause = options?.cause;\n\n    if (options?.cause instanceof Error) {\n      this.stack = `${this.stack}\\nCaused by: ${options.cause.stack}`;\n    }\n  }\n\n  static is(value: unknown): value is WorkflowError {\n    return isError(value) && value.name === 'WorkflowError';\n  }\n}\n\n/**\n * Thrown when a world (storage backend) operation fails unexpectedly.\n *\n * This is the catch-all error for world implementations. Specific,\n * well-known failure modes have dedicated error types (e.g.\n * EntityConflictError, RunExpiredError, ThrottleError). 
This error\n * covers everything else \u2014 validation failures, missing entities\n * without a dedicated type, or unexpected HTTP errors from world-vercel.\n */\nexport class WorkflowWorldError extends WorkflowError {\n  status?: number;\n  code?: string;\n  url?: string;\n  /** Retry-After value in seconds, present on 429 and 425 responses */\n  retryAfter?: number;\n\n  constructor(\n    message: string,\n    options?: {\n      status?: number;\n      url?: string;\n      code?: string;\n      retryAfter?: number;\n      cause?: unknown;\n    }\n  ) {\n    super(message, {\n      cause: options?.cause,\n    });\n    this.name = 'WorkflowWorldError';\n    this.status = options?.status;\n    this.code = options?.code;\n    this.url = options?.url;\n    this.retryAfter = options?.retryAfter;\n  }\n\n  static is(value: unknown): value is WorkflowWorldError {\n    return isError(value) && value.name === 'WorkflowWorldError';\n  }\n}\n\n/**\n * Thrown when a workflow run fails during execution.\n *\n * This error indicates that the workflow encountered a fatal error and cannot\n * continue. It is thrown when awaiting `run.returnValue` on a run whose status\n * is `'failed'`. 
The `cause` property contains the underlying error with its\n * message, stack trace, and optional error code.\n *\n * Use the static `WorkflowRunFailedError.is()` method for type-safe checking\n * in catch blocks.\n *\n * @example\n * ```ts\n * import { WorkflowRunFailedError } from \"workflow/internal/errors\";\n *\n * try {\n *   const result = await run.returnValue;\n * } catch (error) {\n *   if (WorkflowRunFailedError.is(error)) {\n *     console.error(`Run ${error.runId} failed:`, error.cause.message);\n *   }\n * }\n * ```\n */\nexport class WorkflowRunFailedError extends WorkflowError {\n  runId: string;\n  declare cause: Error & { code?: string };\n\n  constructor(runId: string, error: StructuredError) {\n    // Create a proper Error instance from the StructuredError to set as cause\n    // NOTE: custom error types do not get serialized/deserialized. Everything is an Error\n    const causeError = new Error(error.message);\n    if (error.stack) {\n      causeError.stack = error.stack;\n    }\n    if (error.code) {\n      (causeError as any).code = error.code;\n    }\n\n    super(`Workflow run \"${runId}\" failed: ${error.message}`, {\n      cause: causeError,\n    });\n    this.name = 'WorkflowRunFailedError';\n    this.runId = runId;\n  }\n\n  static is(value: unknown): value is WorkflowRunFailedError {\n    return isError(value) && value.name === 'WorkflowRunFailedError';\n  }\n}\n\n/**\n * Thrown when attempting to get results from an incomplete workflow run.\n *\n * This error occurs when you try to access the result of a workflow\n * that is still running or hasn't completed yet.\n */\nexport class WorkflowRunNotCompletedError extends WorkflowError {\n  runId: string;\n  status: string;\n\n  constructor(runId: string, status: string) {\n    super(`Workflow run \"${runId}\" has not completed`, {});\n    this.name = 'WorkflowRunNotCompletedError';\n    this.runId = runId;\n    this.status = status;\n  }\n\n  static is(value: unknown): value is 
WorkflowRunNotCompletedError {\n    return isError(value) && value.name === 'WorkflowRunNotCompletedError';\n  }\n}\n\n/**\n * Thrown when the Workflow runtime encounters an internal error.\n *\n * This error indicates an issue with workflow execution, such as\n * serialization failures, starting an invalid workflow function, or\n * other runtime problems.\n */\nexport class WorkflowRuntimeError extends WorkflowError {\n  constructor(message: string, options?: WorkflowErrorOptions) {\n    super(message, {\n      ...options,\n    });\n    this.name = 'WorkflowRuntimeError';\n  }\n\n  static is(value: unknown): value is WorkflowRuntimeError {\n    return isError(value) && value.name === 'WorkflowRuntimeError';\n  }\n}\n\n/**\n * Thrown when a step function is not registered in the current deployment.\n *\n * This is an infrastructure error \u2014 not a user code error. It typically means\n * something went wrong with the bundling/build tooling that caused the step\n * to not get built correctly.\n *\n * When this happens, the step fails (like a FatalError) and control is passed back\n * to the workflow function, which can optionally handle the failure gracefully.\n */\nexport class StepNotRegisteredError extends WorkflowRuntimeError {\n  stepName: string;\n\n  constructor(stepName: string) {\n    super(\n      `Step \"${stepName}\" is not registered in the current deployment. This usually indicates a build or bundling issue that caused the step to not be included in the deployment.`,\n      { slug: ERROR_SLUGS.STEP_NOT_REGISTERED }\n    );\n    this.name = 'StepNotRegisteredError';\n    this.stepName = stepName;\n  }\n\n  static is(value: unknown): value is StepNotRegisteredError {\n    return isError(value) && value.name === 'StepNotRegisteredError';\n  }\n}\n\n/**\n * Thrown when a workflow function is not registered in the current deployment.\n *\n * This is an infrastructure error \u2014 not a user code error. 
It typically means:\n * - A run was started against a deployment that does not have the workflow\n *   (e.g., the workflow was renamed or moved and a new run targeted the latest deployment)\n * - Something went wrong with the bundling/build tooling that caused the workflow\n *   to not get built correctly\n *\n * When this happens, the run fails with a `RUNTIME_ERROR` error code.\n */\nexport class WorkflowNotRegisteredError extends WorkflowRuntimeError {\n  workflowName: string;\n\n  constructor(workflowName: string) {\n    super(\n      `Workflow \"${workflowName}\" is not registered in the current deployment. This usually means a run was started against a deployment that does not have this workflow, or there was a build/bundling issue.`,\n      { slug: ERROR_SLUGS.WORKFLOW_NOT_REGISTERED }\n    );\n    this.name = 'WorkflowNotRegisteredError';\n    this.workflowName = workflowName;\n  }\n\n  static is(value: unknown): value is WorkflowNotRegisteredError {\n    return isError(value) && value.name === 'WorkflowNotRegisteredError';\n  }\n}\n\n/**\n * Thrown when performing operations on a workflow run that does not exist.\n *\n * This error occurs when you call methods on a run object (e.g. `run.status`,\n * `run.cancel()`, `run.returnValue`) but the underlying run ID does not match\n * any known workflow run. 
Note that `getRun(id)` itself is synchronous and will\n * not throw \u2014 this error is raised when subsequent operations discover the run\n * is missing.\n *\n * Use the static `WorkflowRunNotFoundError.is()` method for type-safe checking\n * in catch blocks.\n *\n * @example\n * ```ts\n * import { WorkflowRunNotFoundError } from \"workflow/internal/errors\";\n *\n * try {\n *   const status = await run.status;\n * } catch (error) {\n *   if (WorkflowRunNotFoundError.is(error)) {\n *     console.error(`Run ${error.runId} does not exist`);\n *   }\n * }\n * ```\n */\nexport class WorkflowRunNotFoundError extends WorkflowError {\n  runId: string;\n\n  constructor(runId: string) {\n    super(`Workflow run \"${runId}\" not found`, {});\n    this.name = 'WorkflowRunNotFoundError';\n    this.runId = runId;\n  }\n\n  static is(value: unknown): value is WorkflowRunNotFoundError {\n    return isError(value) && value.name === 'WorkflowRunNotFoundError';\n  }\n}\n\n/**\n * Thrown when a hook token is already in use by another active workflow run.\n *\n * This is a user error \u2014 it means the same custom token was passed to\n * `createHook` in two or more concurrent runs. 
Use a unique token per run\n * (or omit the token to let the runtime generate one automatically).\n */\nexport class HookConflictError extends WorkflowError {\n  token: string;\n\n  constructor(token: string) {\n    super(`Hook token \"${token}\" is already in use by another workflow`, {\n      slug: ERROR_SLUGS.HOOK_CONFLICT,\n    });\n    this.name = 'HookConflictError';\n    this.token = token;\n  }\n\n  static is(value: unknown): value is HookConflictError {\n    return isError(value) && value.name === 'HookConflictError';\n  }\n}\n\n/**\n * Thrown when calling `resumeHook()` or `resumeWebhook()` with a token that\n * does not match any active hook.\n *\n * Common causes:\n * - The hook has expired (past its TTL)\n * - The hook was already disposed after being consumed\n * - The workflow has not started yet, so the hook does not exist\n *\n * A common pattern is to catch this error and start a new workflow run when\n * the hook does not exist yet (the \"resume or start\" pattern).\n *\n * Use the static `HookNotFoundError.is()` method for type-safe checking in\n * catch blocks.\n *\n * @example\n * ```ts\n * import { HookNotFoundError } from \"workflow/internal/errors\";\n *\n * try {\n *   await resumeHook(token, payload);\n * } catch (error) {\n *   if (HookNotFoundError.is(error)) {\n *     // Hook doesn't exist \u2014 start a new workflow run instead\n *     await startWorkflow(\"myWorkflow\", payload);\n *   }\n * }\n * ```\n */\nexport class HookNotFoundError extends WorkflowError {\n  token: string;\n\n  constructor(token: string) {\n    super('Hook not found', {});\n    this.name = 'HookNotFoundError';\n    this.token = token;\n  }\n\n  static is(value: unknown): value is HookNotFoundError {\n    return isError(value) && value.name === 'HookNotFoundError';\n  }\n}\n\n/**\n * Thrown when an operation conflicts with the current state of an entity.\n * This includes attempts to modify an entity already in a terminal state,\n * create an entity that already 
exists, or any other 409-style conflict.\n *\n * The workflow runtime handles this error automatically. Users interacting\n * with world storage backends directly may encounter it.\n */\nexport class EntityConflictError extends WorkflowWorldError {\n  constructor(message: string) {\n    super(message);\n    this.name = 'EntityConflictError';\n  }\n\n  static is(value: unknown): value is EntityConflictError {\n    return isError(value) && value.name === 'EntityConflictError';\n  }\n}\n\n/**\n * Thrown when a run is no longer available \u2014 either because it has been\n * cleaned up, expired, or already reached a terminal state (completed/failed).\n *\n * The workflow runtime handles this error automatically. Users interacting\n * with world storage backends directly may encounter it.\n */\nexport class RunExpiredError extends WorkflowWorldError {\n  constructor(message: string) {\n    super(message);\n    this.name = 'RunExpiredError';\n  }\n\n  static is(value: unknown): value is RunExpiredError {\n    return isError(value) && value.name === 'RunExpiredError';\n  }\n}\n\n/**\n * Thrown when an operation cannot proceed because a required timestamp\n * (e.g. retryAfter) has not been reached yet.\n *\n * The workflow runtime handles this error automatically. 
Users interacting\n * with world storage backends directly may encounter it.\n *\n * @property retryAfter - Delay in seconds before the operation can be retried.\n */\nexport class TooEarlyError extends WorkflowWorldError {\n  constructor(message: string, options?: { retryAfter?: number }) {\n    super(message, { retryAfter: options?.retryAfter });\n    this.name = 'TooEarlyError';\n  }\n\n  static is(value: unknown): value is TooEarlyError {\n    return isError(value) && value.name === 'TooEarlyError';\n  }\n}\n\n/**\n * Thrown when a request is rate limited by the workflow backend.\n *\n * The workflow runtime handles this error automatically with retry logic.\n * Users interacting with world storage backends directly may encounter it\n * if retries are exhausted.\n *\n * @property retryAfter - Delay in seconds before the request can be retried.\n */\nexport class ThrottleError extends WorkflowWorldError {\n  retryAfter?: number;\n\n  constructor(message: string, options?: { retryAfter?: number }) {\n    super(message);\n    this.name = 'ThrottleError';\n    this.retryAfter = options?.retryAfter;\n  }\n\n  static is(value: unknown): value is ThrottleError {\n    return isError(value) && value.name === 'ThrottleError';\n  }\n}\n\n/**\n * Thrown when awaiting `run.returnValue` on a workflow run that was cancelled.\n *\n * This error indicates that the workflow was explicitly cancelled (via\n * `run.cancel()`) and will not produce a return value. 
You can check for\n * cancellation before awaiting the return value by inspecting `run.status`.\n *\n * Use the static `WorkflowRunCancelledError.is()` method for type-safe\n * checking in catch blocks.\n *\n * @example\n * ```ts\n * import { WorkflowRunCancelledError } from \"workflow/internal/errors\";\n *\n * try {\n *   const result = await run.returnValue;\n * } catch (error) {\n *   if (WorkflowRunCancelledError.is(error)) {\n *     console.log(`Run ${error.runId} was cancelled`);\n *   }\n * }\n * ```\n */\nexport class WorkflowRunCancelledError extends WorkflowError {\n  runId: string;\n\n  constructor(runId: string) {\n    super(`Workflow run \"${runId}\" cancelled`, {});\n    this.name = 'WorkflowRunCancelledError';\n    this.runId = runId;\n  }\n\n  static is(value: unknown): value is WorkflowRunCancelledError {\n    return isError(value) && value.name === 'WorkflowRunCancelledError';\n  }\n}\n\n/**\n * Thrown when attempting to operate on a workflow run that requires a newer World version.\n *\n * This error occurs when a run was created with a newer spec version than the\n * current World implementation supports. 
To resolve this, upgrade your\n * `workflow` packages to a version that supports the required spec version.\n *\n * Use the static `RunNotSupportedError.is()` method for type-safe checking in\n * catch blocks.\n *\n * @example\n * ```ts\n * import { RunNotSupportedError } from \"workflow/internal/errors\";\n *\n * try {\n *   const status = await run.status;\n * } catch (error) {\n *   if (RunNotSupportedError.is(error)) {\n *     console.error(\n *       `Run requires spec v${error.runSpecVersion}, ` +\n *       `but world supports v${error.worldSpecVersion}`\n *     );\n *   }\n * }\n * ```\n */\nexport class RunNotSupportedError extends WorkflowError {\n  readonly runSpecVersion: number;\n  readonly worldSpecVersion: number;\n\n  constructor(runSpecVersion: number, worldSpecVersion: number) {\n    super(\n      `Run requires spec version ${runSpecVersion}, but world supports version ${worldSpecVersion}. ` +\n        `Please upgrade 'workflow' package.`\n    );\n    this.name = 'RunNotSupportedError';\n    this.runSpecVersion = runSpecVersion;\n    this.worldSpecVersion = worldSpecVersion;\n  }\n\n  static is(value: unknown): value is RunNotSupportedError {\n    return isError(value) && value.name === 'RunNotSupportedError';\n  }\n}\n\n/**\n * A fatal error is an error that cannot be retried.\n * It will cause the step to fail and the error will\n * be bubbled up to the workflow logic.\n */\nexport class FatalError extends Error {\n  fatal = true;\n\n  constructor(message: string) {\n    super(message);\n    this.name = 'FatalError';\n  }\n\n  static is(value: unknown): value is FatalError {\n    return isError(value) && value.name === 'FatalError';\n  }\n}\n\nexport interface RetryableErrorOptions {\n  /**\n   * The number of milliseconds to wait before retrying the step.\n   * Can also be a duration string (e.g., \"5s\", \"2m\") or a Date object.\n   * If not provided, the step will be retried after 1 second (1000 milliseconds).\n   */\n  retryAfter?: number | 
StringValue | Date;\n}\n\n/**\n * An error that can happen during a step execution, allowing\n * for configuration of the retry behavior.\n */\nexport class RetryableError extends Error {\n  /**\n   * The Date when the step should be retried.\n   */\n  retryAfter: Date;\n\n  constructor(message: string, options: RetryableErrorOptions = {}) {\n    super(message);\n    this.name = 'RetryableError';\n\n    if (options.retryAfter !== undefined) {\n      this.retryAfter = parseDurationToDate(options.retryAfter);\n    } else {\n      // Default to 1 second (1000 milliseconds)\n      this.retryAfter = new Date(Date.now() + 1000);\n    }\n  }\n\n  static is(value: unknown): value is RetryableError {\n    return isError(value) && value.name === 'RetryableError';\n  }\n}\n\nexport const VERCEL_403_ERROR_MESSAGE =\n  'Your current vercel account does not have access to this resource. Use `vercel login` or `vercel switch` to ensure you are linked to the right account.';\n\nexport { RUN_ERROR_CODES, type RunErrorCode } from './error-codes.js';\n", "/**\n * This is the \"standard library\" of steps that we make available to all workflow users.\n * The can be imported like so: `import { fetch } from 'workflow'`. 
and used in workflow.\n * The need to be exported directly in this package and cannot live in `core` to prevent\n * circular dependencies post-compilation.\n */\n\n/**\n * A hoisted `fetch()` function that is executed as a \"step\" function,\n * for use within workflow functions.\n *\n * @see https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API\n */\nexport async function fetch(...args: Parameters<typeof globalThis.fetch>) {\n  'use step';\n  return globalThis.fetch(...args);\n}\n", "import { FatalError } from \"workflow\";\n/**__internal_workflows{\"workflows\":{\"workflows/shopify-order.ts\":{\"default\":{\"workflowId\":\"workflow//./workflows/shopify-order//shopifyOrder\"}}},\"steps\":{\"workflows/shopify-order.ts\":{\"chargePayment\":{\"stepId\":\"step//./workflows/shopify-order//chargePayment\"},\"checkDuplicate\":{\"stepId\":\"step//./workflows/shopify-order//checkDuplicate\"},\"refundPayment\":{\"stepId\":\"step//./workflows/shopify-order//refundPayment\"},\"reserveInventory\":{\"stepId\":\"step//./workflows/shopify-order//reserveInventory\"},\"sendConfirmation\":{\"stepId\":\"step//./workflows/shopify-order//sendConfirmation\"}}}}*/;\nconst checkDuplicate = globalThis[Symbol.for(\"WORKFLOW_USE_STEP\")](\"step//./workflows/shopify-order//checkDuplicate\");\nconst chargePayment = globalThis[Symbol.for(\"WORKFLOW_USE_STEP\")](\"step//./workflows/shopify-order//chargePayment\");\nconst reserveInventory = globalThis[Symbol.for(\"WORKFLOW_USE_STEP\")](\"step//./workflows/shopify-order//reserveInventory\");\nconst refundPayment = globalThis[Symbol.for(\"WORKFLOW_USE_STEP\")](\"step//./workflows/shopify-order//refundPayment\");\nconst sendConfirmation = globalThis[Symbol.for(\"WORKFLOW_USE_STEP\")](\"step//./workflows/shopify-order//sendConfirmation\");\nexport default async function shopifyOrder(orderId, amount, items, email) {\n    // Duplicate check \u2014 skip if already processed\n    await checkDuplicate(orderId);\n    // Charge payment with idempotency 
key\n    const charge = await chargePayment(orderId, amount);\n    // Reserve inventory \u2014 compensate with refund on failure\n    try {\n        await reserveInventory(orderId, items);\n    } catch (error) {\n        if (error instanceof FatalError) {\n            await refundPayment(orderId, charge.id);\n            throw error;\n        }\n        throw error;\n    }\n    // Send confirmation\n    await sendConfirmation(orderId, email);\n    return {\n        orderId,\n        status: \"fulfilled\"\n    };\n}\nshopifyOrder.workflowId = \"workflow//./workflows/shopify-order//shopifyOrder\";\nglobalThis.__private_workflows.set(\"workflow//./workflows/shopify-order//shopifyOrder\", shopifyOrder);\n"],
  "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA,8EAAAA,SAAA;AAEI,QAAI,IAAI;AACZ,QAAI,IAAI,IAAI;AACZ,QAAI,IAAI,IAAI;AACZ,QAAI,IAAI,IAAI;AACZ,QAAI,IAAI,IAAI;AACZ,QAAI,IAAI,IAAI;AAaR,IAAAA,QAAO,UAAU,SAAS,KAAK,SAAS;AACxC,gBAAU,WAAW,CAAC;AACtB,UAAI,OAAO,OAAO;AAClB,UAAI,SAAS,YAAY,IAAI,SAAS,GAAG;AACrC,eAAO,MAAM,GAAG;AAAA,MACpB,WAAW,SAAS,YAAY,SAAS,GAAG,GAAG;AAC3C,eAAO,QAAQ,OAAO,QAAQ,GAAG,IAAI,SAAS,GAAG;AAAA,MACrD;AACA,YAAM,IAAI,MAAM,0DAA0D,KAAK,UAAU,GAAG,CAAC;AAAA,IACjG;AAOI,aAAS,MAAM,KAAK;AACpB,YAAM,OAAO,GAAG;AAChB,UAAI,IAAI,SAAS,KAAK;AAClB;AAAA,MACJ;AACA,UAAI,QAAQ,mIAAmI,KAAK,GAAG;AACvJ,UAAI,CAAC,OAAO;AACR;AAAA,MACJ;AACA,UAAI,IAAI,WAAW,MAAM,CAAC,CAAC;AAC3B,UAAI,QAAQ,MAAM,CAAC,KAAK,MAAM,YAAY;AAC1C,cAAO,MAAK;AAAA,QACR,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK;AACD,iBAAO,IAAI;AAAA,QACf,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK;AACD,iBAAO,IAAI;AAAA,QACf,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK;AACD,iBAAO,IAAI;AAAA,QACf,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK;AACD,iBAAO,IAAI;AAAA,QACf,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK;AACD,iBAAO,IAAI;AAAA,QACf,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK;AACD,iBAAO,IAAI;AAAA,QACf,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK;AACD,iBAAO;AAAA,QACX;AACI,iBAAO;AAAA,MACf;AAAA,IACJ;AArDa;AA4DT,aAAS,SAASC,KAAI;AACtB,UAAI,QAAQ,KAAK,IAAIA,GAAE;AACvB,UAAI,SAAS,GAAG;AACZ,eAAO,KAAK,MAAMA,MAAK,CAAC,IAAI;AAAA,MAChC;AACA,UAAI,SAAS,GAAG;AACZ,eAAO,KAAK,MAAMA,MAAK,CAAC,IAAI;AAAA,MAChC;AACA,UAAI,SAAS,GAAG;AACZ,eAAO,KAAK,MAAMA,MAAK,CAAC,IAAI;AAAA,MAChC;AACA,UAAI,SAAS,GAAG;AACZ,eAAO,KAAK,MAAMA,MAAK,CAAC,IAAI;AAAA,MAChC;AACA,aAAOA,MAAK;AAAA,IAChB;AAfa;AAsBT,aAAS,QAAQA,KAAI;AACrB,UAAI,QAAQ,KAAK,IAAIA,GAAE;AACvB,UAAI,SAAS,GAAG;AACZ,eAAO,OAAOA,KAAI,OAAO,GAAG,KAAK;AAAA,MACrC;AACA,UAAI,SAAS,GAAG;AACZ,eAAO,OAAOA,KAAI,OAAO,GAAG,MAAM;AAAA,MACtC;AACA,UAAI,SAAS,GAAG;AACZ,eAAO,OAAOA,KAAI,OAAO,GAAG,QAAQ;AAAA,MACxC;AACA,UAAI,SAAS,GAAG;AACZ,eAAO,OAAOA,KAAI,OAAO,GAAG,QAA
Q;AAAA,MACxC;AACA,aAAOA,MAAK;AAAA,IAChB;AAfa;AAkBT,aAAS,OAAOA,KAAI,OAAO,GAAG,MAAM;AACpC,UAAI,WAAW,SAAS,IAAI;AAC5B,aAAO,KAAK,MAAMA,MAAK,CAAC,IAAI,MAAM,QAAQ,WAAW,MAAM;AAAA,IAC/D;AAHa;AAAA;AAAA;;;ACvIb,gBAAe;;;ACUZ,SAAA,QAAA,OAAA;AACH,SAAS,OAAQ,UAAc,YAAA,UAAA,QAAA,UAAA,SAAA,aAAA;;AAD5B;AAggBQ,IAAA,aAAA,cAAuB,MAAA;EA3gBlC,OA2gBkC;;;EACvB,QAAA;EAET,YAAY,SAAA;AACV,UACE,OAAA;SACE,OAAA;;SAGJ,GAAK,OAAA;AACL,WAAK,QAAA,KAAA,KAAmB,MAAA,SAAgB;EAC1C;;;;AC1gBC,IAAA,QAAA,WAAA,uBAAA,IAAA,mBAAA,CAAA,EAAA,8CAAA;;;ACVH,IAAM,iBAAiB,WAAW,uBAAO,IAAI,mBAAmB,CAAC,EAAE,iDAAiD;AACpH,IAAM,gBAAgB,WAAW,uBAAO,IAAI,mBAAmB,CAAC,EAAE,gDAAgD;AAClH,IAAM,mBAAmB,WAAW,uBAAO,IAAI,mBAAmB,CAAC,EAAE,mDAAmD;AACxH,IAAM,gBAAgB,WAAW,uBAAO,IAAI,mBAAmB,CAAC,EAAE,gDAAgD;AAClH,IAAM,mBAAmB,WAAW,uBAAO,IAAI,mBAAmB,CAAC,EAAE,mDAAmD;AACxH,eAAO,aAAoC,SAAS,QAAQ,OAAO,OAAO;AAEtE,QAAM,eAAe,OAAO;AAE5B,QAAM,SAAS,MAAM,cAAc,SAAS,MAAM;AAElD,MAAI;AACA,UAAM,iBAAiB,SAAS,KAAK;AAAA,EACzC,SAAS,OAAO;AACZ,QAAI,iBAAiB,YAAY;AAC7B,YAAM,cAAc,SAAS,OAAO,EAAE;AACtC,YAAM;AAAA,IACV;AACA,UAAM;AAAA,EACV;AAEA,QAAM,iBAAiB,SAAS,KAAK;AACrC,SAAO;AAAA,IACH;AAAA,IACA,QAAQ;AAAA,EACZ;AACJ;AArB8B;AAsB9B,aAAa,aAAa;AAC1B,WAAW,oBAAoB,IAAI,qDAAqD,YAAY;",
  "names": ["module", "ms"]
}
 +`; + +export const POST = workflowEntrypoint(workflowCode); diff --git a/tests/fixtures/workflow-skills/duplicate-webhook-order/.workflow-vitest/workflows.mjs.debug.json b/tests/fixtures/workflow-skills/duplicate-webhook-order/.workflow-vitest/workflows.mjs.debug.json new file mode 100644 index 0000000000..62db3359b7 --- /dev/null +++ b/tests/fixtures/workflow-skills/duplicate-webhook-order/.workflow-vitest/workflows.mjs.debug.json @@ -0,0 +1,6 @@ +{ + "workflowFiles": [ + "/Users/johnlindquist/dev/workflow/tests/fixtures/workflow-skills/duplicate-webhook-order/workflows/shopify-order.ts" + ], + "serdeOnlyFiles": [] +} From b0d41ff7593ae424133050db2565584a7508b9c2 Mon Sep 17 00:00:00 2001 From: John Lindquist Date: Tue, 31 Mar 2026 01:46:19 -0600 Subject: [PATCH 03/17] fix: polish cookbook public routing Keep the cookbook surface canonical at /cookbooks so docs navigation, sitemap output, and AI/chat entry points stop leaking the legacy /docs/cookbook paths. Correct the approval-chain example so the docs teach the intended sequential approval semantics instead of implying the workflow approves after the first successful level. This keeps the cookbook aligned with the docs quality bar and avoids misleading readers with inconsistent behavior. 
Ploop-Iter: 2 --- .../app/[lang]/cookbooks/[[...slug]]/page.tsx | 10 +++++--- docs/app/[lang]/docs/[[...slug]]/page.tsx | 7 +++++- docs/app/[lang]/sitemap.md/route.ts | 6 ++--- docs/app/sitemap.md/route.ts | 6 ++--- .../cookbook/approvals/approval-chain.mdx | 20 ++++++++++----- docs/lib/geistdocs/cookbook-source.ts | 25 +++++++++---------- packages/ai/package.json | 2 +- packages/astro/package.json | 2 +- packages/builders/package.json | 2 +- packages/cli/package.json | 2 +- packages/core/package.json | 2 +- packages/errors/package.json | 2 +- packages/nest/package.json | 2 +- packages/next/package.json | 2 +- packages/nitro/package.json | 2 +- packages/nuxt/package.json | 2 +- packages/rollup/package.json | 2 +- packages/serde/package.json | 2 +- packages/sveltekit/package.json | 2 +- packages/swc-plugin-workflow/package.json | 2 +- packages/typescript-plugin/package.json | 2 +- packages/utils/package.json | 2 +- packages/vite/package.json | 2 +- packages/vitest/package.json | 2 +- packages/web-shared/package.json | 2 +- packages/web/package.json | 2 +- packages/workflow/package.json | 2 +- packages/world-local/package.json | 2 +- packages/world-postgres/package.json | 2 +- packages/world-testing/package.json | 2 +- packages/world-vercel/package.json | 2 +- packages/world/package.json | 2 +- 32 files changed, 71 insertions(+), 55 deletions(-) diff --git a/docs/app/[lang]/cookbooks/[[...slug]]/page.tsx b/docs/app/[lang]/cookbooks/[[...slug]]/page.tsx index 60eab998a6..b4d325b2b4 100644 --- a/docs/app/[lang]/cookbooks/[[...slug]]/page.tsx +++ b/docs/app/[lang]/cookbooks/[[...slug]]/page.tsx @@ -4,6 +4,7 @@ import { createRelativeLink } from 'fumadocs-ui/mdx'; import type { Metadata } from 'next'; import { notFound } from 'next/navigation'; import { CookbookExplorer } from '@/components/geistdocs/cookbook-explorer'; +import { rewriteCookbookUrl } from '@/lib/geistdocs/cookbook-source'; import { AskAI } from '@/components/geistdocs/ask-ai'; import { CopyPage } from 
'@/components/geistdocs/copy-page'; import { @@ -32,6 +33,9 @@ const Page = async ({ params }: PageProps<'/[lang]/cookbooks/[[...slug]]'>) => { notFound(); } + const publicUrl = rewriteCookbookUrl(page.url); + const publicPage = { ...page, url: publicUrl } as typeof page; + const markdown = await getLLMText(page); const MDX = page.data.body; @@ -47,8 +51,8 @@ const Page = async ({ params }: PageProps<'/[lang]/cookbooks/[[...slug]]'>) => { - - + + ), }} @@ -59,7 +63,7 @@ const Page = async ({ params }: PageProps<'/[lang]/cookbooks/[[...slug]]'>) => { null; const Page = async ({ params }: PageProps<'/[lang]/docs/[[...slug]]'>) => { const { slug, lang } = await params; + if (Array.isArray(slug) && slug[0] === 'cookbook') { + const rest = slug.slice(1); + redirect(`/${lang}/cookbooks${rest.length ? `/${rest.join('/')}` : ''}`); + } + const page = source.getPage(slug, lang); if (!page) { diff --git a/docs/app/[lang]/sitemap.md/route.ts b/docs/app/[lang]/sitemap.md/route.ts index 1912d496d9..7c193e126d 100644 --- a/docs/app/[lang]/sitemap.md/route.ts +++ b/docs/app/[lang]/sitemap.md/route.ts @@ -1,4 +1,5 @@ import type { Node, Root } from 'fumadocs-core/page-tree'; +import { rewriteCookbookUrl } from '@/lib/geistdocs/cookbook-source'; import { source } from '@/lib/geistdocs/source'; export const revalidate = false; @@ -16,10 +17,10 @@ export async function GET( if ('type' in node) { if (node.type === 'page') { - mdText += `${indent}- [${node.name}](${node.url})\n`; + mdText += `${indent}- [${node.name}](${rewriteCookbookUrl(node.url)})\n`; } else if (node.type === 'folder') { if (node.index) { - mdText += `${indent}- [${node.name}](${node.index.url})\n`; + mdText += `${indent}- [${node.name}](${rewriteCookbookUrl(node.index.url)})\n`; } else { mdText += `${indent}- ${node.name}\n`; } @@ -30,7 +31,6 @@ export async function GET( } } } else if (node.children.length > 0) { - // Root node for (const child of node.children) { traverseTree(child, depth); } diff --git 
a/docs/app/sitemap.md/route.ts b/docs/app/sitemap.md/route.ts index 6129b002e7..fa01ffc501 100644 --- a/docs/app/sitemap.md/route.ts +++ b/docs/app/sitemap.md/route.ts @@ -1,4 +1,5 @@ import type { Node, Root } from 'fumadocs-core/page-tree'; +import { rewriteCookbookUrl } from '@/lib/geistdocs/cookbook-source'; import { i18n } from '@/lib/geistdocs/i18n'; import { source } from '@/lib/geistdocs/source'; @@ -13,10 +14,10 @@ export async function GET(_req: Request) { if ('type' in node) { if (node.type === 'page') { - mdText += `${indent}- [${node.name}](${node.url})\n`; + mdText += `${indent}- [${node.name}](${rewriteCookbookUrl(node.url)})\n`; } else if (node.type === 'folder') { if (node.index) { - mdText += `${indent}- [${node.name}](${node.index.url})\n`; + mdText += `${indent}- [${node.name}](${rewriteCookbookUrl(node.index.url)})\n`; } else { mdText += `${indent}- ${node.name}\n`; } @@ -27,7 +28,6 @@ export async function GET(_req: Request) { } } } else if (node.children.length > 0) { - // Root node for (const child of node.children) { traverseTree(child, depth); } diff --git a/docs/content/docs/cookbook/approvals/approval-chain.mdx b/docs/content/docs/cookbook/approvals/approval-chain.mdx index 97a84336ce..3302b43aca 100644 --- a/docs/content/docs/cookbook/approvals/approval-chain.mdx +++ b/docs/content/docs/cookbook/approvals/approval-chain.mdx @@ -9,15 +9,18 @@ Use the approval chain pattern when a request must pass through multiple approva ## Pattern -Determine approval levels from the request, then iterate through each. At each level, race a hook against a timeout — if the timeout fires, escalate to the next level. +Build the required approval levels from the request, then loop through them. Each level waits for a hook or times out. A rejection stops the workflow immediately; the request is only approved after the entire chain completes. 
```typescript lineNumbers import { defineHook, sleep } from "workflow"; type ApprovalRole = "manager" | "director" | "vp"; +type ApprovalDecision = { approved: boolean; comment?: string }; declare function notifyApprover(expenseId: string, role: ApprovalRole): Promise; // @setup +const approvalHook = defineHook(); + export async function approvalChain(expenseId: string, amount: number) { "use workflow"; @@ -26,14 +29,15 @@ export async function approvalChain(expenseId: string, amount: number) { amount < 5000 ? [{ role: "manager", timeout: "10s" }, { role: "director", timeout: "8s" }] : [{ role: "manager", timeout: "10s" }, { role: "director", timeout: "8s" }, { role: "vp", timeout: "6s" }]; + let approvals = 0; + for (const level of levels) { await notifyApprover(expenseId, level.role); - const levelHook = defineHook<{ approved: boolean; comment?: string }>(); - const hook = levelHook.create({ token: `approval:${expenseId}:${level.role}` }); - const result = await Promise.race([ - hook.then((p) => ({ type: "decision" as const, payload: p })), + approvalHook + .create({ token: `approval:${expenseId}:${level.role}` }) + .then((payload) => ({ type: "decision" as const, payload })), sleep(level.timeout).then(() => ({ type: "timeout" as const })), ]); @@ -41,7 +45,11 @@ export async function approvalChain(expenseId: string, amount: number) { if (!result.payload.approved) { return { expenseId, status: "rejected", decidedBy: level.role }; } - return { expenseId, status: "approved", decidedBy: level.role }; + approvals++; + } + + if (approvals === levels.length) { + return { expenseId, status: "approved", decidedBy: levels[levels.length - 1]?.role }; } return { expenseId, status: "expired" }; diff --git a/docs/lib/geistdocs/cookbook-source.ts b/docs/lib/geistdocs/cookbook-source.ts index d4c6db71ad..69809bf546 100644 --- a/docs/lib/geistdocs/cookbook-source.ts +++ b/docs/lib/geistdocs/cookbook-source.ts @@ -1,5 +1,11 @@ import { source } from './source'; +const 
COOKBOOK_URL_RE = /\/docs\/cookbook(?=\/|$)/; + +export function rewriteCookbookUrl(url: string): string { + return url.replace(COOKBOOK_URL_RE, '/cookbooks'); +} + /** * Extract the cookbook subtree from the docs page tree, * rewriting URLs from /docs/cookbook/... to /cookbooks/... @@ -7,7 +13,6 @@ import { source } from './source'; export function getCookbookTree(lang: string) { const fullTree = source.pageTree[lang]; - // Find the cookbook folder in the tree const cookbookNode = fullTree.children.find( (node) => node.type === 'folder' && node.name === 'Cookbook' ); @@ -16,7 +21,6 @@ export function getCookbookTree(lang: string) { return { name: 'Cookbooks', children: [] }; } - // Deep-clone and rewrite URLs return { name: 'Cookbooks', children: rewriteUrls(cookbookNode.children), @@ -25,27 +29,22 @@ export function getCookbookTree(lang: string) { function rewriteUrls(nodes: T[]): T[] { return nodes.map((node) => { - const n = node as Record; - const rewritten = { ...n }; + const rewritten = { ...(node as Record) }; if (typeof rewritten.url === 'string') { - rewritten.url = rewritten.url.replace( - /\/docs\/cookbook\//, - '/cookbooks/' - ); + rewritten.url = rewriteCookbookUrl(rewritten.url); } if (Array.isArray(rewritten.children)) { rewritten.children = rewriteUrls(rewritten.children); } - // Handle index page inside folders if (rewritten.index && typeof rewritten.index === 'object') { - const idx = { ...(rewritten.index as Record) }; - if (typeof idx.url === 'string') { - idx.url = idx.url.replace(/\/docs\/cookbook\//, '/cookbooks/'); + const index = { ...(rewritten.index as Record) }; + if (typeof index.url === 'string') { + index.url = rewriteCookbookUrl(index.url); } - rewritten.index = idx; + rewritten.index = index; } return rewritten as T; diff --git a/packages/ai/package.json b/packages/ai/package.json index eeb76a7c35..b76c22b31b 100644 --- a/packages/ai/package.json +++ b/packages/ai/package.json @@ -93,4 +93,4 @@ "@ai-sdk/openai": "^3.0.0", 
"@ai-sdk/xai": "^3.0.0" } -} +} \ No newline at end of file diff --git a/packages/astro/package.json b/packages/astro/package.json index 63f3ab4e43..af610f2088 100644 --- a/packages/astro/package.json +++ b/packages/astro/package.json @@ -38,4 +38,4 @@ "@workflow/tsconfig": "workspace:*", "astro": "5.18.0" } -} +} \ No newline at end of file diff --git a/packages/builders/package.json b/packages/builders/package.json index 6c83b8fa0f..3da9b435b0 100644 --- a/packages/builders/package.json +++ b/packages/builders/package.json @@ -52,4 +52,4 @@ "json5": "2.2.3", "tinyglobby": "0.2.15" } -} +} \ No newline at end of file diff --git a/packages/cli/package.json b/packages/cli/package.json index c6aa29e880..2c7e6e52f3 100644 --- a/packages/cli/package.json +++ b/packages/cli/package.json @@ -71,4 +71,4 @@ "xdg-app-paths": "5.1.0", "zod": "catalog:" } -} +} \ No newline at end of file diff --git a/packages/core/package.json b/packages/core/package.json index 861a0be6cf..ea77b3f762 100644 --- a/packages/core/package.json +++ b/packages/core/package.json @@ -113,4 +113,4 @@ "optional": true } } -} +} \ No newline at end of file diff --git a/packages/errors/package.json b/packages/errors/package.json index 5587244de7..8144a1d23c 100644 --- a/packages/errors/package.json +++ b/packages/errors/package.json @@ -38,4 +38,4 @@ "@workflow/utils": "workspace:*", "ms": "2.1.3" } -} +} \ No newline at end of file diff --git a/packages/nest/package.json b/packages/nest/package.json index 10aabfdd34..bdf64f5615 100644 --- a/packages/nest/package.json +++ b/packages/nest/package.json @@ -71,4 +71,4 @@ "url": "git+https://github.com/vercel/workflow.git", "directory": "packages/nest" } -} +} \ No newline at end of file diff --git a/packages/next/package.json b/packages/next/package.json index ad8cc50d13..db8d7c3e36 100644 --- a/packages/next/package.json +++ b/packages/next/package.json @@ -55,4 +55,4 @@ "optional": true } } -} +} \ No newline at end of file diff --git 
a/packages/nitro/package.json b/packages/nitro/package.json index caa9776e45..d303118490 100644 --- a/packages/nitro/package.json +++ b/packages/nitro/package.json @@ -41,4 +41,4 @@ "nitro": "catalog:", "vite": "7.1.12" } -} +} \ No newline at end of file diff --git a/packages/nuxt/package.json b/packages/nuxt/package.json index bff07ec50a..d2156a693e 100644 --- a/packages/nuxt/package.json +++ b/packages/nuxt/package.json @@ -45,4 +45,4 @@ "@workflow/tsconfig": "workspace:*", "nuxt": "4.4.2" } -} +} \ No newline at end of file diff --git a/packages/rollup/package.json b/packages/rollup/package.json index bbec93a241..6d037c9745 100644 --- a/packages/rollup/package.json +++ b/packages/rollup/package.json @@ -35,4 +35,4 @@ "@workflow/tsconfig": "workspace:*", "rollup": "^4.53.2" } -} +} \ No newline at end of file diff --git a/packages/serde/package.json b/packages/serde/package.json index ed37c68c16..63291046fb 100644 --- a/packages/serde/package.json +++ b/packages/serde/package.json @@ -32,4 +32,4 @@ "@types/node": "catalog:", "@workflow/tsconfig": "workspace:*" } -} +} \ No newline at end of file diff --git a/packages/sveltekit/package.json b/packages/sveltekit/package.json index 39223f1d32..9d4900c926 100644 --- a/packages/sveltekit/package.json +++ b/packages/sveltekit/package.json @@ -40,4 +40,4 @@ "@workflow/tsconfig": "workspace:*", "vite": "7.1.12" } -} +} \ No newline at end of file diff --git a/packages/swc-plugin-workflow/package.json b/packages/swc-plugin-workflow/package.json index 52eaff69cc..4b722b5f5a 100644 --- a/packages/swc-plugin-workflow/package.json +++ b/packages/swc-plugin-workflow/package.json @@ -35,4 +35,4 @@ "prepublishOnly": "pnpm build" }, "preferUnplugged": true -} +} \ No newline at end of file diff --git a/packages/typescript-plugin/package.json b/packages/typescript-plugin/package.json index cc9d4353a7..e09ef89a1f 100644 --- a/packages/typescript-plugin/package.json +++ b/packages/typescript-plugin/package.json @@ -41,4 +41,4 @@ 
"typescript": "catalog:", "vitest": "catalog:" } -} +} \ No newline at end of file diff --git a/packages/utils/package.json b/packages/utils/package.json index 38c576b6b5..3e31eb9bd1 100644 --- a/packages/utils/package.json +++ b/packages/utils/package.json @@ -50,4 +50,4 @@ "dependencies": { "ms": "2.1.3" } -} +} \ No newline at end of file diff --git a/packages/vite/package.json b/packages/vite/package.json index c296eae02a..cafcb51b4b 100644 --- a/packages/vite/package.json +++ b/packages/vite/package.json @@ -36,4 +36,4 @@ "@workflow/tsconfig": "workspace:*", "vite": "7.1.12" } -} +} \ No newline at end of file diff --git a/packages/vitest/package.json b/packages/vitest/package.json index b250186756..18c7c10335 100644 --- a/packages/vitest/package.json +++ b/packages/vitest/package.json @@ -49,4 +49,4 @@ "optional": true } } -} +} \ No newline at end of file diff --git a/packages/web-shared/package.json b/packages/web-shared/package.json index 4ea4fcc5c6..b2ed5b43c9 100644 --- a/packages/web-shared/package.json +++ b/packages/web-shared/package.json @@ -75,4 +75,4 @@ "typescript": "catalog:", "vitest": "catalog:" } -} +} \ No newline at end of file diff --git a/packages/web/package.json b/packages/web/package.json index 5030d36b07..e0fe3f28a4 100644 --- a/packages/web/package.json +++ b/packages/web/package.json @@ -87,4 +87,4 @@ "vitest": "^3", "jsdom": "^26" } -} +} \ No newline at end of file diff --git a/packages/workflow/package.json b/packages/workflow/package.json index 3d7efe79c1..6794ba8e56 100644 --- a/packages/workflow/package.json +++ b/packages/workflow/package.json @@ -92,4 +92,4 @@ "optional": true } } -} +} \ No newline at end of file diff --git a/packages/world-local/package.json b/packages/world-local/package.json index 4b1c92c301..4589c3e766 100644 --- a/packages/world-local/package.json +++ b/packages/world-local/package.json @@ -55,4 +55,4 @@ "optional": true } } -} +} \ No newline at end of file diff --git 
a/packages/world-postgres/package.json b/packages/world-postgres/package.json index 1d52b9c811..797b203273 100644 --- a/packages/world-postgres/package.json +++ b/packages/world-postgres/package.json @@ -71,4 +71,4 @@ "keywords": [], "author": "", "packageManager": "pnpm@10.15.1" -} +} \ No newline at end of file diff --git a/packages/world-testing/package.json b/packages/world-testing/package.json index 333d8f38b4..6ecd9698de 100644 --- a/packages/world-testing/package.json +++ b/packages/world-testing/package.json @@ -44,4 +44,4 @@ "keywords": [], "author": "", "packageManager": "pnpm@10.15.1" -} +} \ No newline at end of file diff --git a/packages/world-vercel/package.json b/packages/world-vercel/package.json index 1428775b04..3b66a50759 100644 --- a/packages/world-vercel/package.json +++ b/packages/world-vercel/package.json @@ -53,4 +53,4 @@ "genversion": "3.2.0", "vitest": "catalog:" } -} +} \ No newline at end of file diff --git a/packages/world/package.json b/packages/world/package.json index d473cdb588..85ee71960c 100644 --- a/packages/world/package.json +++ b/packages/world/package.json @@ -36,4 +36,4 @@ "keywords": [], "author": "", "packageManager": "pnpm@10.15.1" -} +} \ No newline at end of file From ee038ac43afe78603c2ebc8c2f44a7fc42de23d7 Mon Sep 17 00:00:00 2001 From: John Lindquist Date: Tue, 31 Mar 2026 02:32:37 -0600 Subject: [PATCH 04/17] docs: canonicalize cookbook doc routes Align cookbook-facing docs outputs with the new public route so redirects, sitemap entries, and LLM-facing exports stay consistent. This keeps the polished cookbook section discoverable at its canonical location while trimming the last demo-heavy recipe examples toward the same concise style as the rest of the docs. 
Ploop-Iter: 3 --- docs/app/[lang]/docs/[[...slug]]/page.tsx | 4 +- docs/app/[lang]/llms.mdx/[[...slug]]/route.ts | 7 ++- docs/app/[lang]/llms.txt/route.ts | 15 ++++-- docs/app/sitemap.ts | 3 +- .../observability/namespaced-streams.mdx | 3 +- .../cookbook/resilience/dead-letter-queue.mdx | 49 +++++++++---------- .../resilience/retryable-rate-limit.mdx | 27 +++------- 7 files changed, 50 insertions(+), 58 deletions(-) diff --git a/docs/app/[lang]/docs/[[...slug]]/page.tsx b/docs/app/[lang]/docs/[[...slug]]/page.tsx index 37e5f20fe3..12a1c2648f 100644 --- a/docs/app/[lang]/docs/[[...slug]]/page.tsx +++ b/docs/app/[lang]/docs/[[...slug]]/page.tsx @@ -2,7 +2,7 @@ import { Step, Steps } from 'fumadocs-ui/components/steps'; import { Tab, Tabs } from 'fumadocs-ui/components/tabs'; import { createRelativeLink } from 'fumadocs-ui/mdx'; import type { Metadata } from 'next'; -import { notFound, redirect } from 'next/navigation'; +import { notFound, permanentRedirect } from 'next/navigation'; import { AgentTraces } from '@/components/custom/agent-traces'; import { CookbookExplorer } from '@/components/geistdocs/cookbook-explorer'; import { FluidComputeCallout } from '@/components/custom/fluid-compute-callout'; @@ -34,7 +34,7 @@ const Page = async ({ params }: PageProps<'/[lang]/docs/[[...slug]]'>) => { if (Array.isArray(slug) && slug[0] === 'cookbook') { const rest = slug.slice(1); - redirect(`/${lang}/cookbooks${rest.length ? `/${rest.join('/')}` : ''}`); + permanentRedirect(`/${lang}/cookbooks${rest.length ? 
`/${rest.join('/')}` : ''}`); } const page = source.getPage(slug, lang); diff --git a/docs/app/[lang]/llms.mdx/[[...slug]]/route.ts b/docs/app/[lang]/llms.mdx/[[...slug]]/route.ts index 8f6eb71527..e9dc69bdf2 100644 --- a/docs/app/[lang]/llms.mdx/[[...slug]]/route.ts +++ b/docs/app/[lang]/llms.mdx/[[...slug]]/route.ts @@ -1,7 +1,10 @@ import { notFound } from 'next/navigation'; +import { rewriteCookbookUrl } from '@/lib/geistdocs/cookbook-source'; import { getLLMText, source } from '@/lib/geistdocs/source'; import { i18n } from '@/lib/geistdocs/i18n'; +const COOKBOOK_URL_RE_GLOBAL = /\/docs\/cookbook(?=\/|$)/g; + export const revalidate = false; export async function GET( @@ -18,8 +21,10 @@ export async function GET( const sitemapPath = lang === i18n.defaultLanguage ? '/sitemap.md' : `/${lang}/sitemap.md`; + const text = await getLLMText(page); + return new Response( - (await getLLMText(page)) + + text.replace(COOKBOOK_URL_RE_GLOBAL, '/cookbooks') + `\n\n## Sitemap [Overview of all docs pages](${sitemapPath})\n`, { diff --git a/docs/app/[lang]/llms.txt/route.ts b/docs/app/[lang]/llms.txt/route.ts index 96f061223d..90eebd3b2d 100644 --- a/docs/app/[lang]/llms.txt/route.ts +++ b/docs/app/[lang]/llms.txt/route.ts @@ -1,6 +1,8 @@ import type { NextRequest } from 'next/server'; import { getLLMText, source } from '@/lib/geistdocs/source'; +const COOKBOOK_URL_RE_GLOBAL = /\/docs\/cookbook(?=\/|$)/g; + export const revalidate = false; export const GET = async ( @@ -11,9 +13,12 @@ export const GET = async ( const scan = source.getPages(lang).map(getLLMText); const scanned = await Promise.all(scan); - return new Response(scanned.join('\n\n'), { - headers: { - 'Content-Type': 'text/markdown; charset=utf-8', - }, - }); + return new Response( + scanned.join('\n\n').replace(COOKBOOK_URL_RE_GLOBAL, '/cookbooks'), + { + headers: { + 'Content-Type': 'text/markdown; charset=utf-8', + }, + } + ); }; diff --git a/docs/app/sitemap.ts b/docs/app/sitemap.ts index 673ea996d2..9f3be47eba 
100644 --- a/docs/app/sitemap.ts +++ b/docs/app/sitemap.ts @@ -1,5 +1,6 @@ import type { MetadataRoute } from 'next'; +import { rewriteCookbookUrl } from '@/lib/geistdocs/cookbook-source'; import { source } from '@/lib/geistdocs/source'; const protocol = process.env.NODE_ENV === 'production' ? 'https' : 'http'; @@ -17,7 +18,7 @@ export default function sitemap(): MetadataRoute.Sitemap { changeFrequency: 'weekly' as const, lastModified: undefined, priority: 0.5, - url: url(page.url), + url: url(rewriteCookbookUrl(page.url)), }); } diff --git a/docs/content/docs/cookbook/observability/namespaced-streams.mdx b/docs/content/docs/cookbook/observability/namespaced-streams.mdx index 9a27855257..162fe678f3 100644 --- a/docs/content/docs/cookbook/observability/namespaced-streams.mdx +++ b/docs/content/docs/cookbook/observability/namespaced-streams.mdx @@ -38,5 +38,4 @@ export async function namespacedStreams(runId: string, topic: string) { ## Key APIs - [`"use workflow"`](/docs/api-reference/workflow/use-workflow) — declares the orchestrator function -- [`getWritable()`](/docs/api-reference/workflow/get-writable) — creates namespaced streams with `{ namespace: "..." }` -- [`getWorkflowMetadata()`](/docs/api-reference/workflow/get-workflow-metadata) — retrieves the workflow run ID for telemetry correlation +- [`Promise.all()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise/all) — publishes independent outputs in parallel diff --git a/docs/content/docs/cookbook/resilience/dead-letter-queue.mdx b/docs/content/docs/cookbook/resilience/dead-letter-queue.mdx index 729343820f..639f5d51cd 100644 --- a/docs/content/docs/cookbook/resilience/dead-letter-queue.mdx +++ b/docs/content/docs/cookbook/resilience/dead-letter-queue.mdx @@ -9,47 +9,44 @@ Use dead letter queue when messages that fail repeatedly should be moved aside f ## Pattern -A step uses `getStepMetadata()` to track the attempt count. 
If the attempt reaches the maximum, return a dead-lettered status instead of throwing again. This prevents infinite retry loops while preserving the message. +Retry delivery with plain JavaScript control flow. If a message still fails after the last attempt, move it aside and return a dead-lettered result. ```typescript lineNumbers -import { getStepMetadata } from "workflow"; - declare function deliverMessage(messageId: string): Promise; // @setup +declare function moveToDeadLetter(messageId: string): Promise; // @setup -const MAX_ATTEMPTS = 3; - -export async function deadLetterQueue(messages: string[]) { +export async function deadLetterQueue( + messages: string[], + maxAttempts: number = 3 +) { "use workflow"; const results = []; for (const messageId of messages) { - const result = await processMessage(messageId); - results.push(result); + for (let attempt = 1; attempt <= maxAttempts; attempt++) { + try { + await deliverMessage(messageId); + results.push({ messageId, status: "delivered", attempts: attempt }); + break; + } catch { + if (attempt === maxAttempts) { + await moveToDeadLetter(messageId); + results.push({ + messageId, + status: "dead_lettered", + attempts: attempt, + }); + } + } + } } return results; } - -async function processMessage(messageId: string) { - "use step"; - - const { attempt } = getStepMetadata(); - - try { - await deliverMessage(messageId); - return { messageId, status: "delivered", attempts: attempt }; - } catch (error) { - if (attempt >= MAX_ATTEMPTS) { - return { messageId, status: "dead_lettered", attempts: attempt }; - } - throw error; - } -} ``` ## Key APIs - [`"use workflow"`](/docs/api-reference/workflow/use-workflow) — declares the orchestrator function -- [`"use step"`](/docs/api-reference/workflow/use-step) — marks functions with full Node.js access -- [`getStepMetadata()`](/docs/api-reference/step/get-step-metadata) — provides the current attempt number +- [Workflows & Steps](/docs/foundations/workflows-and-steps) — step 
helpers keep the workflow logic short and durable diff --git a/docs/content/docs/cookbook/resilience/retryable-rate-limit.mdx b/docs/content/docs/cookbook/resilience/retryable-rate-limit.mdx index fa87c9cb51..2416395485 100644 --- a/docs/content/docs/cookbook/resilience/retryable-rate-limit.mdx +++ b/docs/content/docs/cookbook/resilience/retryable-rate-limit.mdx @@ -9,40 +9,25 @@ Use retryable rate limit when an external API returns 429 and you want the runti ## Pattern -A step throws `RetryableError` with a `retryAfter` duration when it receives a 429 response. The runtime automatically reschedules the step after the specified delay — no manual sleep loops needed. +Keep the workflow itself boring. The CRM step can throw `RetryableError` on 429, and the workflow code stays a normal fetch-then-store sequence. ```typescript lineNumbers -import { RetryableError } from "workflow"; +type Contact = { id: string; email: string }; -declare function upsertIntoWarehouse(contactId: string, contact: unknown): Promise; // @setup +declare function fetchContactFromCrm(contactId: string): Promise; // @setup throws RetryableError on 429 +declare function upsertIntoWarehouse(contact: Contact): Promise; // @setup export async function syncCrmContact(contactId: string) { "use workflow"; const contact = await fetchContactFromCrm(contactId); - await upsertIntoWarehouse(contactId, contact); + await upsertIntoWarehouse(contact); return { contactId, status: "synced" }; } - -async function fetchContactFromCrm(contactId: string) { - "use step"; - - const res = await fetch(`https://crm.example.com/contacts/${contactId}`); - - if (res.status === 429) { - const retryAfterMs = parseInt(res.headers.get("retry-after") || "2000"); - throw new RetryableError("CRM rate-limited (429)", { - retryAfter: retryAfterMs, - }); - } - - return res.json(); -} ``` ## Key APIs - [`"use workflow"`](/docs/api-reference/workflow/use-workflow) — declares the orchestrator function -- [`"use 
step"`](/docs/api-reference/workflow/use-step) — marks functions with full Node.js access -- [`RetryableError`](/docs/api-reference/workflow/retryable-error) — signals the runtime to retry after a delay +- [`RetryableError`](/docs/api-reference/workflow/retryable-error) — lets a step ask the runtime to retry after a delay From 74f39e2acc6b5207adaf162424c396e954e288d0 Mon Sep 17 00:00:00 2001 From: John Lindquist Date: Tue, 31 Mar 2026 03:32:07 -0600 Subject: [PATCH 05/17] ploop: iteration 4 checkpoint Automated checkpoint commit. Ploop-Iter: 4 --- docs/app/[lang]/docs/[[...slug]]/page.tsx | 6 ++-- docs/app/[lang]/llms.mdx/[[...slug]]/route.ts | 6 ++-- docs/app/[lang]/llms.txt/route.ts | 16 ++++----- docs/lib/geistdocs/cookbook-source.ts | 35 +++++++++++++++---- 4 files changed, 41 insertions(+), 22 deletions(-) diff --git a/docs/app/[lang]/docs/[[...slug]]/page.tsx b/docs/app/[lang]/docs/[[...slug]]/page.tsx index 12a1c2648f..feb132e7c1 100644 --- a/docs/app/[lang]/docs/[[...slug]]/page.tsx +++ b/docs/app/[lang]/docs/[[...slug]]/page.tsx @@ -3,6 +3,7 @@ import { Tab, Tabs } from 'fumadocs-ui/components/tabs'; import { createRelativeLink } from 'fumadocs-ui/mdx'; import type { Metadata } from 'next'; import { notFound, permanentRedirect } from 'next/navigation'; +import { rewriteCookbookUrl } from '@/lib/geistdocs/cookbook-source'; import { AgentTraces } from '@/components/custom/agent-traces'; import { CookbookExplorer } from '@/components/geistdocs/cookbook-explorer'; import { FluidComputeCallout } from '@/components/custom/fluid-compute-callout'; @@ -33,8 +34,9 @@ const Page = async ({ params }: PageProps<'/[lang]/docs/[[...slug]]'>) => { const { slug, lang } = await params; if (Array.isArray(slug) && slug[0] === 'cookbook') { - const rest = slug.slice(1); - permanentRedirect(`/${lang}/cookbooks${rest.length ? `/${rest.join('/')}` : ''}`); + const rest = slug.slice(1).join('/'); + const legacyPath = `/docs/cookbook${rest ? 
`/${rest}` : ''}`; + permanentRedirect(`/${lang}${rewriteCookbookUrl(legacyPath)}`); } const page = source.getPage(slug, lang); diff --git a/docs/app/[lang]/llms.mdx/[[...slug]]/route.ts b/docs/app/[lang]/llms.mdx/[[...slug]]/route.ts index e9dc69bdf2..897e54d56a 100644 --- a/docs/app/[lang]/llms.mdx/[[...slug]]/route.ts +++ b/docs/app/[lang]/llms.mdx/[[...slug]]/route.ts @@ -1,10 +1,8 @@ import { notFound } from 'next/navigation'; -import { rewriteCookbookUrl } from '@/lib/geistdocs/cookbook-source'; +import { rewriteCookbookUrlsInText } from '@/lib/geistdocs/cookbook-source'; import { getLLMText, source } from '@/lib/geistdocs/source'; import { i18n } from '@/lib/geistdocs/i18n'; -const COOKBOOK_URL_RE_GLOBAL = /\/docs\/cookbook(?=\/|$)/g; - export const revalidate = false; export async function GET( @@ -24,7 +22,7 @@ export async function GET( const text = await getLLMText(page); return new Response( - text.replace(COOKBOOK_URL_RE_GLOBAL, '/cookbooks') + + rewriteCookbookUrlsInText(text) + `\n\n## Sitemap [Overview of all docs pages](${sitemapPath})\n`, { diff --git a/docs/app/[lang]/llms.txt/route.ts b/docs/app/[lang]/llms.txt/route.ts index 90eebd3b2d..343ad45b0c 100644 --- a/docs/app/[lang]/llms.txt/route.ts +++ b/docs/app/[lang]/llms.txt/route.ts @@ -1,8 +1,7 @@ import type { NextRequest } from 'next/server'; +import { rewriteCookbookUrlsInText } from '@/lib/geistdocs/cookbook-source'; import { getLLMText, source } from '@/lib/geistdocs/source'; -const COOKBOOK_URL_RE_GLOBAL = /\/docs\/cookbook(?=\/|$)/g; - export const revalidate = false; export const GET = async ( @@ -13,12 +12,9 @@ export const GET = async ( const scan = source.getPages(lang).map(getLLMText); const scanned = await Promise.all(scan); - return new Response( - scanned.join('\n\n').replace(COOKBOOK_URL_RE_GLOBAL, '/cookbooks'), - { - headers: { - 'Content-Type': 'text/markdown; charset=utf-8', - }, - } - ); + return new Response(rewriteCookbookUrlsInText(scanned.join('\n\n')), { + headers: { 
+ 'Content-Type': 'text/markdown; charset=utf-8', + }, + }); }; diff --git a/docs/lib/geistdocs/cookbook-source.ts b/docs/lib/geistdocs/cookbook-source.ts index 69809bf546..3f495d747e 100644 --- a/docs/lib/geistdocs/cookbook-source.ts +++ b/docs/lib/geistdocs/cookbook-source.ts @@ -1,9 +1,34 @@ +import type { Node } from 'fumadocs-core/page-tree'; import { source } from './source'; -const COOKBOOK_URL_RE = /\/docs\/cookbook(?=\/|$)/; +const COOKBOOK_DOCS_PREFIX_RE = /\/docs\/cookbook(?=\/|$)/g; export function rewriteCookbookUrl(url: string): string { - return url.replace(COOKBOOK_URL_RE, '/cookbooks'); + return url.replace(COOKBOOK_DOCS_PREFIX_RE, '/cookbooks'); +} + +export function rewriteCookbookUrlsInText(text: string): string { + return text.replace(COOKBOOK_DOCS_PREFIX_RE, '/cookbooks'); +} + +function isCookbookFolder(node: Node): boolean { + if (node.type !== 'folder') { + return false; + } + + if (node.index?.url?.startsWith('/docs/cookbook')) { + return true; + } + + return node.children.some((child) => { + if (child.type === 'page') { + return child.url.startsWith('/docs/cookbook'); + } + if (child.type === 'folder') { + return child.index?.url?.startsWith('/docs/cookbook') ?? 
false; + } + return false; + }); } /** @@ -13,12 +38,10 @@ export function rewriteCookbookUrl(url: string): string { export function getCookbookTree(lang: string) { const fullTree = source.pageTree[lang]; - const cookbookNode = fullTree.children.find( - (node) => node.type === 'folder' && node.name === 'Cookbook' - ); + const cookbookNode = fullTree.children.find(isCookbookFolder); if (!cookbookNode || cookbookNode.type !== 'folder') { - return { name: 'Cookbooks', children: [] }; + return { name: 'Cookbooks', children: [] as Node[] }; } return { From 7d8df61abf061461ce9ad22a9b9cf7a24c19cfda Mon Sep 17 00:00:00 2001 From: John Lindquist Date: Tue, 31 Mar 2026 04:20:48 -0600 Subject: [PATCH 06/17] fix(docs): finalize cookbook route split Keep cookbook content discoverable after moving it to a first-class /cookbooks surface so navigation, canonical metadata, and markdown consumers resolve the new public URLs consistently. Avoid serving the legacy /docs/cookbook tree as if it were still part of the docs section, which reduces duplicate navigation paths and prevents stale static output from competing with the new route structure. Ploop-Iter: 5 --- .../app/[lang]/cookbooks/[[...slug]]/page.tsx | 6 +- docs/app/[lang]/docs/[[...slug]]/page.tsx | 8 +- docs/app/[lang]/docs/layout.tsx | 17 +++- docs/content/docs/meta.json | 3 +- docs/lib/geistdocs/cookbook-source.ts | 98 +++++++++++-------- docs/next.config.ts | 40 +++++++- 6 files changed, 122 insertions(+), 50 deletions(-) diff --git a/docs/app/[lang]/cookbooks/[[...slug]]/page.tsx b/docs/app/[lang]/cookbooks/[[...slug]]/page.tsx index b4d325b2b4..909df493de 100644 --- a/docs/app/[lang]/cookbooks/[[...slug]]/page.tsx +++ b/docs/app/[lang]/cookbooks/[[...slug]]/page.tsx @@ -99,6 +99,8 @@ export const generateMetadata = async ({ notFound(); } + const publicPath = `/cookbooks${slug ? 
`/${slug.join('/')}` : ''}`; + const metadata: Metadata = { title: page.data.title, description: page.data.description, @@ -106,9 +108,9 @@ export const generateMetadata = async ({ images: getPageImage(page).url, }, alternates: { - canonical: `/cookbooks${slug ? `/${slug.join('/')}` : ''}`, + canonical: publicPath, types: { - 'text/markdown': `/cookbooks${slug ? `/${slug.join('/')}` : ''}.md`, + 'text/markdown': publicPath, }, }, }; diff --git a/docs/app/[lang]/docs/[[...slug]]/page.tsx b/docs/app/[lang]/docs/[[...slug]]/page.tsx index feb132e7c1..4f3fffd159 100644 --- a/docs/app/[lang]/docs/[[...slug]]/page.tsx +++ b/docs/app/[lang]/docs/[[...slug]]/page.tsx @@ -94,7 +94,13 @@ const Page = async ({ params }: PageProps<'/[lang]/docs/[[...slug]]'>) => { ); }; -export const generateStaticParams = () => source.generateParams(); +export const generateStaticParams = () => + source + .generateParams() + .filter( + (params) => + !(Array.isArray(params.slug) && params.slug[0] === 'cookbook'), + ); export const generateMetadata = async ({ params, diff --git a/docs/app/[lang]/docs/layout.tsx b/docs/app/[lang]/docs/layout.tsx index 583e850925..866624d514 100644 --- a/docs/app/[lang]/docs/layout.tsx +++ b/docs/app/[lang]/docs/layout.tsx @@ -1,10 +1,25 @@ +import type { Root } from 'fumadocs-core/page-tree'; import { DocsLayout } from '@/components/geistdocs/docs-layout'; import { source } from '@/lib/geistdocs/source'; +function withoutCookbook(tree: Root): Root { + return { + ...tree, + children: tree.children.filter((node) => { + if (node.type !== 'folder') return true; + return !node.index?.url?.startsWith('/docs/cookbook'); + }), + }; +} + const Layout = async ({ children, params }: LayoutProps<'/[lang]/docs'>) => { const { lang } = await params; - return {children}; + return ( + + {children} + + ); }; export default Layout; diff --git a/docs/content/docs/meta.json b/docs/content/docs/meta.json index eda30fd046..c6e338712d 100644 --- a/docs/content/docs/meta.json +++ 
b/docs/content/docs/meta.json @@ -10,6 +10,7 @@ "testing", "deploying", "errors", - "api-reference" + "api-reference", + "cookbook" ] } diff --git a/docs/lib/geistdocs/cookbook-source.ts b/docs/lib/geistdocs/cookbook-source.ts index 3f495d747e..2560841236 100644 --- a/docs/lib/geistdocs/cookbook-source.ts +++ b/docs/lib/geistdocs/cookbook-source.ts @@ -1,8 +1,11 @@ -import type { Node } from 'fumadocs-core/page-tree'; +import type { Node, Root } from 'fumadocs-core/page-tree'; import { source } from './source'; const COOKBOOK_DOCS_PREFIX_RE = /\/docs\/cookbook(?=\/|$)/g; +type FolderNode = Extract; +type PageNode = Extract; + export function rewriteCookbookUrl(url: string): string { return url.replace(COOKBOOK_DOCS_PREFIX_RE, '/cookbooks'); } @@ -11,65 +14,78 @@ export function rewriteCookbookUrlsInText(text: string): string { return text.replace(COOKBOOK_DOCS_PREFIX_RE, '/cookbooks'); } -function isCookbookFolder(node: Node): boolean { - if (node.type !== 'folder') { - return false; - } +function isCookbookFolder(node: Node): node is FolderNode { + if (node.type !== 'folder') return false; - if (node.index?.url?.startsWith('/docs/cookbook')) { - return true; - } + if (node.index?.url?.startsWith('/docs/cookbook')) return true; + // Fallback: check if children contain cookbook pages return node.children.some((child) => { - if (child.type === 'page') { - return child.url.startsWith('/docs/cookbook'); - } - if (child.type === 'folder') { + if (child.type === 'page') return child.url.startsWith('/docs/cookbook'); + if (child.type === 'folder') return child.index?.url?.startsWith('/docs/cookbook') ?? 
false; - } return false; }); } +function rewriteNode>(node: T): T { + const rewritten = { ...node }; + + if (typeof rewritten.url === 'string') { + rewritten.url = rewriteCookbookUrl(rewritten.url); + } + + if (rewritten.index && typeof rewritten.index === 'object') { + rewritten.index = rewriteNode( + rewritten.index as Record, + ); + } + + if (Array.isArray(rewritten.children)) { + rewritten.children = rewritten.children.map((child) => + rewriteNode(child as Record), + ); + } + + return rewritten as T; +} + +function createOverviewPage(cookbookNode: FolderNode): PageNode | null { + if (!cookbookNode.index) { + return null; + } + + return { + type: 'page', + $id: `${cookbookNode.$id}__overview`, + name: 'Overview', + url: rewriteCookbookUrl(cookbookNode.index.url), + } as PageNode; +} + /** * Extract the cookbook subtree from the docs page tree, * rewriting URLs from /docs/cookbook/... to /cookbooks/... + * Returns a proper Root tree with an Overview entry for the landing page. */ -export function getCookbookTree(lang: string) { +export function getCookbookTree(lang: string): Root { const fullTree = source.pageTree[lang]; const cookbookNode = fullTree.children.find(isCookbookFolder); - if (!cookbookNode || cookbookNode.type !== 'folder') { - return { name: 'Cookbooks', children: [] as Node[] }; + if (!cookbookNode) { + throw new Error('Cookbook tree not found in docs source'); } + const overview = createOverviewPage(cookbookNode); + + const categoryNodes = cookbookNode.children.map( + (child) => rewriteNode(child as Record) as Node, + ); + return { + ...fullTree, name: 'Cookbooks', - children: rewriteUrls(cookbookNode.children), + children: [...(overview ? 
[overview] : []), ...categoryNodes], }; } - -function rewriteUrls(nodes: T[]): T[] { - return nodes.map((node) => { - const rewritten = { ...(node as Record) }; - - if (typeof rewritten.url === 'string') { - rewritten.url = rewriteCookbookUrl(rewritten.url); - } - - if (Array.isArray(rewritten.children)) { - rewritten.children = rewriteUrls(rewritten.children); - } - - if (rewritten.index && typeof rewritten.index === 'object') { - const index = { ...(rewritten.index as Record) }; - if (typeof index.url === 'string') { - index.url = rewriteCookbookUrl(index.url); - } - rewritten.index = index; - } - - return rewritten as T; - }); -} diff --git a/docs/next.config.ts b/docs/next.config.ts index 24eb802ab1..037cf9ff32 100644 --- a/docs/next.config.ts +++ b/docs/next.config.ts @@ -18,6 +18,9 @@ const config: NextConfig = { }, async rewrites() { + const markdownAcceptHeader = + '(?=.*(?:text/plain|text/markdown))(?!.*text/html.*(?:text/plain|text/markdown)).*'; + return { beforeFiles: [ { @@ -31,10 +34,29 @@ const config: NextConfig = { { type: 'header', key: 'Accept', - // Have text/markdown or text/plain but before any text/html - // Note, that Claude Code currently requests text/plain - value: - '(?=.*(?:text/plain|text/markdown))(?!.*text/html.*(?:text/plain|text/markdown)).*', + value: markdownAcceptHeader, + }, + ], + }, + { + source: '/cookbooks', + destination: '/llms.mdx/cookbook', + has: [ + { + type: 'header', + key: 'Accept', + value: markdownAcceptHeader, + }, + ], + }, + { + source: '/cookbooks/:path*', + destination: '/llms.mdx/cookbook/:path*', + has: [ + { + type: 'header', + key: 'Accept', + value: markdownAcceptHeader, }, ], }, @@ -49,6 +71,16 @@ const config: NextConfig = { destination: '/docs/getting-started', permanent: true, }, + { + source: '/docs/cookbook', + destination: '/cookbooks', + permanent: true, + }, + { + source: '/docs/cookbook/:path*', + destination: '/cookbooks/:path*', + permanent: true, + }, { source: '/err/:slug', destination: 
'/docs/errors/:slug', From 19ae735d119ec9f08d1c625b8ffd0a8b5394eb65 Mon Sep 17 00:00:00 2001 From: John Lindquist Date: Tue, 31 Mar 2026 05:01:34 -0600 Subject: [PATCH 07/17] docs: improve cookbook discovery The cookbook landing page needs to work for both exploratory users and users who already know the pattern they want. This keeps the guided decision tree while adding shared category metadata and a searchable browse mode so recipe discovery feels faster and more consistent with the rest of the docs experience. Ploop-Iter: 6 --- .../geistdocs/cookbook-explorer.tsx | 536 +++++++++++++----- docs/content/docs/cookbook/index.mdx | 2 + docs/lib/cookbook-tree.ts | 32 ++ 3 files changed, 441 insertions(+), 129 deletions(-) diff --git a/docs/components/geistdocs/cookbook-explorer.tsx b/docs/components/geistdocs/cookbook-explorer.tsx index c894f5aa35..ee432b71b0 100644 --- a/docs/components/geistdocs/cookbook-explorer.tsx +++ b/docs/components/geistdocs/cookbook-explorer.tsx @@ -1,19 +1,204 @@ 'use client'; -import { useCallback, useMemo, useState } from 'react'; import Link from 'next/link'; +import { useRouter } from 'next/navigation'; +import { useCallback, useEffect, useMemo, useRef, useState } from 'react'; import { tree, recipes, slugToCategory, + categoryLabels, + categoryOrder, type Branch, + type Recipe, + type RecipeCategory, type TreeNode, } from '@/lib/cookbook-tree'; type PathEntry = { nodeId: string; branchIndex: number }; +type ExplorerMode = 'guided' | 'browse'; +type CategoryFilter = RecipeCategory | 'all'; + +function getRecipeCategory(recipe: Recipe): RecipeCategory { + return slugToCategory[recipe.slug] as RecipeCategory; +} + +function getRecipeHref(lang: string, recipe: Recipe) { + return `/${lang}/cookbooks/${getRecipeCategory(recipe)}/${recipe.slug}`; +} + +function RecipeCard({ + lang, + recipe, + highlighted = false, +}: { + lang: string; + recipe: Recipe; + highlighted?: boolean; +}) { + const category = getRecipeCategory(recipe); + return ( + +

+ + {categoryLabels[category]} + + {recipe.slug} +
+

+ {recipe.whenToUse} +

+
+
+

+ {recipe.title} +

+

+ {recipe.description} +

+
+ + + +
+ + ); +} export function CookbookExplorer({ lang }: { lang: string }) { + const router = useRouter(); + const searchInputRef = useRef(null); + + const [mode, setMode] = useState('guided'); const [path, setPath] = useState([]); + const [query, setQuery] = useState(''); + const [selectedCategory, setSelectedCategory] = + useState('all'); + const [activeIndex, setActiveIndex] = useState(0); + + const allRecipes = useMemo( + () => + Object.values(recipes).sort((a, b) => { + const categoryCompare = + categoryOrder.indexOf(getRecipeCategory(a)) - + categoryOrder.indexOf(getRecipeCategory(b)); + if (categoryCompare !== 0) return categoryCompare; + return a.title.localeCompare(b.title); + }), + [] + ); + + const recipeCount = allRecipes.length; + + const countsByCategory = useMemo(() => { + return categoryOrder.reduce( + (acc, category) => { + acc[category] = allRecipes.filter( + (recipe) => getRecipeCategory(recipe) === category + ).length; + return acc; + }, + {} as Record + ); + }, [allRecipes]); + + const filteredRecipes = useMemo(() => { + const q = query.trim().toLowerCase(); + return allRecipes.filter((recipe) => { + const category = getRecipeCategory(recipe); + if (selectedCategory !== 'all' && category !== selectedCategory) { + return false; + } + if (!q) return true; + const haystack = [ + recipe.title, + recipe.slug, + recipe.description, + recipe.whenToUse, + categoryLabels[category], + ] + .join(' ') + .toLowerCase(); + return haystack.includes(q); + }); + }, [allRecipes, query, selectedCategory]); + + useEffect(() => { + setActiveIndex(0); + }, [query, selectedCategory]); + + const openRecipe = useCallback( + (recipe: Recipe | undefined) => { + if (!recipe) return; + router.push(getRecipeHref(lang, recipe)); + }, + [lang, router] + ); + + useEffect(() => { + const onKeyDown = (event: KeyboardEvent) => { + const target = event.target as HTMLElement | null; + const isEditable = + !!target && + (target.tagName === 'INPUT' || + target.tagName === 'TEXTAREA' || + 
target.isContentEditable); + + if ( + ((event.metaKey || event.ctrlKey) && + event.key.toLowerCase() === 'k') || + (event.key === '/' && !isEditable) + ) { + event.preventDefault(); + setMode('browse'); + requestAnimationFrame(() => searchInputRef.current?.focus()); + return; + } + + if (mode !== 'browse' || filteredRecipes.length === 0) return; + + if (event.key === 'ArrowDown') { + event.preventDefault(); + setActiveIndex((index) => (index + 1) % filteredRecipes.length); + } + if (event.key === 'ArrowUp') { + event.preventDefault(); + setActiveIndex( + (index) => + (index - 1 + filteredRecipes.length) % filteredRecipes.length + ); + } + if ( + event.key === 'Enter' && + document.activeElement === searchInputRef.current + ) { + event.preventDefault(); + openRecipe(filteredRecipes[activeIndex]); + } + }; + + window.addEventListener('keydown', onKeyDown); + return () => window.removeEventListener('keydown', onKeyDown); + }, [mode, filteredRecipes, activeIndex, openRecipe]); + + // --- Guided mode state --- const { currentNode, resultSlugs } = useMemo(() => { let node: TreeNode | undefined = tree; @@ -59,151 +244,244 @@ export function CookbookExplorer({ lang }: { lang: string }) { return crumbs; }, [path]); - const resultRecipes = useMemo(() => { - if (!resultSlugs) return []; - return resultSlugs.map((s) => recipes[s]).filter((r) => r != null); - }, [resultSlugs]); - - const recipeCount = Object.keys(recipes).length; + const resultRecipes = useMemo( + () => + (resultSlugs ?? []) + .map((slug) => recipes[slug]) + .filter((recipe): recipe is Recipe => recipe != null), + [resultSlugs] + ); return (
- {/* Breadcrumb path */} - {breadcrumbs.length > 0 && ( -
- - {breadcrumbs.map((crumb, i) => ( -
- + {/* Mode switcher */} +
+ + + + Press{' '} + /{' '} + or{' '} + + ⌘K + + +
+ + {/* Browse mode */} + {mode === 'browse' ? ( +
+
+
+ + + + + setQuery(event.target.value)} + placeholder="Search recipes, use cases, or slugs" + autoComplete="off" + className="w-full bg-transparent text-sm outline-none placeholder:text-muted-foreground" + /> +
+
+ {categoryOrder.map((category) => ( + + ))}
- ))} -
- )} +
- {/* Current question with branches */} - {currentNode && !resultSlugs && ( -
-

- {currentNode.question} -

- {currentNode.id === 'root' && ( -

- Answer a few questions to find the right pattern from{' '} - {recipeCount} recipes. Each result includes a code example you can - copy. -

+

+ {filteredRecipes.length} of {recipeCount} recipes. Use + ↑/↓ to move and Enter to open the highlighted result. +

+ + {filteredRecipes.length === 0 ? ( +
+ No recipes match {query}. Try + a broader term like retry,{' '} + approval, or{' '} + webhook. +
+ ) : ( +
+ {filteredRecipes.map((recipe, index) => ( + + ))} +
)} -
- {currentNode.branches.map((branch, i) => ( +
+ ) : ( + /* Guided mode */ +
+ {breadcrumbs.length > 0 && ( +
- ))} -
-
- )} - - {/* Results */} - {resultSlugs && ( -
-

- Here's what fits -

-

- {resultRecipes.length} recipe - {resultRecipes.length !== 1 ? 's' : ''} match your path. -

-
- {resultRecipes.map((recipe) => { - const category = slugToCategory[recipe.slug]; - return ( - ( +
-

- {recipe.whenToUse} -

-
-
-

- {recipe.title} -

-

- {recipe.description} -

+ + +
+ ))} +
+ )} + + {currentNode && !resultSlugs && ( +
+

+ {currentNode.question} +

+ {currentNode.id === 'root' && ( +

+ Answer a few questions to find the right pattern from{' '} + {recipeCount} recipes, or switch to{' '} + Browse all if you already know roughly what + you want. +

+ )} +
+ {currentNode.branches.map((branch, index) => ( +
- - ); - })} -
+ + ))} +
+
+ )} -
- -
+ {resultSlugs && ( +
+

+ Here's what fits +

+

+ {resultRecipes.length} recipe + {resultRecipes.length !== 1 ? 's' : ''} match your path. +

+
+ {resultRecipes.map((recipe) => ( + + ))} +
+
+ +
+
+ )}
)}
diff --git a/docs/content/docs/cookbook/index.mdx b/docs/content/docs/cookbook/index.mdx index b0b70d20cd..13c003a65d 100644 --- a/docs/content/docs/cookbook/index.mdx +++ b/docs/content/docs/cookbook/index.mdx @@ -4,4 +4,6 @@ description: Find the right workflow pattern for your use case. Browse 50 recipe type: overview --- +Cookbooks are the fastest way to jump from "I need this behavior" to a clean workflow shape you can copy. Use **Guide me** if you are still exploring, or press `/` to search every recipe directly. + diff --git a/docs/lib/cookbook-tree.ts b/docs/lib/cookbook-tree.ts index 7644f9ad96..2650aa5e26 100644 --- a/docs/lib/cookbook-tree.ts +++ b/docs/lib/cookbook-tree.ts @@ -19,6 +19,38 @@ export type Recipe = { category: string; }; +export type RecipeCategory = + | 'payments' + | 'approvals' + | 'resilience' + | 'notifications' + | 'webhooks' + | 'data-processing' + | 'routing' + | 'observability'; + +export const categoryOrder: RecipeCategory[] = [ + 'payments', + 'approvals', + 'resilience', + 'notifications', + 'webhooks', + 'data-processing', + 'routing', + 'observability', +]; + +export const categoryLabels: Record = { + payments: 'Payments & Orders', + approvals: 'Approvals', + resilience: 'Resilience', + notifications: 'Notifications', + webhooks: 'Webhooks & Callbacks', + 'data-processing': 'Data Processing', + routing: 'Routing', + observability: 'Observability', +}; + /** Map from slug → category folder for URL construction */ export const slugToCategory: Record = { saga: 'payments', From 11ce0b442de8e1c09b396b8d754dce81b976860f Mon Sep 17 00:00:00 2001 From: John Lindquist Date: Tue, 31 Mar 2026 05:43:36 -0600 Subject: [PATCH 08/17] docs: refine cookbook pattern examples Tighten the simplified cookbook recipes so the examples teach the intended workflow semantics clearly and consistently. 
The changes keep the documentation focused on the core control-flow patterns reviewers called out, while removing ambiguity around partial arrivals, deadlines, and first-success behavior. Ploop-Iter: 7 --- .../cookbook/data-processing/aggregator.mdx | 51 ++++++++++++------- .../notifications/scheduled-digest.mdx | 43 ++++++---------- .../notifications/wakeable-reminder.mdx | 12 ++--- .../observability/correlation-identifier.mdx | 4 +- .../cookbook/resilience/hedge-request.mdx | 15 +++--- .../docs/cookbook/webhooks/event-gateway.mdx | 46 +++++++++++------ .../docs/cookbook/webhooks/request-reply.mdx | 4 +- 7 files changed, 95 insertions(+), 80 deletions(-) diff --git a/docs/content/docs/cookbook/data-processing/aggregator.mdx b/docs/content/docs/cookbook/data-processing/aggregator.mdx index 5c39d6bf7a..2a1d4629c6 100644 --- a/docs/content/docs/cookbook/data-processing/aggregator.mdx +++ b/docs/content/docs/cookbook/data-processing/aggregator.mdx @@ -5,44 +5,59 @@ type: guide summary: Collect inventory from multiple warehouses with a timeout so stragglers don't block checkout. --- -Use aggregator when you need to gather signals from multiple sources and combine them, but cannot wait forever for all of them. +Use aggregator when you need to gather signals from multiple sources and combine whatever arrived before a deadline. ## Pattern -Create one hook per expected source, then race `Promise.all` (all responded) against `sleep` (deadline). Aggregate whatever data arrived into a single result. +Create one hook per expected source, keep those promises in a pending map, and repeatedly race the next arrival against a durable timeout. That preserves partial results instead of throwing them away when the deadline wins. 
```typescript lineNumbers import { defineHook, sleep } from "workflow"; -export const aggregatorSignal = defineHook<{ source: string; value: number }>(); +declare function summarizeAvailability( + batchId: string, + received: Record, +): Promise<{ totalAvailable: number }>; // @setup -declare function processBatch(batchId: string, received: { source: string; value: number }[]): Promise<{ totalValue: number }>; // @setup +export const inventorySignal = defineHook<{ available: number }>(); const SOURCES = ["warehouse-a", "warehouse-b", "warehouse-c"] as const; -export async function aggregator(batchId: string, timeoutMs: number = 8000) { +export async function aggregator(batchId: string, timeoutMs: number = 8_000) { "use workflow"; - const hooks = SOURCES.map((source) => - aggregatorSignal - .create({ token: `${source}:${batchId}` }) - .then((payload) => ({ source, payload })) + const pending = new Map( + SOURCES.map((source) => [ + source, + inventorySignal + .create({ token: `${source}:${batchId}` }) + .then(({ available }) => ({ type: "signal" as const, source, available })), + ]), ); - const outcome = await Promise.race([ - Promise.all(hooks).then((results) => ({ type: "ready" as const, results })), - sleep(`${timeoutMs}ms`).then(() => ({ type: "timeout" as const, results: [] as { source: string; payload: { source: string; value: number } }[] })), - ]); + const received: Record = {}; + const deadline = sleep(`${timeoutMs}ms`).then(() => ({ type: "timeout" as const })); - const received = outcome.results.map(({ payload }) => payload); - const summary = await processBatch(batchId, received); + while (pending.size > 0) { + const outcome = await Promise.race([...pending.values(), deadline]); + if (outcome.type === "timeout") break; + received[outcome.source] = outcome.available; + pending.delete(outcome.source); + } - return { batchId, status: outcome.type === "ready" ? 
"aggregated" : "partial", summary }; + const summary = await summarizeAvailability(batchId, received); + + return { + batchId, + received: Object.keys(received), + missing: [...pending.keys()], + summary, + }; } ``` ## Key APIs - [`"use workflow"`](/docs/api-reference/workflow/use-workflow) — declares the orchestrator function -- [`defineHook()`](/docs/api-reference/workflow/define-hook) — creates a named hook that suspends until signaled -- [`sleep()`](/docs/api-reference/workflow/sleep) — durable timer that survives restarts +- [`defineHook()`](/docs/api-reference/workflow/define-hook) — creates one durable signal per expected source +- [`sleep()`](/docs/api-reference/workflow/sleep) — sets the deadline without burning compute diff --git a/docs/content/docs/cookbook/notifications/scheduled-digest.mdx b/docs/content/docs/cookbook/notifications/scheduled-digest.mdx index f7379462d6..be9daa5b23 100644 --- a/docs/content/docs/cookbook/notifications/scheduled-digest.mdx +++ b/docs/content/docs/cookbook/notifications/scheduled-digest.mdx @@ -5,54 +5,41 @@ type: guide summary: Open a 1-hour collection window for events, then email a digest when the window closes. --- -Open a time window for events, then email a digest when the window closes. The hook can receive multiple events during the window because it is awaited in a loop. +Use scheduled digest when many small events should be batched into one summary instead of triggering one notification per event. ## Pattern -Race incoming events from a `defineHook` against a `sleep()` window. Events accumulate in an array until the window closes, then send a digest if any were collected. +Let external systems write events under a shared digest id. The workflow just sleeps until the window closes, loads what accumulated, and sends one email if there is anything to report. 
```typescript lineNumbers -import { sleep, defineHook } from "workflow"; +import { sleep } from "workflow"; -type EventPayload = { type: string; message: string }; +type DigestEvent = { type: string; message: string }; -declare function sendDigestEmail(userId: string, events: EventPayload[]): Promise; // @setup +declare function loadDigestEvents(digestId: string): Promise; // @setup +declare function sendDigestEmail(userId: string, events: DigestEvent[]): Promise; // @setup -export const digestEvent = defineHook(); - -export async function collectAndSendDigest( +export async function scheduledDigest( digestId: string, userId: string, - windowMs: number = 3_600_000 + window: string = "1h", ) { "use workflow"; - const hook = digestEvent.create({ token: `digest:${digestId}` }); - const windowClosed = sleep(`${windowMs}ms`).then(() => ({ - kind: "window_closed" as const, - })); - const events: EventPayload[] = []; - - while (true) { - const outcome = await Promise.race([ - hook.then((payload) => ({ kind: "event" as const, payload })), - windowClosed, - ]); + await sleep(window); - if (outcome.kind === "window_closed") break; - events.push(outcome.payload); - } + const events = await loadDigestEvents(digestId); - if (events.length > 0) { - await sendDigestEmail(userId, events); + if (events.length === 0) { + return { digestId, status: "empty" }; } - return { digestId, status: events.length > 0 ? 
"sent" : "empty", eventCount: events.length }; + await sendDigestEmail(userId, events); + return { digestId, status: "sent", eventCount: events.length }; } ``` ## Key APIs - [`"use workflow"`](/docs/api-reference/workflow/use-workflow) — declares the orchestrator function -- [`sleep()`](/docs/api-reference/workflow/sleep) — durable timer for the collection window -- [`defineHook()`](/docs/api-reference/workflow/define-hook) — receives events from external systems during the window +- [`sleep()`](/docs/api-reference/workflow/sleep) — closes the digest window without using compute while waiting diff --git a/docs/content/docs/cookbook/notifications/wakeable-reminder.mdx b/docs/content/docs/cookbook/notifications/wakeable-reminder.mdx index 714157e354..560333eb55 100644 --- a/docs/content/docs/cookbook/notifications/wakeable-reminder.mdx +++ b/docs/content/docs/cookbook/notifications/wakeable-reminder.mdx @@ -23,15 +23,14 @@ declare function sendReminderEmail(userId: string): Promise; // @setup export const reminderActionHook = defineHook(); -export async function scheduleReminder(userId: string, delayMs: number) { +export async function scheduleReminder(userId: string, sendAt: Date) { "use workflow"; - let sendAt = new Date(Date.now() + delayMs); - const action = reminderActionHook.create({ token: `reminder:${userId}` }); - const outcome = await Promise.race([ sleep(sendAt).then(() => ({ kind: "time" as const })), - action.then((payload) => ({ kind: "action" as const, payload })), + reminderActionHook + .create({ token: `reminder:${userId}` }) + .then((payload) => ({ kind: "action" as const, payload })), ]); if (outcome.kind === "action") { @@ -39,8 +38,7 @@ export async function scheduleReminder(userId: string, delayMs: number) { return { userId, status: "cancelled" }; } if (outcome.payload.type === "snooze") { - sendAt = new Date(Date.now() + outcome.payload.seconds * 1000); - await sleep(sendAt); + await sleep(`${outcome.payload.seconds}s`); } } diff --git 
a/docs/content/docs/cookbook/observability/correlation-identifier.mdx b/docs/content/docs/cookbook/observability/correlation-identifier.mdx index 819a32b2d5..ff47873868 100644 --- a/docs/content/docs/cookbook/observability/correlation-identifier.mdx +++ b/docs/content/docs/cookbook/observability/correlation-identifier.mdx @@ -12,8 +12,6 @@ Use a correlation identifier when your workflow sends requests to external servi The workflow generates a unique correlation ID, attaches it to the outbound request, then durably waits for the async response. When the response arrives, the correlation ID verifies it matches the original request. ```typescript lineNumbers -import { sleep } from "workflow"; - declare function generateCorrelationId(requestId: string): Promise; // @setup declare function sendRequest(requestId: string, correlationId: string, service: string, payload: string): Promise; // @setup declare function awaitResponse(requestId: string, correlationId: string): Promise; // @setup @@ -42,4 +40,4 @@ export async function correlationIdentifierFlow( ## Key APIs - [`"use workflow"`](/docs/api-reference/workflow/use-workflow) — declares the orchestrator function -- [`sleep()`](/docs/api-reference/workflow/sleep) — durably waits for the async response +- [`"use step"`](/docs/api-reference/workflow/use-step) — correlation ID generation, outbound delivery, and reply matching live in durable helper steps diff --git a/docs/content/docs/cookbook/resilience/hedge-request.mdx b/docs/content/docs/cookbook/resilience/hedge-request.mdx index aeea31a0ae..17d2258de5 100644 --- a/docs/content/docs/cookbook/resilience/hedge-request.mdx +++ b/docs/content/docs/cookbook/resilience/hedge-request.mdx @@ -5,20 +5,23 @@ type: guide summary: Fire the same search query to two replicas and use whichever responds first. --- -Use hedge request when tail latency matters more than redundant work — fire the same request at multiple providers and take the first response. 
+Use hedge request when tail latency matters more than duplicate work. ## Pattern -Launch the same step against multiple providers using `Promise.race()`. The first to respond wins. Because steps run in parallel, this cuts tail latency at the cost of extra calls. +Send the same request to multiple providers and use `Promise.any()` so the first successful response wins. Failures are ignored unless every provider fails. ```typescript lineNumbers -declare function queryProvider(name: string, query: string): Promise<{ provider: string; data: unknown }>; // @setup +declare function queryProvider( + name: string, + query: string, +): Promise<{ provider: string; data: unknown }>; // @setup export async function hedgeRequest(query: string, providers: string[]) { "use workflow"; - const result = await Promise.race( - providers.map((name) => queryProvider(name, query)) + const result = await Promise.any( + providers.map((name) => queryProvider(name, query)), ); return { winner: result.provider, data: result.data }; @@ -28,4 +31,4 @@ export async function hedgeRequest(query: string, providers: string[]) { ## Key APIs - [`"use workflow"`](/docs/api-reference/workflow/use-workflow) — declares the orchestrator function -- [`Promise.race()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise/race) — returns the first provider to respond +- [`Promise.any()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise/any) — returns the first successful response diff --git a/docs/content/docs/cookbook/webhooks/event-gateway.mdx b/docs/content/docs/cookbook/webhooks/event-gateway.mdx index 0dadd3a740..aa766f1456 100644 --- a/docs/content/docs/cookbook/webhooks/event-gateway.mdx +++ b/docs/content/docs/cookbook/webhooks/event-gateway.mdx @@ -5,44 +5,60 @@ type: guide summary: Wait for payment, inventory, and fraud-check signals to all arrive before shipping an order. 
--- -Wait for payment, inventory, and fraud-check signals to all arrive before shipping an order. Use this when a workflow must collect multiple independent signals before proceeding. +Use event gateway when many external systems emit different event shapes, but the workflow only wants a small set of normalized internal signals. ## Pattern -Create one hook per expected signal, then race `Promise.all` (all signals arrived) against `sleep` (deadline expired). If all signals arrive in time, continue. Otherwise, report which signals are missing. +Edge handlers normalize each external event before signaling the workflow. The workflow keeps one pending hook per required signal, records what arrived, and returns missing signals if the deadline expires. ```typescript lineNumbers import { defineHook, sleep } from "workflow"; -export const orderSignal = defineHook<{ ok: true }>(); +type Signal = "payment" | "inventory" | "fraud"; declare function shipOrder(orderId: string): Promise; // @setup -const SIGNALS = ["payment", "inventory", "fraud"] as const; +export const orderSignal = defineHook<{ ok: boolean }>(); + +const SIGNALS: Signal[] = ["payment", "inventory", "fraud"]; export async function eventGateway(orderId: string, timeoutMs: number = 10_000) { "use workflow"; - const hooks = SIGNALS.map((kind) => - orderSignal.create({ token: `${kind}:${orderId}` }) + const pending = new Map( + SIGNALS.map((signal) => [ + signal, + orderSignal + .create({ token: `${signal}:${orderId}` }) + .then(({ ok }) => ({ type: "signal" as const, signal, ok })), + ]), ); - const outcome = await Promise.race([ - Promise.all(hooks).then(() => ({ type: "ready" as const })), - sleep(`${timeoutMs}ms`).then(() => ({ type: "timeout" as const })), - ]); + const received: Signal[] = []; + const deadline = sleep(`${timeoutMs}ms`).then(() => ({ type: "timeout" as const })); + + while (pending.size > 0) { + const outcome = await Promise.race([...pending.values(), deadline]); + + if (outcome.type === 
"timeout") { + return { orderId, status: "timeout", received, missing: [...pending.keys()] }; + } + + if (!outcome.ok) { + return { orderId, status: "blocked", failed: outcome.signal }; + } - if (outcome.type === "timeout") { - return { orderId, status: "timeout" }; + received.push(outcome.signal); + pending.delete(outcome.signal); } await shipOrder(orderId); - return { orderId, status: "shipped" }; + return { orderId, status: "shipped", received }; } ``` ## Key APIs - [`"use workflow"`](/docs/api-reference/workflow/use-workflow) — declares the orchestrator function -- [`defineHook()`](/docs/api-reference/workflow/define-hook) — creates a named hook that suspends until signaled -- [`sleep()`](/docs/api-reference/workflow/sleep) — durable timer that survives restarts +- [`defineHook()`](/docs/api-reference/workflow/define-hook) — receives normalized signals from the edge +- [`sleep()`](/docs/api-reference/workflow/sleep) — provides a durable deadline for convergence diff --git a/docs/content/docs/cookbook/webhooks/request-reply.mdx b/docs/content/docs/cookbook/webhooks/request-reply.mdx index 2dea261f36..c4ce7e965b 100644 --- a/docs/content/docs/cookbook/webhooks/request-reply.mdx +++ b/docs/content/docs/cookbook/webhooks/request-reply.mdx @@ -12,8 +12,6 @@ Send a request to a service, wait for a correlated reply with a deadline, and re For each downstream service, send the request and wait up to a deadline for the reply. If the deadline expires, retry up to a configurable maximum. 
```typescript lineNumbers -import { sleep } from "workflow"; - declare function sendRequest( requestId: string, service: string, payload: string, timeoutMs: number, maxAttempts: number ): Promise<{ service: string; response: string | null; success: boolean }>; // @setup @@ -40,4 +38,4 @@ export async function requestReplyFlow( ## Key APIs - [`"use workflow"`](/docs/api-reference/workflow/use-workflow) — declares the orchestrator function -- [`sleep()`](/docs/api-reference/workflow/sleep) — durable timer that survives restarts +- [`"use step"`](/docs/api-reference/workflow/use-step) — the helper step owns retries, deadlines, and downstream delivery From dda6cbcb20a39516771e3273a4d16938183d1da4 Mon Sep 17 00:00:00 2001 From: John Lindquist Date: Tue, 31 Mar 2026 06:24:26 -0600 Subject: [PATCH 09/17] docs: decouple cookbook sidebar tree Separate cookbook navigation from the docs page tree so the standalone /cookbooks experience stays stable after the route move and the main docs sidebar no longer leaks cookbook entries.\n\nThis keeps cookbook navigation driven by explicit recipe metadata, which avoids duplicated section titles and makes the docs and cookbook surfaces easier to evolve independently.\n\nPloop-Iter: 8 --- docs/components/geistdocs/sidebar.tsx | 23 ++++++- docs/content/docs/meta.json | 3 +- docs/lib/geistdocs/cookbook-source.ts | 99 +++++++++++---------------- 3 files changed, 62 insertions(+), 63 deletions(-) diff --git a/docs/components/geistdocs/sidebar.tsx b/docs/components/geistdocs/sidebar.tsx index 8726f845bc..c3f6d2c4fa 100644 --- a/docs/components/geistdocs/sidebar.tsx +++ b/docs/components/geistdocs/sidebar.tsx @@ -12,7 +12,7 @@ import { import type { SidebarPageTreeComponents } from 'fumadocs-ui/components/sidebar/page-tree'; import { useTreeContext, useTreePath } from 'fumadocs-ui/contexts/tree'; import { usePathname } from 'next/navigation'; -import { Fragment, useEffect, useRef } from 'react'; +import { Fragment, useEffect, useMemo, useRef } 
from 'react'; import { Sheet, SheetContent, @@ -23,6 +23,16 @@ import { import { useSidebarContext } from '@/hooks/geistdocs/use-sidebar'; import { SearchButton } from './search'; +function isCookbookNode(item: Node) { + if (item.type === 'page') { + return item.url.startsWith('/docs/cookbook'); + } + if (item.type === 'folder') { + return item.index?.url?.startsWith('/docs/cookbook') ?? false; + } + return false; +} + export const Sidebar = () => { const { root } = useTreeContext(); const { isOpen, setIsOpen } = useSidebarContext(); @@ -36,6 +46,13 @@ export const Sidebar = () => { } }, [pathname, setIsOpen]); + const sidebarItems = useMemo(() => { + if (pathname.includes('/cookbooks')) { + return root.children; + } + return root.children.filter((item) => !isCookbookNode(item)); + }, [pathname, root.children]); + const renderSidebarList = (items: Node[]) => items.map((item) => { if (item.type === 'separator') { @@ -60,7 +77,7 @@ export const Sidebar = () => { data-sidebar-placeholder >
- {renderSidebarList(root.children)} + {renderSidebarList(sidebarItems)}
@@ -72,7 +89,7 @@ export const Sidebar = () => { setIsOpen(false)} />
- {renderSidebarList(root.children)} + {renderSidebarList(sidebarItems)}
diff --git a/docs/content/docs/meta.json b/docs/content/docs/meta.json index c6e338712d..eda30fd046 100644 --- a/docs/content/docs/meta.json +++ b/docs/content/docs/meta.json @@ -10,7 +10,6 @@ "testing", "deploying", "errors", - "api-reference", - "cookbook" + "api-reference" ] } diff --git a/docs/lib/geistdocs/cookbook-source.ts b/docs/lib/geistdocs/cookbook-source.ts index 2560841236..b6547ee3d1 100644 --- a/docs/lib/geistdocs/cookbook-source.ts +++ b/docs/lib/geistdocs/cookbook-source.ts @@ -1,4 +1,10 @@ import type { Node, Root } from 'fumadocs-core/page-tree'; +import { + categoryLabels, + categoryOrder, + recipes, + type RecipeCategory, +} from '../cookbook-tree'; import { source } from './source'; const COOKBOOK_DOCS_PREFIX_RE = /\/docs\/cookbook(?=\/|$)/g; @@ -14,78 +20,55 @@ export function rewriteCookbookUrlsInText(text: string): string { return text.replace(COOKBOOK_DOCS_PREFIX_RE, '/cookbooks'); } -function isCookbookFolder(node: Node): node is FolderNode { - if (node.type !== 'folder') return false; - - if (node.index?.url?.startsWith('/docs/cookbook')) return true; - - // Fallback: check if children contain cookbook pages - return node.children.some((child) => { - if (child.type === 'page') return child.url.startsWith('/docs/cookbook'); - if (child.type === 'folder') - return child.index?.url?.startsWith('/docs/cookbook') ?? 
false; - return false; - }); -} - -function rewriteNode>(node: T): T { - const rewritten = { ...node }; - - if (typeof rewritten.url === 'string') { - rewritten.url = rewriteCookbookUrl(rewritten.url); - } - - if (rewritten.index && typeof rewritten.index === 'object') { - rewritten.index = rewriteNode( - rewritten.index as Record, - ); - } - - if (Array.isArray(rewritten.children)) { - rewritten.children = rewritten.children.map((child) => - rewriteNode(child as Record), - ); - } - - return rewritten as T; +function createOverviewPage(): PageNode { + return { + type: 'page', + $id: 'cookbook__overview', + name: 'Overview', + url: '/cookbooks', + } as PageNode; } -function createOverviewPage(cookbookNode: FolderNode): PageNode | null { - if (!cookbookNode.index) { - return null; - } - +function createRecipePage( + category: RecipeCategory, + slug: string, +): PageNode { + const recipe = recipes[slug]; return { type: 'page', - $id: `${cookbookNode.$id}__overview`, - name: 'Overview', - url: rewriteCookbookUrl(cookbookNode.index.url), + $id: `cookbook__${slug}`, + name: recipe.title, + url: `/cookbooks/${category}/${slug}`, } as PageNode; } +function createCategoryFolder(category: RecipeCategory): FolderNode { + const categoryRecipes = Object.values(recipes).filter( + (recipe) => recipe.category === category, + ); + return { + type: 'folder', + $id: `cookbook__${category}`, + name: categoryLabels[category], + children: categoryRecipes.map((recipe) => + createRecipePage(category as RecipeCategory, recipe.slug), + ), + } as FolderNode; +} + /** - * Extract the cookbook subtree from the docs page tree, - * rewriting URLs from /docs/cookbook/... to /cookbooks/... - * Returns a proper Root tree with an Overview entry for the landing page. + * Build a standalone cookbook sidebar tree from cookbook-tree metadata. + * No longer depends on locating a cookbook node inside the docs page tree. 
*/ export function getCookbookTree(lang: string): Root { const fullTree = source.pageTree[lang]; - const cookbookNode = fullTree.children.find(isCookbookFolder); - - if (!cookbookNode) { - throw new Error('Cookbook tree not found in docs source'); - } - - const overview = createOverviewPage(cookbookNode); - - const categoryNodes = cookbookNode.children.map( - (child) => rewriteNode(child as Record) as Node, - ); - return { ...fullTree, name: 'Cookbooks', - children: [...(overview ? [overview] : []), ...categoryNodes], + children: [ + createOverviewPage(), + ...categoryOrder.map((category) => createCategoryFolder(category)), + ], }; } From 2082142acc4127428c0416c0cfdc166f09140b70 Mon Sep 17 00:00:00 2001 From: John Lindquist Date: Tue, 31 Mar 2026 07:02:17 -0600 Subject: [PATCH 10/17] fix(docs): align cookbook public nav Keep cookbook pages on their public /cookbooks surface so metadata and copied markdown do not leak legacy /docs/cookbook paths.\n\nSimplify sidebar rendering to trust the injected page tree, which avoids route-specific filtering and keeps cookbook navigation consistent with the active layout tree.\n\nPloop-Iter: 9 --- .../app/[lang]/cookbooks/[[...slug]]/page.tsx | 11 +++++---- docs/components/geistdocs/sidebar.tsx | 23 +++---------------- docs/content/docs/cookbook/index.mdx | 2 +- 3 files changed, 11 insertions(+), 25 deletions(-) diff --git a/docs/app/[lang]/cookbooks/[[...slug]]/page.tsx b/docs/app/[lang]/cookbooks/[[...slug]]/page.tsx index 909df493de..a2c9d3222f 100644 --- a/docs/app/[lang]/cookbooks/[[...slug]]/page.tsx +++ b/docs/app/[lang]/cookbooks/[[...slug]]/page.tsx @@ -4,7 +4,10 @@ import { createRelativeLink } from 'fumadocs-ui/mdx'; import type { Metadata } from 'next'; import { notFound } from 'next/navigation'; import { CookbookExplorer } from '@/components/geistdocs/cookbook-explorer'; -import { rewriteCookbookUrl } from '@/lib/geistdocs/cookbook-source'; +import { + rewriteCookbookUrl, + rewriteCookbookUrlsInText, +} from 
'@/lib/geistdocs/cookbook-source'; import { AskAI } from '@/components/geistdocs/ask-ai'; import { CopyPage } from '@/components/geistdocs/copy-page'; import { @@ -36,7 +39,7 @@ const Page = async ({ params }: PageProps<'/[lang]/cookbooks/[[...slug]]'>) => { const publicUrl = rewriteCookbookUrl(page.url); const publicPage = { ...page, url: publicUrl } as typeof page; - const markdown = await getLLMText(page); + const markdown = rewriteCookbookUrlsInText(await getLLMText(page)); const MDX = page.data.body; return ( @@ -99,7 +102,7 @@ export const generateMetadata = async ({ notFound(); } - const publicPath = `/cookbooks${slug ? `/${slug.join('/')}` : ''}`; + const publicPath = rewriteCookbookUrl(page.url); const metadata: Metadata = { title: page.data.title, @@ -110,7 +113,7 @@ export const generateMetadata = async ({ alternates: { canonical: publicPath, types: { - 'text/markdown': publicPath, + 'text/markdown': `${publicPath}.md`, }, }, }; diff --git a/docs/components/geistdocs/sidebar.tsx b/docs/components/geistdocs/sidebar.tsx index c3f6d2c4fa..8726f845bc 100644 --- a/docs/components/geistdocs/sidebar.tsx +++ b/docs/components/geistdocs/sidebar.tsx @@ -12,7 +12,7 @@ import { import type { SidebarPageTreeComponents } from 'fumadocs-ui/components/sidebar/page-tree'; import { useTreeContext, useTreePath } from 'fumadocs-ui/contexts/tree'; import { usePathname } from 'next/navigation'; -import { Fragment, useEffect, useMemo, useRef } from 'react'; +import { Fragment, useEffect, useRef } from 'react'; import { Sheet, SheetContent, @@ -23,16 +23,6 @@ import { import { useSidebarContext } from '@/hooks/geistdocs/use-sidebar'; import { SearchButton } from './search'; -function isCookbookNode(item: Node) { - if (item.type === 'page') { - return item.url.startsWith('/docs/cookbook'); - } - if (item.type === 'folder') { - return item.index?.url?.startsWith('/docs/cookbook') ?? 
false; - } - return false; -} - export const Sidebar = () => { const { root } = useTreeContext(); const { isOpen, setIsOpen } = useSidebarContext(); @@ -46,13 +36,6 @@ export const Sidebar = () => { } }, [pathname, setIsOpen]); - const sidebarItems = useMemo(() => { - if (pathname.includes('/cookbooks')) { - return root.children; - } - return root.children.filter((item) => !isCookbookNode(item)); - }, [pathname, root.children]); - const renderSidebarList = (items: Node[]) => items.map((item) => { if (item.type === 'separator') { @@ -77,7 +60,7 @@ export const Sidebar = () => { data-sidebar-placeholder >
- {renderSidebarList(sidebarItems)} + {renderSidebarList(root.children)}
@@ -89,7 +72,7 @@ export const Sidebar = () => { setIsOpen(false)} />
- {renderSidebarList(sidebarItems)} + {renderSidebarList(root.children)}
diff --git a/docs/content/docs/cookbook/index.mdx b/docs/content/docs/cookbook/index.mdx index 13c003a65d..8dc3b7490b 100644 --- a/docs/content/docs/cookbook/index.mdx +++ b/docs/content/docs/cookbook/index.mdx @@ -1,5 +1,5 @@ --- -title: Cookbook +title: Cookbooks description: Find the right workflow pattern for your use case. Browse 50 recipes with code examples. type: overview --- From eb3245f677dda03df7918c8cc33f85ff720bbed0 Mon Sep 17 00:00:00 2001 From: John Lindquist Date: Tue, 31 Mar 2026 07:39:23 -0600 Subject: [PATCH 11/17] refactor(docs): decouple cookbook routing Move cookbook rendering off the shared docs route so cookbook pages can behave like a first-class docs surface without leaking cookbook-specific UI into the main docs experience. Centralizing cookbook tree filtering keeps sidebar behavior consistent in one place and avoids duplicate cookbook navigation state across layouts. Ploop-Iter: 10 --- .../app/[lang]/cookbooks/[[...slug]]/page.tsx | 22 ++++++++++++++++--- docs/app/[lang]/docs/[[...slug]]/page.tsx | 2 -- docs/app/[lang]/docs/layout.tsx | 15 ++----------- docs/lib/geistdocs/cookbook-source.ts | 19 ++++++++++++++++ 4 files changed, 40 insertions(+), 18 deletions(-) diff --git a/docs/app/[lang]/cookbooks/[[...slug]]/page.tsx b/docs/app/[lang]/cookbooks/[[...slug]]/page.tsx index a2c9d3222f..6c160cac40 100644 --- a/docs/app/[lang]/cookbooks/[[...slug]]/page.tsx +++ b/docs/app/[lang]/cookbooks/[[...slug]]/page.tsx @@ -2,8 +2,9 @@ import { Step, Steps } from 'fumadocs-ui/components/steps'; import { Tab, Tabs } from 'fumadocs-ui/components/tabs'; import { createRelativeLink } from 'fumadocs-ui/mdx'; import type { Metadata } from 'next'; +import dynamic from 'next/dynamic'; import { notFound } from 'next/navigation'; -import { CookbookExplorer } from '@/components/geistdocs/cookbook-explorer'; +import type { ComponentProps } from 'react'; import { rewriteCookbookUrl, rewriteCookbookUrlsInText, @@ -25,6 +26,14 @@ import { Badge } from 
'@/components/ui/badge'; import { Separator } from '@/components/ui/separator'; import { getLLMText, getPageImage, source } from '@/lib/geistdocs/source'; +const LazyCookbookExplorer = dynamic( + () => + import('@/components/geistdocs/cookbook-explorer').then( + (mod) => mod.CookbookExplorer, + ), + { loading: () => null }, +); + const Page = async ({ params }: PageProps<'/[lang]/cookbooks/[[...slug]]'>) => { const { slug, lang } = await params; @@ -42,6 +51,13 @@ const Page = async ({ params }: PageProps<'/[lang]/cookbooks/[[...slug]]'>) => { const markdown = rewriteCookbookUrlsInText(await getLLMText(page)); const MDX = page.data.body; + const RelativeLink = createRelativeLink(source, publicPage); + const PublicCookbookLink = (props: ComponentProps) => { + const href = + typeof props.href === 'string' ? rewriteCookbookUrl(props.href) : props.href; + return ; + }; + return ( ) => { , + CookbookExplorer: () => , })} /> diff --git a/docs/app/[lang]/docs/[[...slug]]/page.tsx b/docs/app/[lang]/docs/[[...slug]]/page.tsx index 4f3fffd159..0aaaf5fb7f 100644 --- a/docs/app/[lang]/docs/[[...slug]]/page.tsx +++ b/docs/app/[lang]/docs/[[...slug]]/page.tsx @@ -5,7 +5,6 @@ import type { Metadata } from 'next'; import { notFound, permanentRedirect } from 'next/navigation'; import { rewriteCookbookUrl } from '@/lib/geistdocs/cookbook-source'; import { AgentTraces } from '@/components/custom/agent-traces'; -import { CookbookExplorer } from '@/components/geistdocs/cookbook-explorer'; import { FluidComputeCallout } from '@/components/custom/fluid-compute-callout'; import { AskAI } from '@/components/geistdocs/ask-ai'; import { CopyPage } from '@/components/geistdocs/copy-page'; @@ -84,7 +83,6 @@ const Page = async ({ params }: PageProps<'/[lang]/docs/[[...slug]]'>) => { ...AccordionComponents, Tabs, Tab, - CookbookExplorer: () => , // No-op for world MDX files (they redirect to /worlds/[id]) WorldTestingPerformance: WorldTestingPerformanceNoop, })} diff --git 
a/docs/app/[lang]/docs/layout.tsx b/docs/app/[lang]/docs/layout.tsx index 866624d514..b59f605872 100644 --- a/docs/app/[lang]/docs/layout.tsx +++ b/docs/app/[lang]/docs/layout.tsx @@ -1,22 +1,11 @@ -import type { Root } from 'fumadocs-core/page-tree'; import { DocsLayout } from '@/components/geistdocs/docs-layout'; -import { source } from '@/lib/geistdocs/source'; - -function withoutCookbook(tree: Root): Root { - return { - ...tree, - children: tree.children.filter((node) => { - if (node.type !== 'folder') return true; - return !node.index?.url?.startsWith('/docs/cookbook'); - }), - }; -} +import { getDocsTreeWithoutCookbook } from '@/lib/geistdocs/cookbook-source'; const Layout = async ({ children, params }: LayoutProps<'/[lang]/docs'>) => { const { lang } = await params; return ( - + {children} ); diff --git a/docs/lib/geistdocs/cookbook-source.ts b/docs/lib/geistdocs/cookbook-source.ts index b6547ee3d1..b19b75e318 100644 --- a/docs/lib/geistdocs/cookbook-source.ts +++ b/docs/lib/geistdocs/cookbook-source.ts @@ -20,6 +20,25 @@ export function rewriteCookbookUrlsInText(text: string): string { return text.replace(COOKBOOK_DOCS_PREFIX_RE, '/cookbooks'); } +function isCookbookFolder(node: Node): boolean { + return ( + node.type === 'folder' && + (node.index?.url?.startsWith('/docs/cookbook') ?? false) + ); +} + +/** + * Return the docs page tree with cookbook nodes removed. + * Used by the docs layout so the sidebar never shows cookbook entries. 
+ */ +export function getDocsTreeWithoutCookbook(lang: string): Root { + const fullTree = source.pageTree[lang]; + return { + ...fullTree, + children: fullTree.children.filter((node) => !isCookbookFolder(node)), + }; +} + function createOverviewPage(): PageNode { return { type: 'page', From acc7132f8cb696c77a374466e53127730dd9898e Mon Sep 17 00:00:00 2001 From: John Lindquist Date: Tue, 31 Mar 2026 08:27:48 -0600 Subject: [PATCH 12/17] docs: improve cookbook explorer accessibility Improve the cookbooks entrypoint so loading and keyboard navigation are usable without visual cues, and keep guided and browse modes resilient while the route hydrates. Ploop-Iter: 11 --- .../app/[lang]/cookbooks/[[...slug]]/page.tsx | 12 +- .../geistdocs/cookbook-explorer.tsx | 471 ++++++++++-------- 2 files changed, 281 insertions(+), 202 deletions(-) diff --git a/docs/app/[lang]/cookbooks/[[...slug]]/page.tsx b/docs/app/[lang]/cookbooks/[[...slug]]/page.tsx index 6c160cac40..2e7cfb9f5c 100644 --- a/docs/app/[lang]/cookbooks/[[...slug]]/page.tsx +++ b/docs/app/[lang]/cookbooks/[[...slug]]/page.tsx @@ -31,7 +31,17 @@ const LazyCookbookExplorer = dynamic( import('@/components/geistdocs/cookbook-explorer').then( (mod) => mod.CookbookExplorer, ), - { loading: () => null }, + { + loading: () => ( +
+ Loading cookbook explorer… +
+ ), + }, ); const Page = async ({ params }: PageProps<'/[lang]/cookbooks/[[...slug]]'>) => { diff --git a/docs/components/geistdocs/cookbook-explorer.tsx b/docs/components/geistdocs/cookbook-explorer.tsx index ee432b71b0..6c237fe1fe 100644 --- a/docs/components/geistdocs/cookbook-explorer.tsx +++ b/docs/components/geistdocs/cookbook-explorer.tsx @@ -2,7 +2,14 @@ import Link from 'next/link'; import { useRouter } from 'next/navigation'; -import { useCallback, useEffect, useMemo, useRef, useState } from 'react'; +import { + useCallback, + useEffect, + useMemo, + useRef, + useState, + type KeyboardEvent as ReactKeyboardEvent, +} from 'react'; import { tree, recipes, @@ -19,6 +26,15 @@ type PathEntry = { nodeId: string; branchIndex: number }; type ExplorerMode = 'guided' | 'browse'; type CategoryFilter = RecipeCategory | 'all'; +type RecipeCardProps = { + lang: string; + recipe: Recipe; + highlighted?: boolean; + linkRef?: (node: HTMLAnchorElement | null) => void; + onFocus?: () => void; + onKeyDown?: (event: ReactKeyboardEvent) => void; +}; + function getRecipeCategory(recipe: Recipe): RecipeCategory { return slugToCategory[recipe.slug] as RecipeCategory; } @@ -31,20 +47,19 @@ function RecipeCard({ lang, recipe, highlighted = false, -}: { - lang: string; - recipe: Recipe; - highlighted?: boolean; -}) { + linkRef, + onFocus, + onKeyDown, +}: RecipeCardProps) { const category = getRecipeCategory(recipe); return (
@@ -65,6 +80,7 @@ function RecipeCard({

{/* Mode switcher */} -
+
{/* Browse mode */} - {mode === 'browse' ? ( -
-
-
- - - - - setQuery(event.target.value)} - placeholder="Search recipes, use cases, or slugs" - autoComplete="off" - className="w-full bg-transparent text-sm outline-none placeholder:text-muted-foreground" - /> -
-
+ + + {/* Guided mode */} +
); } From e8b037111c1fd000158c8ea6b4f4bea96db2a15e Mon Sep 17 00:00:00 2001 From: John Lindquist Date: Tue, 31 Mar 2026 09:15:51 -0600 Subject: [PATCH 13/17] ploop: iteration 12 checkpoint Automated checkpoint commit. Ploop-Iter: 12 --- .../geistdocs/cookbook-explorer.tsx | 234 +++++++++++++++--- docs/content/docs/cookbook/index.mdx | 2 +- 2 files changed, 202 insertions(+), 34 deletions(-) diff --git a/docs/components/geistdocs/cookbook-explorer.tsx b/docs/components/geistdocs/cookbook-explorer.tsx index 6c237fe1fe..de4bc5f4aa 100644 --- a/docs/components/geistdocs/cookbook-explorer.tsx +++ b/docs/components/geistdocs/cookbook-explorer.tsx @@ -1,7 +1,7 @@ 'use client'; import Link from 'next/link'; -import { useRouter } from 'next/navigation'; +import { usePathname, useRouter, useSearchParams } from 'next/navigation'; import { useCallback, useEffect, @@ -35,6 +35,77 @@ type RecipeCardProps = { onKeyDown?: (event: ReactKeyboardEvent) => void; }; +type SearchParamReader = { get(name: string): string | null }; + +const VIEW_PARAM = 'view'; +const QUERY_PARAM = 'q'; +const CATEGORY_PARAM = 'category'; + +function isRecipeCategory(value: string | null): value is RecipeCategory { + return value != null && categoryOrder.includes(value as RecipeCategory); +} + +function readExplorerMode(params: SearchParamReader): ExplorerMode { + return params.get(VIEW_PARAM) === 'browse' ? 'browse' : 'guided'; +} + +function readExplorerQuery(params: SearchParamReader): string { + return params.get(QUERY_PARAM) ?? ''; +} + +function readExplorerCategory(params: SearchParamReader): CategoryFilter { + const value = params.get(CATEGORY_PARAM); + return isRecipeCategory(value) ? 
value : 'all'; +} + +function buildExplorerUrl( + pathname: string, + mode: ExplorerMode, + query: string, + category: CategoryFilter +) { + const params = new URLSearchParams(); + if (mode === 'browse') params.set(VIEW_PARAM, 'browse'); + if (query.trim()) params.set(QUERY_PARAM, query.trim()); + if (category !== 'all') params.set(CATEGORY_PARAM, category); + const search = params.toString(); + return search ? `${pathname}?${search}` : pathname; +} + +type QuickPick = { + label: string; + query: string; + category: CategoryFilter; + description: string; +}; + +const QUICK_PICKS: QuickPick[] = [ + { + label: 'Retry flaky APIs', + query: 'retry', + category: 'resilience', + description: 'Backoff, 429s, circuit breakers', + }, + { + label: 'Wait for approval', + query: 'approval', + category: 'approvals', + description: 'Single-step or chained sign-off', + }, + { + label: 'Handle webhooks', + query: 'webhook', + category: 'webhooks', + description: 'Callbacks, polling, claim checks', + }, + { + label: 'Route dynamically', + query: 'route', + category: 'routing', + description: 'Routers, slips, detours, filters', + }, +]; + function getRecipeCategory(recipe: Recipe): RecipeCategory { return slugToCategory[recipe.slug] as RecipeCategory; } @@ -100,15 +171,46 @@ function RecipeCard({ export function CookbookExplorer({ lang }: { lang: string }) { const router = useRouter(); + const pathname = usePathname(); + const searchParams = useSearchParams(); const searchInputRef = useRef(null); - const [mode, setMode] = useState('guided'); + const [mode, setMode] = useState(() => + readExplorerMode(searchParams) + ); const [path, setPath] = useState([]); - const [query, setQuery] = useState(''); - const [selectedCategory, setSelectedCategory] = - useState('all'); + const [query, setQuery] = useState(() => readExplorerQuery(searchParams)); + const [selectedCategory, setSelectedCategory] = useState(() => + readExplorerCategory(searchParams) + ); const [activeIndex, setActiveIndex] = 
useState(0); + // Sync state from URL on popstate / external navigation + useEffect(() => { + const nextMode = readExplorerMode(searchParams); + const nextQuery = readExplorerQuery(searchParams); + const nextCategory = readExplorerCategory(searchParams); + setMode((current) => (current === nextMode ? current : nextMode)); + setQuery((current) => (current === nextQuery ? current : nextQuery)); + setSelectedCategory((current) => + current === nextCategory ? current : nextCategory + ); + }, [searchParams]); + + // Push state changes to the URL + useEffect(() => { + const currentUrl = buildExplorerUrl( + pathname, + readExplorerMode(searchParams), + readExplorerQuery(searchParams), + readExplorerCategory(searchParams) + ); + const nextUrl = buildExplorerUrl(pathname, mode, query, selectedCategory); + if (nextUrl !== currentUrl) { + router.replace(nextUrl, { scroll: false }); + } + }, [pathname, router, searchParams, mode, query, selectedCategory]); + const allRecipes = useMemo( () => Object.values(recipes).sort((a, b) => { @@ -181,8 +283,38 @@ export function CookbookExplorer({ lang }: { lang: string }) { [filteredRecipes.length] ); + const openBrowse = useCallback( + (next?: Partial<{ query: string; category: CategoryFilter }>) => { + setMode('browse'); + if (next?.query !== undefined) setQuery(next.query); + if (next?.category !== undefined) setSelectedCategory(next.category); + setActiveIndex(0); + requestAnimationFrame(() => searchInputRef.current?.focus()); + }, + [] + ); + + const clearBrowse = useCallback(() => { + setQuery(''); + setSelectedCategory('all'); + setActiveIndex(0); + requestAnimationFrame(() => searchInputRef.current?.focus()); + }, []); + + const closeBrowse = useCallback(() => { + setMode('guided'); + setQuery(''); + setSelectedCategory('all'); + setActiveIndex(0); + }, []); + const handleRecipeKeyDown = useCallback( (index: number) => (event: ReactKeyboardEvent) => { + if (event.key === 'Escape') { + event.preventDefault(); + 
requestAnimationFrame(() => searchInputRef.current?.focus()); + return; + } if (filteredRecipes.length === 0) return; if (event.key === 'ArrowDown') { event.preventDefault(); @@ -204,7 +336,6 @@ export function CookbookExplorer({ lang }: { lang: string }) { [filteredRecipes.length, focusRecipe] ); - // Only / and ⌘K stay global — arrow keys are handled per-element useEffect(() => { const onKeyDown = (event: KeyboardEvent) => { const target = event.target as HTMLElement | null; @@ -213,21 +344,30 @@ export function CookbookExplorer({ lang }: { lang: string }) { (target.tagName === 'INPUT' || target.tagName === 'TEXTAREA' || target.isContentEditable); + const isInsideExplorer = !!target?.closest?.('[data-cookbook-explorer]'); if ( - ((event.metaKey || event.ctrlKey) && - event.key.toLowerCase() === 'k') || + ((event.metaKey || event.ctrlKey) && event.key.toLowerCase() === 'k') || (event.key === '/' && !isEditable) ) { event.preventDefault(); - setMode('browse'); - requestAnimationFrame(() => searchInputRef.current?.focus()); + openBrowse(); + return; + } + + if (event.key === 'Escape' && isInsideExplorer && mode === 'browse') { + event.preventDefault(); + if (query || selectedCategory !== 'all') { + clearBrowse(); + } else { + closeBrowse(); + } } }; window.addEventListener('keydown', onKeyDown); return () => window.removeEventListener('keydown', onKeyDown); - }, []); + }, [openBrowse, clearBrowse, closeBrowse, mode, query, selectedCategory]); // --- Guided mode state --- @@ -284,7 +424,7 @@ export function CookbookExplorer({ lang }: { lang: string }) { ); return ( -
+
{/* Mode switcher */}
setMode('guided')} + onClick={() => closeBrowse()} className={`min-h-11 rounded-full px-4 py-2 text-sm transition-colors focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-primary ${ mode === 'guided' ? 'bg-primary text-primary-foreground' @@ -307,10 +447,7 @@ export function CookbookExplorer({ lang }: { lang: string }) { type="button" aria-pressed={mode === 'browse'} aria-controls="cookbook-browse-panel" - onClick={() => { - setMode('browse'); - requestAnimationFrame(() => searchInputRef.current?.focus()); - }} + onClick={() => openBrowse()} className={`min-h-11 rounded-full px-4 py-2 text-sm transition-colors focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-primary ${ mode === 'browse' ? 'bg-primary text-primary-foreground' @@ -321,16 +458,35 @@ export function CookbookExplorer({ lang }: { lang: string }) { Press{' '} - /{' '} - or{' '} + / or{' '} + ⌘K /{' '} - ⌘K + Ctrl+K
{/* Browse mode */}