From 63b1200d1855fcd3df3c7f017ee066e50dc26b93 Mon Sep 17 00:00:00 2001 From: Linell Bonnette Date: Fri, 20 Mar 2026 16:29:41 -0700 Subject: [PATCH 1/4] feat: add docs for streaming durable endpoints --- next-env.d.ts | 1 - pages/docs/examples/durable-endpoints.mdx | 2 +- pages/docs/features/realtime.mdx | 2 + pages/docs/learn/durable-endpoints.mdx | 2 +- .../learn/durable-endpoints/streaming.mdx | 276 ++++++++++++++++++ .../reference/typescript/v4/client/create.mdx | 3 + .../typescript/v4/durable-endpoints.mdx | 234 ++++++++++++++- .../typescript/v4/serve/streaming.mdx | 4 + pages/docs/streaming.mdx | 2 + shared/Docs/navigationStructure.ts | 11 +- 10 files changed, 525 insertions(+), 12 deletions(-) create mode 100644 pages/docs/learn/durable-endpoints/streaming.mdx diff --git a/next-env.d.ts b/next-env.d.ts index 36a4fe488..3cd7048ed 100644 --- a/next-env.d.ts +++ b/next-env.d.ts @@ -1,7 +1,6 @@ /// /// /// -/// // NOTE: This file should not be edited // see https://nextjs.org/docs/app/api-reference/config/typescript for more information. diff --git a/pages/docs/examples/durable-endpoints.mdx b/pages/docs/examples/durable-endpoints.mdx index 5c24562aa..642cae091 100644 --- a/pages/docs/examples/durable-endpoints.mdx +++ b/pages/docs/examples/durable-endpoints.mdx @@ -9,7 +9,7 @@ export const description = "Learn how to use Inngest Durable Endpoints to make a [Durable Endpoints](/docs/learn/durable-endpoints) let you add durability to regular HTTP handlers without separate function definitions or event triggers. You wrap your existing API route with `inngest.endpoint()` and use `step.run()` inline to get automatic retries and memoization for each step. -This is useful when you want your endpoint to orchestrate multiple operations (like booking a flight, processing a payment, and sending a confirmation) and guarantee that each step completes exactly once, even if the handler crashes or restarts partway through. 
+This is useful when you want your endpoint to orchestrate multiple operations (like booking a flight, processing a payment, and sending a confirmation) and guarantee that each step completes exactly once, even if the handler crashes or restarts partway through. You can also [stream SSE data to clients in real-time](/docs/learn/durable-endpoints/streaming) during execution. ## How it differs from traditional Inngest diff --git a/pages/docs/features/realtime.mdx b/pages/docs/features/realtime.mdx index 8987b3c20..80765eb04 100644 --- a/pages/docs/features/realtime.mdx +++ b/pages/docs/features/realtime.mdx @@ -31,6 +31,8 @@ Realtime lets you stream function progress, push live updates into the browser, and build interactive workflows like human-in-the-loop approvals without managing your own websocket infrastructure. +If you're using [Durable Endpoints](/docs/learn/durable-endpoints), you can also [stream SSE data directly to clients](/docs/learn/durable-endpoints/streaming) using `stream.push()` and `stream.pipe()` without setting up channels or topics. + ## Concepts The v4 model revolves around five primitives: diff --git a/pages/docs/learn/durable-endpoints.mdx b/pages/docs/learn/durable-endpoints.mdx index 1b5443006..97949b91f 100644 --- a/pages/docs/learn/durable-endpoints.mdx +++ b/pages/docs/learn/durable-endpoints.mdx @@ -296,7 +296,7 @@ Durable Endpoints is currently in beta. The following limitations apply: - **Flow control is not supported** — Features like concurrency limits and rate limiting are not available for Durable Endpoints - **POST body is not yet supported** — Prefer using query strings for passing data. POST body support is coming soon -- **Standard HTTP responses only** — Durable Endpoints should return a standard HTTP response, not an SSE stream +- **Standard HTTP responses only** — Durable Endpoints should return a standard HTTP response. 
For streaming, see [Streaming SSE from Durable Endpoints](/docs/learn/durable-endpoints/streaming) ## Examples diff --git a/pages/docs/learn/durable-endpoints/streaming.mdx b/pages/docs/learn/durable-endpoints/streaming.mdx new file mode 100644 index 000000000..d9cc30f94 --- /dev/null +++ b/pages/docs/learn/durable-endpoints/streaming.mdx @@ -0,0 +1,276 @@ +import { Callout, VersionBadge } from "src/shared/Docs/mdx"; + +export const description = 'Learn how to stream SSE data from Durable Endpoints to clients using stream.push() and stream.pipe(), with automatic rollback on retry.'; + +# Streaming SSE from Durable Endpoints + +Durable Endpoints can stream data back to clients in real-time using Server-Sent Events (SSE). This lets you stream AI inference tokens, progress updates, or any other data — while keeping the durability guarantees of [durable steps](/docs/learn/inngest-steps). + +Streaming works across multiple steps within a single endpoint invocation, and handles the transition from sync to async mode seamlessly. If a step fails and retries, any data streamed during that step is automatically rolled back on the client. + + + Streaming SSE from Durable Endpoints is currently only available in the TypeScript SDK. This guide assumes you've already [set up a Durable Endpoint](/docs/learn/durable-endpoints). + + +## When to use streaming + +- **AI inference** — Stream LLM tokens to the browser as they're generated, so users see results immediately. +- **Status updates** — Send progress messages during long-running endpoint executions. +- **Making existing streaming endpoints durable** — Wrap your existing streaming HTTP endpoints with steps to add retry and observability at no cost to functionality. + +If you don't need to stream data directly to an HTTP client, consider using [Realtime](/docs/features/realtime) to push updates from background Inngest functions via pub/sub channels. 
+ +## Quick start + +### Server + +Import `step` from `inngest` and `stream` from `inngest/experimental/durable-endpoints`, then use `stream.push()` or `stream.pipe()` inside your endpoint handler: + +```typescript +import Anthropic from "@anthropic-ai/sdk"; +import { step } from "inngest"; +import { stream } from "inngest/experimental/durable-endpoints"; +import { inngest } from "@/inngest"; + +export const GET = inngest.endpoint(async () => { + // Option A: push() with an SDK event callback + const text = await step.run("generate", async () => { + stream.push("Generating...\n"); + + const client = new Anthropic(); + const response = client.messages.stream({ + model: "claude-sonnet-4-20250514", + max_tokens: 512, + messages: [{ role: "user", content: "Write a haiku about durability." }], + }); + + response.on("text", (token) => stream.push(token)); + return await response.finalText(); + }); + + // Option B: pipe() — streams each chunk AND returns the collected text + await step.run("translate", async () => { + stream.push(`\nTranslating...\n`); + + const client = new Anthropic(); + const response = client.messages.stream({ + model: "claude-sonnet-4-20250514", + max_tokens: 256, + messages: [{ role: "user", content: `Translate to French: ${text}` }], + }); + + return stream.pipe(async function* () { + for await (const event of response) { + if ( + event.type === "content_block_delta" && + event.delta.type === "text_delta" + ) { + yield event.delta.text; + } + } + }); + }); + + return new Response("\nDone!"); +}); +``` + +### Client + +Use `fetchWithStream()` from `inngest/experimental/durable-endpoints/client` to consume the stream. It handles SSE parsing, sync-to-async redirects, and commit/rollback automatically. Chunks arrive on the client in the order they are pushed or yielded on the server. 
+ +```typescript +"use client"; + +import { useState, useRef } from "react"; +import { fetchWithStream } from "inngest/experimental/durable-endpoints/client"; + +export default function Generate() { + const [chunks, setChunks] = useState([]); + const uncommittedCountRef = useRef(0); + + async function run() { + setChunks([]); + uncommittedCountRef.current = 0; + + const resp = await fetchWithStream("/api/generate", { + onData: ({ data }) => { + if (typeof data === "string") { + uncommittedCountRef.current++; + setChunks((prev) => [...prev, data]); + } + }, + onRollback: () => { + // A step failed and will retry — remove the chunks it produced + const count = uncommittedCountRef.current; + setChunks((prev) => prev.slice(0, prev.length - count)); + uncommittedCountRef.current = 0; + }, + onCommit: () => { + // Step completed — its chunks are now permanent + uncommittedCountRef.current = 0; + }, + }); + + // The endpoint's return value is available as the Response body + const result = await resp.text(); + setChunks((prev) => [...prev, result]); + } + + return ( +
+    <div>
+      <button onClick={run}>Generate</button>
+      <pre>{chunks.join("")}</pre>
+    </div>
+  );
+ +## Client API + +### `fetchWithStream(url, options)` + +The primary way to consume a streaming Durable Endpoint. Import it from `inngest/experimental/durable-endpoints/client`: + +```typescript +import { fetchWithStream } from "inngest/experimental/durable-endpoints/client"; +``` + +`fetchWithStream()` returns a `Promise` — `await` it to drive the stream to completion. When the endpoint finishes, the returned `Response` contains the endpoint's final return value. If the endpoint does not use streaming, `fetchWithStream()` returns the raw `Response` as-is. + +The core callbacks handle the majority of streaming use cases: + +- **`onData({ data, hashedStepId })`** — Called for each chunk. `data` is the deserialized value; `hashedStepId` identifies which step produced it (or `null` if streamed outside a step). Data should be considered uncommitted until `onCommit` fires. +- **`onRollback({ hashedStepId })`** — Called when a step fails and will retry. Your code is responsible for tracking and removing the chunks produced by that step (see the [Quick start](#client) example for a pattern using a ref counter). +- **`onCommit({ hashedStepId })`** — Called when a step completes successfully. Chunks from that step are now permanent and will never be rolled back. + +Because `stream.push()` accepts any JSON-serializable value, `data` in the `onData` callback is typed as `unknown`. 
Narrow the type in your callback as needed: + +```typescript +const uncommittedCount = { current: 0 }; + +const resp = await fetchWithStream("/api/generate", { + onData: ({ data }) => { + if (typeof data === "string") { + uncommittedCount.current++; + console.log("Chunk:", data); + } + }, + onRollback: () => { + // Discard uncommitted chunks and reset counter + uncommittedCount.current = 0; + }, + onCommit: () => { + // Chunks are permanent — reset counter + uncommittedCount.current = 0; + }, +}); + +const result = await resp.text(); +``` + +For all available options (`fetch`, `fetchOpts`, `onMetadata`, `onDone`), see the [full API reference](/docs/reference/typescript/v4/durable-endpoints#client-fetchwithstream). + +## How it works + +### Sync-to-async transitions + +When a client calls a streaming Durable Endpoint, the SSE stream flows directly from your app to the client. If the endpoint needs to go async (e.g. due to `step.sleep()`, `step.waitForEvent()`, or a retry), the SDK sends a redirect event telling the client where to reconnect, and the stream continues through the Inngest server. + +`fetchWithStream()` handles this redirect automatically — the client sees a single continuous stream regardless of sync-to-async transitions. + +### Streaming activation + +Streaming is activated lazily. The endpoint only sends an SSE response if: + +- The client sends the `Accept: text/event-stream` header (which `fetchWithStream()` does automatically), **and** +- Your code calls `stream.push()` or `stream.pipe()` during execution. + +If neither `push()` nor `pipe()` is called, the endpoint behaves like a regular non-streaming Durable Endpoint. + +### Rollback on retry + +Each chunk is tagged with the step that produced it (via `hashedStepId`). When a step completes, `onCommit` fires and those chunks become permanent. When a step fails and retries, `onRollback` fires and your client code should discard the uncommitted chunks from that step. 
On the retry attempt, the step streams fresh data that replaces what was rolled back. See the [Quick start](#client) for an implementation pattern. + +Data streamed outside of a `step.run()` is never rolled back. + +### SSE event types + +The stream uses SSE with the following event types. The `inngest.*` events are internal protocol events handled by `fetchWithStream()` automatically — only `inngest.stream` events contain user data. + +| Event name | Payload | Purpose | +|---|---|---| +| `inngest.metadata` | `{ runId }` | Always first. Identifies the run. | +| `inngest.stream` | `{ data, stepId? }` | User data from `push()` / `pipe()`. | +| `inngest.commit` | `{ hashedStepId }` | Step succeeded — its streamed data is permanent. | +| `inngest.rollback` | `{ hashedStepId }` | Step failed — discard its uncommitted data. | +| `inngest.redirect_info` | `{ runId, url }` | Tells the client to reconnect for async continuation. | +| `inngest.response` | `{ status, response: { body, headers, statusCode } }` | Terminal event — closes the stream. | + +## Limitations + +Streaming SSE from Durable Endpoints is currently in developer preview. In addition to any [general Durable Endpoint limitations](/docs/learn/durable-endpoints#limitations), the following apply: + +- **15 minute timeout** — Client connections time out after 15 minutes, meaning your endpoint should complete within this window (including any retries) to ensure the stream is delivered end-to-end. +- **No rollback outside of steps** — Data streamed outside of a `step.run()` is never rolled back. If you need rollback guarantees, stream from within a step. +- **One streaming parallel step** — You can stream from at most one parallel step. Streaming from multiple parallel steps will result in interleaved output that cannot be disambiguated by the client. +- **No streaming from child functions** — `step.invoke()` calls cannot stream data back to the parent function's client. 
+- **Raw `Response` objects may be lost on async transition** — If your endpoint returns a `Response` (like a file download) and goes async, the Response is lost because it can't be memoized. Use `stream.push()` or `stream.pipe()` instead. + +## SDK support + +| SDK | Support | Version | +|-----|---------|---------| +| TypeScript | Developer Preview | >= 4.x (with `endpointAdapter`) | diff --git a/pages/docs/reference/typescript/v4/client/create.mdx b/pages/docs/reference/typescript/v4/client/create.mdx index d335b92e9..4f20bd2fe 100644 --- a/pages/docs/reference/typescript/v4/client/create.mdx +++ b/pages/docs/reference/typescript/v4/client/create.mdx @@ -68,6 +68,9 @@ const inngest = new Inngest({ A stack of [middleware](/docs/features/middleware) to add to the client. + + An endpoint adapter that enables [Durable Endpoints](/docs/reference/typescript/v4/durable-endpoints). Import from `inngest/edge` or `inngest/node`. When provided, `inngest.endpoint()` and `inngest.endpointProxy()` become available. + diff --git a/pages/docs/reference/typescript/v4/durable-endpoints.mdx b/pages/docs/reference/typescript/v4/durable-endpoints.mdx index ed3695039..d59420935 100644 --- a/pages/docs/reference/typescript/v4/durable-endpoints.mdx +++ b/pages/docs/reference/typescript/v4/durable-endpoints.mdx @@ -1,10 +1,9 @@ import { Info, Callout, CodeGroup, Col, Properties, Property, Row, VersionBadge } from "src/shared/Docs/mdx"; -export const description = 'TypeScript SDK reference for Durable Endpoints. Create HTTP handlers with step-based checkpointing.'; +export const description = 'TypeScript SDK reference for Durable Endpoints. Create HTTP handlers with step-based checkpointing, streaming, and automatic recovery.'; # Durable Endpoints - Create durable HTTP endpoints using `inngest.endpoint()`. Each step within the handler is checkpointed, allowing automatic recovery from failures. 
```ts @@ -28,9 +27,10 @@ export const handler = inngest.endpoint(async (req: Request): Promise ### `endpointAdapter` -The `endpointAdapter` must be passed to the Inngest client constructor to enable Durable Endpoints. +The `endpointAdapter` must be passed to the Inngest client constructor to enable Durable Endpoints. Import it from the entry point matching your runtime: -```ts + +```ts {{ title: "Edge" }} import { Inngest } from "inngest"; import { endpointAdapter } from "inngest/edge"; @@ -38,14 +38,58 @@ const inngest = new Inngest({ id: "my-app", endpointAdapter, }); + ``` +```ts {{ title: "Node.js" }} +import { Inngest } from "inngest"; +import { endpointAdapter } from "inngest/node"; + +const inngest = new Inngest({ + id: "my-app", + endpointAdapter, +}); +``` + + - The `endpointAdapter` is imported from `inngest/edge` (or any compatible - framework entrypoint) and is required for Durable Endpoints to function. Without it, + The `endpointAdapter` is required for Durable Endpoints to function. Without it, `inngest.endpoint()` will not be available. +### `endpointAdapter.withOptions(options)` + +Use `withOptions()` to customize adapter behavior: + +```ts +const inngest = new Inngest({ + id: "my-app", + endpointAdapter: endpointAdapter.withOptions({ + asyncRedirectUrl: "/api/inngest/poll", + retries: 5, + }), +}); +``` + + + + + + Custom URL to redirect to when transitioning from sync to async mode. A string path is resolved relative to the request origin and automatically appends `runId` and `token` query parameters. A function gives you full control over URL construction. + + + Override the auto-detected function ID for this endpoint. Defaults to `{METHOD} {path}`. + + + Maximum retries for all steps in this endpoint. Must be between `0` and `20`. Defaults to `3`. + + + Response type when transitioning from sync to async mode. Defaults to `"redirect"`. 
+ + + + + --- ## `inngest.endpoint(handler): Handler` @@ -78,6 +122,7 @@ Creates a durable endpoint handler that can use step primitives for checkpointin return Response.json({ result }); } ); + ``` @@ -221,7 +266,6 @@ can be used to create your own URL to satisfy CORS constraints when the endpoint is used from browsers. ```typescript -// When setting the `endpointAdapter`, use `.withOptions()` to set more config const inngest = new Inngest({ id: "my-app", endpointAdapter: endpointAdapter.withOptions({ @@ -229,7 +273,7 @@ const inngest = new Inngest({ }), }); -// Then create the route with `inngest.endpointProxy()` +// Create the proxy route with `inngest.endpointProxy()` Bun.serve({ port: 3000, routes: { @@ -240,3 +284,177 @@ Bun.serve({ ``` Requests will now be redirected to `/wait`. + +To stream data to the client during execution, see [Streaming](#streaming) below or the full [Streaming SSE guide](/docs/learn/durable-endpoints/streaming). + +--- + +## Streaming + +Durable Endpoints can stream data back to clients in real-time using Server-Sent Events (SSE). This lets you stream AI inference tokens, progress updates, or any other data while keeping durability guarantees. If you don't need to stream data directly to an HTTP client, consider using [Realtime](/docs/features/realtime) to push updates via pub/sub channels instead. + +For a full guide covering concepts, examples, and rollback semantics, see [Streaming SSE from Durable Endpoints](/docs/learn/durable-endpoints/streaming). + +### Server: `stream.push()` and `stream.pipe()` + +Import the `stream` object from `inngest/experimental/durable-endpoints` and use it inside your endpoint handler within a [`step.run()`](/docs/reference/typescript/v4/functions/step-run) call: + +```ts +import { step } from "inngest"; +import { stream } from "inngest/experimental/durable-endpoints"; +``` + + + + + + Send a single chunk of data to the client as an SSE event. Accepts any JSON-serializable value. 
Does not block execution. No-op outside of an Inngest execution context. + + + + + ```ts + await step.run("process", async () => { + stream.push("Loading..."); + stream.push({ progress: 50 }); + const result = await doWork(); + stream.push("Done!"); + return result; + }); + ``` + + + + + + + + Pipe a stream to the client, sending each chunk as an SSE event in real-time. Resolves with the concatenated text of all chunks — it both streams to the client _and_ collects the result for you. + + Accepts three source types: + - **`ReadableStream`** — piped directly, decoded from bytes to string chunks. + - **`AsyncIterable`** — each yielded value becomes a chunk. + - **`() => AsyncIterable`** — a factory function, letting you pass `async function*` generators directly. + + Outside of an Inngest execution context, resolves with an empty string. + + + + + ```ts + await step.run("generate", async () => { + // Pipe a ReadableStream + const res = await fetch("https://api.example.com/stream"); + const text = await stream.pipe(res.body); + + // Or pipe an async generator + const text = await stream.pipe(async function* () { + for await (const event of llmStream) { + if (event.type === "content_block_delta") { + yield event.delta.text; + } + } + }); + }); + ``` + + + +### Client: `fetchWithStream()` + +Import from `inngest/experimental/durable-endpoints/client` to consume a streaming Durable Endpoint: + +```ts +import { fetchWithStream } from "inngest/experimental/durable-endpoints/client"; +``` + +Returns a `Promise`. The returned `Response` contains the endpoint's final return value. If the endpoint does not use streaming, the raw `Response` is returned as-is. Sync-to-async redirects are handled automatically. + + + + + + The URL of the Durable Endpoint to call. + + + Custom fetch implementation. Defaults to `globalThis.fetch`. + + + Options passed to the underlying `fetch` call (e.g. `{ signal }` for cancellation). + + + Called when run metadata is received. Always fires first. 
+ + + Called for each streamed chunk. Each `stream.push()` or `stream.pipe()` yield produces one `onData` call. `data` is the deserialized value. Data should be considered uncommitted until `onCommit` fires. + + + Called when a step completes successfully. Chunks from that step are now permanent and will never be rolled back. + + + Called when a step fails and will retry. Your code is responsible for discarding the uncommitted chunks from that step. + + + Called when the stream is fully consumed (including on abort or error). + + + + + ```ts + const resp = await fetchWithStream("/api/generate", { + onData: ({ data }) => { + if (typeof data === "string") { + console.log("Chunk:", data); + } + }, + onCommit: () => { + // Chunks are permanent + }, + onRollback: () => { + // Discard uncommitted chunks + }, + }); + + const result = await resp.text(); + ``` + + + +--- + +## Node.js Utilities + +The `inngest/node` entry point exports helpers for serving Durable Endpoints in Node.js environments: + +```ts +import { serveEndpoint, createEndpointServer } from "inngest/node"; +``` + + + + + + Bridge a Web API endpoint handler to a Node.js `http.RequestListener`. Converts an incoming `http.IncomingMessage` into a Web API `Request`, invokes the handler, then streams the resulting `Response` back through the Node.js `http.ServerResponse`. + + + Create an `http.Server` that serves a Durable Endpoint handler directly. A convenience wrapper around `serveEndpoint()`. 
+ + + + + ```ts + import { createEndpointServer } from "inngest/node"; + + const server = createEndpointServer( + inngest.endpoint(async (req) => { + const result = await step.run("work", async () => { + return await doWork(); + }); + return Response.json({ result }); + }) + ); + + server.listen(3000); + ``` + + diff --git a/pages/docs/reference/typescript/v4/serve/streaming.mdx b/pages/docs/reference/typescript/v4/serve/streaming.mdx index c3a19200f..c9d2e8419 100644 --- a/pages/docs/reference/typescript/v4/serve/streaming.mdx +++ b/pages/docs/reference/typescript/v4/serve/streaming.mdx @@ -2,6 +2,10 @@ import { Callout, CodeGroup } from "src/shared/Docs/mdx"; # Streaming + + This page covers streaming responses **back to Inngest** to extend serverless timeouts. To stream data **to clients** from Durable Endpoints using SSE, see [Streaming SSE from Durable Endpoints](/docs/reference/typescript/v4/durable-endpoints#streaming). + + In select environments, the SDK allows streaming responses back to Inngest, hugely increasing maximum timeouts on many serverless platforms up to 15 minutes. While we add wider support for streaming to other platforms, we currently support the following: diff --git a/pages/docs/streaming.mdx b/pages/docs/streaming.mdx index 7ee30cc16..5aab83e52 100644 --- a/pages/docs/streaming.mdx +++ b/pages/docs/streaming.mdx @@ -4,6 +4,8 @@ import { Callout, CodeGroup } from "src/shared/Docs/mdx"; In select environments, the SDK allows streaming responses back to Inngest, hugely increasing maximum timeouts on many serverless platforms up to 15 minutes. +Looking to stream SSE data to clients from Durable Endpoints? See [Streaming SSE from Durable Endpoints](/docs/learn/durable-endpoints/streaming) for a guide, or the [API reference](/docs/reference/typescript/v4/durable-endpoints#streaming) for complete documentation. 
+ While we add wider support for streaming to other platforms, we currently support the following: - [Cloudflare Workers](/docs/learn/serving-inngest-functions#framework-cloudflare-workers) diff --git a/shared/Docs/navigationStructure.ts b/shared/Docs/navigationStructure.ts index c9168f7d6..624ca583d 100644 --- a/shared/Docs/navigationStructure.ts +++ b/shared/Docs/navigationStructure.ts @@ -693,7 +693,16 @@ const sectionLearn: (NavGroup | NavLink)[] = [ }, { title: "Durable Endpoints", - href: `/docs/learn/durable-endpoints`, + links: [ + { + title: "Overview", + href: `/docs/learn/durable-endpoints` + }, + { + title: "Streaming SSE", + href: "/docs/learn/durable-endpoints/streaming" + } + ], tag: "new", }, { From 48e45423a38b9f326b2e3a97a30c2dfc39e391a4 Mon Sep 17 00:00:00 2001 From: Linell Bonnette Date: Mon, 30 Mar 2026 11:50:52 -0700 Subject: [PATCH 2/4] chore: remove onDone too --- pages/docs/learn/durable-endpoints/streaming.mdx | 2 +- pages/docs/reference/typescript/v4/durable-endpoints.mdx | 3 --- 2 files changed, 1 insertion(+), 4 deletions(-) diff --git a/pages/docs/learn/durable-endpoints/streaming.mdx b/pages/docs/learn/durable-endpoints/streaming.mdx index d9cc30f94..702ca4018 100644 --- a/pages/docs/learn/durable-endpoints/streaming.mdx +++ b/pages/docs/learn/durable-endpoints/streaming.mdx @@ -221,7 +221,7 @@ const resp = await fetchWithStream("/api/generate", { const result = await resp.text(); ``` -For all available options (`fetch`, `fetchOpts`, `onMetadata`, `onDone`), see the [full API reference](/docs/reference/typescript/v4/durable-endpoints#client-fetchwithstream). +For all available options see the [full API reference](/docs/reference/typescript/v4/durable-endpoints#client-fetchwithstream). 
## How it works diff --git a/pages/docs/reference/typescript/v4/durable-endpoints.mdx b/pages/docs/reference/typescript/v4/durable-endpoints.mdx index d59420935..eac3b54eb 100644 --- a/pages/docs/reference/typescript/v4/durable-endpoints.mdx +++ b/pages/docs/reference/typescript/v4/durable-endpoints.mdx @@ -394,9 +394,6 @@ Returns a `Promise`. The returned `Response` contains the endpoint's f Called when a step fails and will retry. Your code is responsible for discarding the uncommitted chunks from that step. - - Called when the stream is fully consumed (including on abort or error). - From 016aab435ccb52a9ba6fb0a443f9c65c0f5a40fa Mon Sep 17 00:00:00 2001 From: Aaron Harper Date: Tue, 7 Apr 2026 12:46:27 -0400 Subject: [PATCH 3/4] Tweaks --- pages/docs/examples/durable-endpoints.mdx | 2 +- pages/docs/features/realtime.mdx | 2 - pages/docs/learn/durable-endpoints.mdx | 10 +- .../learn/durable-endpoints/streaming.mdx | 8 +- .../reference/typescript/v4/client/create.mdx | 2 +- .../typescript/v4/durable-endpoints.mdx | 92 +++---------------- .../typescript/v4/serve/streaming.mdx | 2 +- pages/docs/streaming.mdx | 2 +- 8 files changed, 32 insertions(+), 88 deletions(-) diff --git a/pages/docs/examples/durable-endpoints.mdx b/pages/docs/examples/durable-endpoints.mdx index 642cae091..f1cdbda0d 100644 --- a/pages/docs/examples/durable-endpoints.mdx +++ b/pages/docs/examples/durable-endpoints.mdx @@ -9,7 +9,7 @@ export const description = "Learn how to use Inngest Durable Endpoints to make a [Durable Endpoints](/docs/learn/durable-endpoints) let you add durability to regular HTTP handlers without separate function definitions or event triggers. You wrap your existing API route with `inngest.endpoint()` and use `step.run()` inline to get automatic retries and memoization for each step. 
-This is useful when you want your endpoint to orchestrate multiple operations (like booking a flight, processing a payment, and sending a confirmation) and guarantee that each step completes exactly once, even if the handler crashes or restarts partway through. You can also [stream SSE data to clients in real-time](/docs/learn/durable-endpoints/streaming) during execution. +This is useful when you want your endpoint to orchestrate multiple operations (like booking a flight, processing a payment, and sending a confirmation) and guarantee that each step completes exactly once, even if the handler crashes or restarts partway through. You can also [stream data to clients](/docs/learn/durable-endpoints/streaming) during execution. ## How it differs from traditional Inngest diff --git a/pages/docs/features/realtime.mdx b/pages/docs/features/realtime.mdx index 80765eb04..8987b3c20 100644 --- a/pages/docs/features/realtime.mdx +++ b/pages/docs/features/realtime.mdx @@ -31,8 +31,6 @@ Realtime lets you stream function progress, push live updates into the browser, and build interactive workflows like human-in-the-loop approvals without managing your own websocket infrastructure. -If you're using [Durable Endpoints](/docs/learn/durable-endpoints), you can also [stream SSE data directly to clients](/docs/learn/durable-endpoints/streaming) using `stream.push()` and `stream.pipe()` without setting up channels or topics. - ## Concepts The v4 model revolves around five primitives: diff --git a/pages/docs/learn/durable-endpoints.mdx b/pages/docs/learn/durable-endpoints.mdx index 97949b91f..9691b003f 100644 --- a/pages/docs/learn/durable-endpoints.mdx +++ b/pages/docs/learn/durable-endpoints.mdx @@ -290,13 +290,19 @@ fetch(`/api/your-durable-endpoint`) | TypeScript | ✅ Beta | >= 3.x (with `endpointAdapter`) | | Go | ✅ | >= v0.14.0 | +## Streaming + +Durable Endpoints can stream data back to clients in real-time using Server-Sent Events (SSE). 
Stream LLM tokens, progress updates, or any other data while keeping full durability guarantees. If a step fails and retries, streamed data from that step is automatically rolled back on the client. + +Read the [full guide](/docs/learn/durable-endpoints/streaming) for setup, client integration, rollback semantics, and more. + ## Limitations Durable Endpoints is currently in beta. The following limitations apply: - **Flow control is not supported** — Features like concurrency limits and rate limiting are not available for Durable Endpoints - **POST body is not yet supported** — Prefer using query strings for passing data. POST body support is coming soon -- **Standard HTTP responses only** — Durable Endpoints should return a standard HTTP response. For streaming, see [Streaming SSE from Durable Endpoints](/docs/learn/durable-endpoints/streaming) +- **Standard HTTP responses only** — Durable Endpoints should return a standard HTTP response. Streaming responses [are supported](#streaming) ## Examples @@ -330,4 +336,4 @@ The following demos are also available to check out and run locally with the Inn - [Durable Endpoint - TypeScript SDK Reference](/docs/reference/typescript/durable-endpoints) - [Steps Overview](/docs/learn/inngest-steps) - \ No newline at end of file + diff --git a/pages/docs/learn/durable-endpoints/streaming.mdx b/pages/docs/learn/durable-endpoints/streaming.mdx index 702ca4018..52ab8b29d 100644 --- a/pages/docs/learn/durable-endpoints/streaming.mdx +++ b/pages/docs/learn/durable-endpoints/streaming.mdx @@ -1,8 +1,8 @@ import { Callout, VersionBadge } from "src/shared/Docs/mdx"; -export const description = 'Learn how to stream SSE data from Durable Endpoints to clients using stream.push() and stream.pipe(), with automatic rollback on retry.'; +export const description = 'Learn how to durably stream data back to clients from Durable Endpoints'; -# Streaming SSE from Durable Endpoints +# Durable Endpoints Streaming Durable Endpoints can stream data back 
to clients in real-time using Server-Sent Events (SSE). This lets you stream AI inference tokens, progress updates, or any other data — while keeping the durability guarantees of [durable steps](/docs/learn/inngest-steps). @@ -20,7 +20,9 @@ Streaming works across multiple steps within a single endpoint invocation, and h If you don't need to stream data directly to an HTTP client, consider using [Realtime](/docs/features/realtime) to push updates from background Inngest functions via pub/sub channels. -## Quick start +## Example + +In this example, we'll create an HTTP endpoint that generates a haiku and then translates it to French. The client will be the browser, and it'll render the haiku and its translation as they're generated. The user will see the streamed LLM output appear in realtime. ### Server diff --git a/pages/docs/reference/typescript/v4/client/create.mdx b/pages/docs/reference/typescript/v4/client/create.mdx index 4f20bd2fe..b2ddac5b4 100644 --- a/pages/docs/reference/typescript/v4/client/create.mdx +++ b/pages/docs/reference/typescript/v4/client/create.mdx @@ -69,7 +69,7 @@ const inngest = new Inngest({ A stack of [middleware](/docs/features/middleware) to add to the client. - An endpoint adapter that enables [Durable Endpoints](/docs/reference/typescript/v4/durable-endpoints). Import from `inngest/edge` or `inngest/node`. When provided, `inngest.endpoint()` and `inngest.endpointProxy()` become available. + An endpoint adapter that enables [Durable Endpoints](/docs/reference/typescript/durable-endpoints). When provided, `inngest.endpoint()` and `inngest.endpointProxy()` become available. 
diff --git a/pages/docs/reference/typescript/v4/durable-endpoints.mdx b/pages/docs/reference/typescript/v4/durable-endpoints.mdx index eac3b54eb..229a8b5f1 100644 --- a/pages/docs/reference/typescript/v4/durable-endpoints.mdx +++ b/pages/docs/reference/typescript/v4/durable-endpoints.mdx @@ -1,6 +1,6 @@ import { Info, Callout, CodeGroup, Col, Properties, Property, Row, VersionBadge } from "src/shared/Docs/mdx"; -export const description = 'TypeScript SDK reference for Durable Endpoints. Create HTTP handlers with step-based checkpointing, streaming, and automatic recovery.'; +export const description = 'TypeScript SDK reference for Durable Endpoints. Create HTTP handlers with step-based checkpointing and streaming.' # Durable Endpoints @@ -53,8 +53,7 @@ const inngest = new Inngest({ - The `endpointAdapter` is required for Durable Endpoints to function. Without it, - `inngest.endpoint()` will not be available. + The `endpointAdapter` is required. Without it, `inngest.endpoint()` will not be available. ### `endpointAdapter.withOptions(options)` @@ -122,7 +121,6 @@ Creates a durable endpoint handler that can use step primitives for checkpointin return Response.json({ result }); } ); - ``` @@ -266,6 +264,7 @@ can be used to create your own URL to satisfy CORS constraints when the endpoint is used from browsers. ```typescript +// When setting the `endpointAdapter`, use `.withOptions()` to set more config const inngest = new Inngest({ id: "my-app", endpointAdapter: endpointAdapter.withOptions({ @@ -273,7 +272,7 @@ const inngest = new Inngest({ }), }); -// Create the proxy route with `inngest.endpointProxy()` +// Then create the route with `inngest.endpointProxy()` Bun.serve({ port: 3000, routes: { @@ -285,15 +284,13 @@ Bun.serve({ Requests will now be redirected to `/wait`. -To stream data to the client during execution, see [Streaming](#streaming) below or the full [Streaming SSE guide](/docs/learn/durable-endpoints/streaming). 
+To stream data to the client during execution, see [streaming](#streaming) below or the full [streaming guide](/docs/learn/durable-endpoints/streaming). --- ## Streaming -Durable Endpoints can stream data back to clients in real-time using Server-Sent Events (SSE). This lets you stream AI inference tokens, progress updates, or any other data while keeping durability guarantees. If you don't need to stream data directly to an HTTP client, consider using [Realtime](/docs/features/realtime) to push updates via pub/sub channels instead. - -For a full guide covering concepts, examples, and rollback semantics, see [Streaming SSE from Durable Endpoints](/docs/learn/durable-endpoints/streaming). +Durable Endpoints can stream data back to clients in real-time using Server-Sent Events (SSE). For a full guide covering concepts, examples, and rollback semantics, see [the guide](/docs/learn/durable-endpoints/streaming). ### Server: `stream.push()` and `stream.pipe()` @@ -310,65 +307,25 @@ import { stream } from "inngest/experimental/durable-endpoints"; Send a single chunk of data to the client as an SSE event. Accepts any JSON-serializable value. Does not block execution. No-op outside of an Inngest execution context. - - - - ```ts - await step.run("process", async () => { - stream.push("Loading..."); - stream.push({ progress: 50 }); - const result = await doWork(); - stream.push("Done!"); - return result; - }); - ``` - - - - - - - Pipe a stream to the client, sending each chunk as an SSE event in real-time. Resolves with the concatenated text of all chunks — it both streams to the client _and_ collects the result for you. - - Accepts three source types: - - **`ReadableStream`** — piped directly, decoded from bytes to string chunks. - - **`AsyncIterable`** — each yielded value becomes a chunk. - - **`() => AsyncIterable`** — a factory function, letting you pass `async function*` generators directly. - - Outside of an Inngest execution context, resolves with an empty string. 
+ Pipe a stream source to the client, sending each chunk as an SSE event in real-time. Resolves with the concatenated text of all chunks. No-op outside of an Inngest execution context (resolves with an empty string). - - ```ts - await step.run("generate", async () => { - // Pipe a ReadableStream - const res = await fetch("https://api.example.com/stream"); - const text = await stream.pipe(res.body); - - // Or pipe an async generator - const text = await stream.pipe(async function* () { - for await (const event of llmStream) { - if (event.type === "content_block_delta") { - yield event.delta.text; - } - } - }); - }); - ``` - ### Client: `fetchWithStream()` -Import from `inngest/experimental/durable-endpoints/client` to consume a streaming Durable Endpoint: +Handles the implementation details of streaming, including: +- Automatically committing and rolling back chunks based on the step's success or failure +- Filtering out internal events +- Handling the sync-to-async redirect ```ts import { fetchWithStream } from "inngest/experimental/durable-endpoints/client"; ``` -Returns a `Promise`. The returned `Response` contains the endpoint's final return value. If the endpoint does not use streaming, the raw `Response` is returned as-is. Sync-to-async redirects are handled automatically. +Returns a `Promise` containing the endpoint's final return value. Sync-to-async redirects are handled automatically. If the endpoint does not use streaming, the raw `Response` is returned as-is. @@ -386,35 +343,16 @@ Returns a `Promise`. The returned `Response` contains the endpoint's f Called when run metadata is received. Always fires first. - Called for each streamed chunk. Each `stream.push()` or `stream.pipe()` yield produces one `onData` call. `data` is the deserialized value. Data should be considered uncommitted until `onCommit` fires. + Called for each streamed chunk. Data should be considered uncommitted until `onCommit` fires. - Called when a step completes successfully. 
Chunks from that step are now permanent and will never be rolled back. + Called when a step completes successfully. Chunks from that step are now permanent. - Called when a step fails and will retry. Your code is responsible for discarding the uncommitted chunks from that step. + Called when a step fails and will retry. Discard uncommitted chunks from that step. - - ```ts - const resp = await fetchWithStream("/api/generate", { - onData: ({ data }) => { - if (typeof data === "string") { - console.log("Chunk:", data); - } - }, - onCommit: () => { - // Chunks are permanent - }, - onRollback: () => { - // Discard uncommitted chunks - }, - }); - - const result = await resp.text(); - ``` - --- diff --git a/pages/docs/reference/typescript/v4/serve/streaming.mdx b/pages/docs/reference/typescript/v4/serve/streaming.mdx index c9d2e8419..02f53979d 100644 --- a/pages/docs/reference/typescript/v4/serve/streaming.mdx +++ b/pages/docs/reference/typescript/v4/serve/streaming.mdx @@ -3,7 +3,7 @@ import { Callout, CodeGroup } from "src/shared/Docs/mdx"; # Streaming - This page covers streaming responses **back to Inngest** to extend serverless timeouts. To stream data **to clients** from Durable Endpoints using SSE, see [Streaming SSE from Durable Endpoints](/docs/reference/typescript/v4/durable-endpoints#streaming). + This page covers streaming responses **back to Inngest** to extend serverless timeouts. To stream data **to clients** from Durable Endpoints, see [Durable Endpoints streaming](/docs/reference/typescript/v4/durable-endpoints#streaming). In select environments, the SDK allows streaming responses back to Inngest, hugely increasing maximum timeouts on many serverless platforms up to 15 minutes. 
diff --git a/pages/docs/streaming.mdx b/pages/docs/streaming.mdx index 5aab83e52..adf8aeaa4 100644 --- a/pages/docs/streaming.mdx +++ b/pages/docs/streaming.mdx @@ -4,7 +4,7 @@ import { Callout, CodeGroup } from "src/shared/Docs/mdx"; In select environments, the SDK allows streaming responses back to Inngest, hugely increasing maximum timeouts on many serverless platforms up to 15 minutes. -Looking to stream SSE data to clients from Durable Endpoints? See [Streaming SSE from Durable Endpoints](/docs/learn/durable-endpoints/streaming) for a guide, or the [API reference](/docs/reference/typescript/v4/durable-endpoints#streaming) for complete documentation. +Looking to stream data to clients from Durable Endpoints? Read the [guide](/docs/learn/durable-endpoints/streaming) or the [API reference](/docs/reference/typescript/v4/durable-endpoints#streaming) for complete documentation. While we add wider support for streaming to other platforms, we currently support the following: From 42bf1875e9c7db0b5bee17287dacc5597b2e89bd Mon Sep 17 00:00:00 2001 From: Aaron Harper Date: Tue, 7 Apr 2026 13:00:40 -0400 Subject: [PATCH 4/4] Tweaks --- next-env.d.ts | 1 + pages/docs/learn/durable-endpoints.mdx | 2 +- .../learn/durable-endpoints/streaming.mdx | 22 +++++++++---------- .../reference/typescript/v4/client/create.mdx | 2 +- .../typescript/v4/serve/streaming.mdx | 2 +- 5 files changed, 15 insertions(+), 14 deletions(-) diff --git a/next-env.d.ts b/next-env.d.ts index 3cd7048ed..36a4fe488 100644 --- a/next-env.d.ts +++ b/next-env.d.ts @@ -1,6 +1,7 @@ /// /// /// +/// // NOTE: This file should not be edited // see https://nextjs.org/docs/app/api-reference/config/typescript for more information. 
diff --git a/pages/docs/learn/durable-endpoints.mdx b/pages/docs/learn/durable-endpoints.mdx index 9691b003f..f6d71d84a 100644 --- a/pages/docs/learn/durable-endpoints.mdx +++ b/pages/docs/learn/durable-endpoints.mdx @@ -294,7 +294,7 @@ fetch(`/api/your-durable-endpoint`) Durable Endpoints can stream data back to clients in real-time using Server-Sent Events (SSE). Stream LLM tokens, progress updates, or any other data while keeping full durability guarantees. If a step fails and retries, streamed data from that step is automatically rolled back on the client. -Read the [full guide](/docs/learn/durable-endpoints/streaming) for setup, client integration, rollback semantics, and more. +Read the [full guide](/docs/learn/durable-endpoints/streaming?ref=docs-durable-endpoints) for setup, client integration, rollback semantics, and more. ## Limitations diff --git a/pages/docs/learn/durable-endpoints/streaming.mdx b/pages/docs/learn/durable-endpoints/streaming.mdx index 52ab8b29d..8add87dd3 100644 --- a/pages/docs/learn/durable-endpoints/streaming.mdx +++ b/pages/docs/learn/durable-endpoints/streaming.mdx @@ -4,7 +4,7 @@ export const description = 'Learn how to durably stream data back to clients fro # Durable Endpoints Streaming -Durable Endpoints can stream data back to clients in real-time using Server-Sent Events (SSE). This lets you stream AI inference tokens, progress updates, or any other data — while keeping the durability guarantees of [durable steps](/docs/learn/inngest-steps). +Durable Endpoints can stream data back to clients in real-time using Server-Sent Events (SSE). This lets you stream AI inference tokens, progress updates, or any other data, while keeping the durability guarantees of [durable steps](/docs/learn/inngest-steps). Streaming works across multiple steps within a single endpoint invocation, and handles the transition from sync to async mode seamlessly. 
If a step fails and retries, any data streamed during that step is automatically rolled back on the client. @@ -140,7 +140,7 @@ stream.push({ progress: 50, message: "Halfway there" }); ``` - Accepts any JSON-serializable value. -- Fire-and-forget — does not block execution or return a value. +- Fire-and-forget. Does not block execution or return a value. - No-op outside of an Inngest execution context, so your code works the same when called outside of a durable endpoint. `push()` is ideal for one-off status messages or streaming via provider SDK event callbacks. @@ -190,12 +190,12 @@ The primary way to consume a streaming Durable Endpoint. Import it from `inngest import { fetchWithStream } from "inngest/experimental/durable-endpoints/client"; ``` -`fetchWithStream()` returns a `Promise` — `await` it to drive the stream to completion. When the endpoint finishes, the returned `Response` contains the endpoint's final return value. If the endpoint does not use streaming, `fetchWithStream()` returns the raw `Response` as-is. +`fetchWithStream()` returns a `Promise`. `await` it to drive the stream to completion. When the endpoint finishes, the returned `Response` contains the endpoint's final return value. If the endpoint does not use streaming, `fetchWithStream()` returns the raw `Response` as-is. The core callbacks handle the majority of streaming use cases: - **`onData({ data, hashedStepId })`** — Called for each chunk. `data` is the deserialized value; `hashedStepId` identifies which step produced it (or `null` if streamed outside a step). Data should be considered uncommitted until `onCommit` fires. -- **`onRollback({ hashedStepId })`** — Called when a step fails and will retry. Your code is responsible for tracking and removing the chunks produced by that step (see the [Quick start](#client) example for a pattern using a ref counter). +- **`onRollback({ hashedStepId })`** — Called when a step fails and will retry. 
Your code is responsible for tracking and removing the chunks produced by that step (see the [example above](#client) for a pattern using a ref counter). - **`onCommit({ hashedStepId })`** — Called when a step completes successfully. Chunks from that step are now permanent and will never be rolled back. Because `stream.push()` accepts any JSON-serializable value, `data` in the `onData` callback is typed as `unknown`. Narrow the type in your callback as needed: @@ -231,7 +231,7 @@ For all available options see the [full API reference](/docs/reference/typescrip When a client calls a streaming Durable Endpoint, the SSE stream flows directly from your app to the client. If the endpoint needs to go async (e.g. due to `step.sleep()`, `step.waitForEvent()`, or a retry), the SDK sends a redirect event telling the client where to reconnect, and the stream continues through the Inngest server. -`fetchWithStream()` handles this redirect automatically — the client sees a single continuous stream regardless of sync-to-async transitions. +`fetchWithStream()` handles this redirect automatically. The client sees a single continuous stream regardless of sync-to-async transitions. ### Streaming activation @@ -244,22 +244,22 @@ If neither `push()` nor `pipe()` is called, the endpoint behaves like a regular ### Rollback on retry -Each chunk is tagged with the step that produced it (via `hashedStepId`). When a step completes, `onCommit` fires and those chunks become permanent. When a step fails and retries, `onRollback` fires and your client code should discard the uncommitted chunks from that step. On the retry attempt, the step streams fresh data that replaces what was rolled back. See the [Quick start](#client) for an implementation pattern. +Each chunk is tagged with the step that produced it (via `hashedStepId`). When a step completes, `onCommit` fires and those chunks become permanent. 
When a step fails and retries, `onRollback` fires and your client code should discard the uncommitted chunks from that step. On the retry attempt, the step streams fresh data that replaces what was rolled back. See the [example above](#client) for an implementation pattern. Data streamed outside of a `step.run()` is never rolled back. ### SSE event types -The stream uses SSE with the following event types. The `inngest.*` events are internal protocol events handled by `fetchWithStream()` automatically — only `inngest.stream` events contain user data. +The stream uses SSE with the following event types. The `inngest.*` events are internal protocol events handled by `fetchWithStream()` automatically; only `inngest.stream` events contain user data. | Event name | Payload | Purpose | |---|---|---| | `inngest.metadata` | `{ runId }` | Always first. Identifies the run. | -| `inngest.stream` | `{ data, stepId? }` | User data from `push()` / `pipe()`. | -| `inngest.commit` | `{ hashedStepId }` | Step succeeded — its streamed data is permanent. | -| `inngest.rollback` | `{ hashedStepId }` | Step failed — discard its uncommitted data. | +| `inngest.stream` | `{ data, hashedStepId? }` | User data from `push()` / `pipe()`. | +| `inngest.commit` | `{ hashedStepId }` | Step succeeded. Its streamed data is permanent. | +| `inngest.rollback` | `{ hashedStepId }` | Step failed. Discard its uncommitted data. | | `inngest.redirect_info` | `{ runId, url }` | Tells the client to reconnect for async continuation. | -| `inngest.response` | `{ status, response: { body, headers, statusCode } }` | Terminal event — closes the stream. | +| `inngest.response` | `{ status, response: { body, headers, statusCode } }` | Terminal event. Closes the stream. 
| ## Limitations diff --git a/pages/docs/reference/typescript/v4/client/create.mdx b/pages/docs/reference/typescript/v4/client/create.mdx index b2ddac5b4..2c7047ba5 100644 --- a/pages/docs/reference/typescript/v4/client/create.mdx +++ b/pages/docs/reference/typescript/v4/client/create.mdx @@ -69,7 +69,7 @@ const inngest = new Inngest({ A stack of [middleware](/docs/features/middleware) to add to the client. - An endpoint adapter that enables [Durable Endpoints](/docs/reference/typescript/durable-endpoints). When provided, `inngest.endpoint()` and `inngest.endpointProxy()` become available. + An endpoint adapter that enables [Durable Endpoints](/docs/reference/typescript/v4/durable-endpoints). When provided, `inngest.endpoint()` and `inngest.endpointProxy()` become available. diff --git a/pages/docs/reference/typescript/v4/serve/streaming.mdx b/pages/docs/reference/typescript/v4/serve/streaming.mdx index 02f53979d..2bb25fb8c 100644 --- a/pages/docs/reference/typescript/v4/serve/streaming.mdx +++ b/pages/docs/reference/typescript/v4/serve/streaming.mdx @@ -3,7 +3,7 @@ import { Callout, CodeGroup } from "src/shared/Docs/mdx"; # Streaming - This page covers streaming responses **back to Inngest** to extend serverless timeouts. To stream data **to clients** from Durable Endpoints, see [Durable Endpoints streaming](/docs/reference/typescript/v4/durable-endpoints#streaming). + This page covers streaming responses **back to Inngest** to extend serverless timeouts. To stream data **to clients** from Durable Endpoints, see [Durable Endpoints streaming](/docs/reference/typescript/v4/durable-endpoints?ref=docs-serve-streaming#streaming). In select environments, the SDK allows streaming responses back to Inngest, hugely increasing maximum timeouts on many serverless platforms up to 15 minutes.