feat: Node.js native stream rendering for App Router#89566
feat: Node.js native stream rendering for App Router #89566 — feedthejim wants to merge 47 commits into canary from
Conversation
7f2d09b to
15bb67a
Compare
Tests Passed |
Stats from current PR🔴 3 regressions
📊 All Metrics📖 Metrics GlossaryDev Server Metrics:
Build Metrics:
Change Thresholds:
⚡ Dev Server
📦 Dev Server (Webpack) (Legacy)📦 Dev Server (Webpack)
⚡ Production Builds
📦 Production Builds (Webpack) (Legacy)📦 Production Builds (Webpack)
📦 Bundle SizesBundle Sizes⚡ TurbopackClient Main Bundles: **437 kB** → **437 kB** ✅ -7 B81 files with content-based hashes (individual files not comparable between builds) Server Middleware
Build DetailsBuild Manifests
📦 WebpackClient Main Bundles
Polyfills
Pages
Server Edge SSR
Middleware
Build DetailsBuild Manifests
Build Cache
🔄 Shared (bundler-independent)Runtimes
📝 Changed Files (45 files)Files with changes:
View diffsapp-page-exp..ntime.dev.jsfailed to diffapp-page-exp..time.prod.jsDiff too large to display app-page-exp..ntime.dev.jsfailed to diffapp-page-exp..time.prod.jsfailed to diffapp-page-nod..ntime.dev.jsfailed to diffapp-page-nod..time.prod.jsDiff too large to display app-page-tur..ntime.dev.jsfailed to diffapp-page-tur..time.prod.jsDiff too large to display app-page-tur..ntime.dev.jsfailed to diffapp-page-tur..time.prod.jsfailed to diffapp-page-tur..ntime.dev.jsfailed to diffapp-page-tur..time.prod.jsDiff too large to display app-page-tur..ntime.dev.jsfailed to diffapp-page-tur..time.prod.jsfailed to diffapp-page.runtime.dev.jsfailed to diffapp-page.runtime.prod.jsfailed to diffapp-route-ex..ntime.dev.jsDiff too large to display app-route-ex..time.prod.jsDiff too large to display app-route-ex..ntime.dev.jsDiff too large to display app-route-ex..time.prod.jsDiff too large to display app-route-no..ntime.dev.jsDiff too large to display app-route-no..time.prod.jsDiff too large to display app-route-tu..ntime.dev.jsDiff too large to display app-route-tu..time.prod.jsDiff too large to display app-route-tu..ntime.dev.jsDiff too large to display app-route-tu..time.prod.jsDiff too large to display app-route-tu..ntime.dev.jsDiff too large to display app-route-tu..time.prod.jsDiff too large to display app-route-tu..ntime.dev.jsDiff too large to display app-route-tu..time.prod.jsDiff too large to display app-route.runtime.dev.jsDiff too large to display app-route.ru..time.prod.jsDiff too large to display dist_client_..ntime.dev.js@@ -0,0 +1,2 @@
+"use strict";exports.ids=["dist_client_dev_noop-turbopack-hmr_js"],exports.modules={"./dist/client/dev/noop-turbopack-hmr.js"(module,exports1){function connect(){}Object.defineProperty(exports1,"__esModule",{value:!0}),Object.defineProperty(exports1,"connect",{enumerable:!0,get:function(){return connect}}),("function"==typeof exports1.default||"object"==typeof exports1.default&&null!==exports1.default)&&void 0===exports1.default.__esModule&&(Object.defineProperty(exports1.default,"__esModule",{value:!0}),Object.assign(exports1.default,exports1),module.exports=exports1.default)}};
+//# sourceMappingURL=dist_client_dev_noop-turbopack-hmr_js-experimental-nodestreams.runtime.dev.js.map
\ No newline at end of filedist_client_..ntime.dev.js@@ -0,0 +1,2 @@
+"use strict";exports.ids=["dist_client_dev_noop-turbopack-hmr_js"],exports.modules={"./dist/client/dev/noop-turbopack-hmr.js"(module,exports1){function connect(){}Object.defineProperty(exports1,"__esModule",{value:!0}),Object.defineProperty(exports1,"connect",{enumerable:!0,get:function(){return connect}}),("function"==typeof exports1.default||"object"==typeof exports1.default&&null!==exports1.default)&&void 0===exports1.default.__esModule&&(Object.defineProperty(exports1.default,"__esModule",{value:!0}),Object.assign(exports1.default,exports1),module.exports=exports1.default)}};
+//# sourceMappingURL=dist_client_dev_noop-turbopack-hmr_js-nodestreams.runtime.dev.js.map
\ No newline at end of filedist_client_..ntime.dev.js@@ -0,0 +1,2 @@
+"use strict";exports.ids=["dist_client_dev_noop-turbopack-hmr_js"],exports.modules={"./dist/client/dev/noop-turbopack-hmr.js"(module,exports1){function connect(){}Object.defineProperty(exports1,"__esModule",{value:!0}),Object.defineProperty(exports1,"connect",{enumerable:!0,get:function(){return connect}}),("function"==typeof exports1.default||"object"==typeof exports1.default&&null!==exports1.default)&&void 0===exports1.default.__esModule&&(Object.defineProperty(exports1.default,"__esModule",{value:!0}),Object.assign(exports1.default,exports1),module.exports=exports1.default)}};
+//# sourceMappingURL=dist_client_dev_noop-turbopack-hmr_js-turbo-experimental-nodestreams.runtime.dev.js.map
\ No newline at end of filedist_client_..ntime.dev.js@@ -0,0 +1,2 @@
+"use strict";exports.ids=["dist_client_dev_noop-turbopack-hmr_js"],exports.modules={"./dist/client/dev/noop-turbopack-hmr.js"(module,exports1){function connect(){}Object.defineProperty(exports1,"__esModule",{value:!0}),Object.defineProperty(exports1,"connect",{enumerable:!0,get:function(){return connect}}),("function"==typeof exports1.default||"object"==typeof exports1.default&&null!==exports1.default)&&void 0===exports1.default.__esModule&&(Object.defineProperty(exports1.default,"__esModule",{value:!0}),Object.assign(exports1.default,exports1),module.exports=exports1.default)}};
+//# sourceMappingURL=dist_client_dev_noop-turbopack-hmr_js-turbo-nodestreams.runtime.dev.js.map
\ No newline at end of filepages-api-tu..ntime.dev.jsDiff too large to display pages-api-tu..time.prod.jsDiff too large to display pages-api.runtime.dev.jsDiff too large to display pages-api.ru..time.prod.jsDiff too large to display pages-turbo...ntime.dev.jsDiff too large to display pages-turbo...time.prod.jsDiff too large to display pages.runtime.dev.jsDiff too large to display pages.runtime.prod.jsDiff too large to display server.runtime.prod.jsDiff too large to display |
e3855fa to
2befd89
Compare
feedthejim
left a comment
There was a problem hiding this comment.
Should also rename the web methods to be more explicit
| pnpm build # Required before running tests (Turborepo dedupes if unchanged) | ||
| ``` | ||
|
|
||
| ## Bundler Selection |
There was a problem hiding this comment.
I'm surprised this wasn't in there
AGENTS.md
Outdated
| - `pnpm test-start-turbo` - Production build+start with Turbopack | ||
| - `pnpm test-start-webpack` - Production build+start with Webpack | ||
|
|
||
| **Run tests headless** (no browser window): Always set `HEADLESS=true` when running e2e tests: |
There was a problem hiding this comment.
Probably should be more flexible
AGENTS.md
Outdated
|
|
||
| **CI Analysis Tips:** | ||
|
|
||
| - **Assume test failures are NOT flaky by default.** Investigate every failure as if it is caused by the current changes until proven otherwise. |
There was a problem hiding this comment.
Should say check for historical data
packages/next/src/server/app-render/app-render-prerender-utils.ts
Outdated
Show resolved
Hide resolved
|
definitely not exhaustive around the PPR rendering pipeline |
52ec009 to
1c3eac6
Compare
|
dev is hanging now :( |
827f41c to
436e2d4
Compare
| | import('node:stream').Readable | ||
| let streamIterator: AsyncIterable<Uint8Array> | ||
|
|
||
| if (process.env.__NEXT_USE_NODE_STREAMS) { |
There was a problem hiding this comment.
there's no point forking the impl into node/web inside this file, we can just use node streams everywhere. i did that at some point but then rolled it back while debugging and never brought it back
(at the end of createCombinedPayloadStream we return a node Readable anyway, and in other places we immediately iterate over the stream to collect its chunks)
| filterStackFrame, | ||
| debugChannel: debugChannel?.serverSide, | ||
| }, | ||
| (fn) => workUnitAsyncStorage.run(requestStore, fn) |
There was a problem hiding this comment.
this runInContext business feels weird, why can't we just wrap this whole thing in a workUnitAsyncStorage.run(requestStore, () => ...)? same for ReactServerResult
| export function teeNodeReadable( | ||
| source: NodeReadable, | ||
| runInContext: <T>(fn: () => T) => T = (fn) => fn() | ||
| ): [NodeReadable, NodeReadable] { |
There was a problem hiding this comment.
i haven't reviewed the implementation but do we actually need to replicate how ReadableStream#tee() works? it's a pain that tee-ing consumes the original stream, and i don't think we need to do that with node streams, so why make it more painful than it has to be, when the API could probably just be
const readable = ...
const teed = teeNodeReadable(readable)

There was a problem hiding this comment.
also. what's with the runInContext again. we've never needed things like this, why are they needed now
| // --------------------------------------------------------------------------- | ||
| // Continue functions (replaces ~8 large if/else blocks in app-render.tsx) | ||
| // --------------------------------------------------------------------------- |
There was a problem hiding this comment.
"(replaces ~8 large if/else blocks in app-render.tsx)" is not useful as a code comment
| /** | ||
| * Creates a stream that never emits data (used for resume-and-abort patterns). | ||
| */ | ||
| export function createPendingStream(): AnyStream { |
There was a problem hiding this comment.
i don't know if this is new in this PR or just mirroring existing code, but creating a stream that never ends without some kind of abortSignal seems smelly
| */ | ||
| export function getServerPrerender( | ||
| ComponentMod: ServerPrerenderComponentMod | ||
| ): (...args: any[]) => any { |
| // With createFromNodeStream + debugChannel, the response can resolve | ||
| // before debug stream completion. In restart-on-cache-miss flows this | ||
| // can leave metadata in the body instead of the head for the request | ||
| // render. Wait for debug completion for request work units. | ||
| waitForDebugEnd = new Promise<void>((resolve, reject) => { | ||
| nodeDebugStream!.once('end', resolve) | ||
| nodeDebugStream!.once('close', resolve) | ||
| nodeDebugStream!.once('error', reject) | ||
| }) |
There was a problem hiding this comment.
this doesn't make sense to me and i suspect this whole waitForDebugEnd thing is hallucinated
| // Cast through unknown: global ReadableStream and stream/web.ReadableStream | ||
| // differ slightly in type declarations. | ||
| nodeDebugStream = Readable.fromWeb( | ||
| debugStream as unknown as import('stream/web').ReadableStream<Uint8Array> | ||
| ) |
There was a problem hiding this comment.
is this needed? previously we've manually ensured that both streams are the same type
(it's not visible on the types of the component which i dislike but that's how it works in practice)
| // --------------------------------------------------------------------------- | ||
|
|
||
| export function nodeStreamFromString(str: string): Readable { | ||
| const { PassThrough: PT } = getNodeStream() |
There was a problem hiding this comment.
is there a reason we need to alias this? i'd rather just have PassThrough
| ) => boolean) | ||
| | undefined | ||
| onError?: (error: unknown) => void | ||
| signal?: AbortSignal |
There was a problem hiding this comment.
bad robot, there's no signal in renderToPipeableStream, that's what the abort() on the result is for
https://github.com/facebook/react/blob/b07aa7d643ec9028e452612c3ff2c17a6cee6bb7/packages/react-server-dom-webpack/src/server/ReactFlightDOMServerNode.js#L148-L163
| filterStackFrame: | ||
| | (( | ||
| url: string, | ||
| functionName: string, | ||
| lineNumber: number, | ||
| columnNumber: number | ||
| ) => boolean) | ||
| | undefined |
There was a problem hiding this comment.
i don't think that's right, at least according to react's types
https://github.com/facebook/react/blob/4e82d48f1f66ca15ad38b1cca291bc5794a4211b/packages/react-server-dom-webpack/src/server/ReactFlightDOMServerNode.js#L151
| pipe<Writable extends NodeJS.WritableStream>( | ||
| destination: Writable | ||
| ): Writable |
There was a problem hiding this comment.
confusing naming of generic param, it looks like it's referring to node's Writable when it's not
| pipe<Writable extends NodeJS.WritableStream>( | |
| destination: Writable | |
| ): Writable | |
| pipe<Destination extends NodeJS.WritableStream>( | |
| destination: Destination | |
| ): Destination |
| ...options, | ||
| onHeaders: wrappedOnHeaders, | ||
| onShellReady() { | ||
| pipe(passthrough as unknown as Writable) |
Context: - Use Node native Buffer.indexOf in indexOfUint8Array when available. - Use subarray instead of slice in removeFromUint8Array to reduce copies. Benchmark (micro scenario, 300 iterations, 30 warmup, parent-file swap): - Web continueStaticPrerender median: 4.873ms -> 0.658ms (+86.50% faster) - Web continueDynamicHTMLResume median: 4.868ms -> 0.604ms (+87.59% faster)
Context: - Extract shared flight payload encoding helpers and avoid repeated JSON wrapper construction for binary chunks. - Use chunked base64 conversion fallback and keep output format identical for client hydration scripts. Benchmark (micro scenario, 300 iterations, 30 warmup, file-swap A/B): - Web continueStaticPrerender median: 5.881ms -> 0.674ms (+88.55% faster) - Web continueDynamicHTMLResume median: 5.708ms -> 0.773ms (+86.46% faster) - Web continueDynamicPrerender median: 0.255ms -> 0.269ms (-5.35%) - Web continueDynamicHTMLResume (utf8 flight) median: 1.187ms -> 1.329ms (-11.93%)
Context: - Avoid repeated Buffer.concat in createBufferedTransformNode by buffering chunks and flushing once. - Keep callback timing unchanged while reducing per-chunk allocation overhead. Benchmark (micro scenario, 300 iterations, 30 warmup, parent-file swap): - createBufferedTransformNode only median: 0.070ms -> 0.052ms (+26.85% faster) - Node continueDynamicHTMLResume median: 0.657ms -> 0.571ms (+13.01% faster) - Node continueStaticPrerender median: 0.609ms -> 0.611ms (-0.40%)
Context: - Build node flight script frames using fewer temporary arrays/objects. - Keep output bytes identical while reducing framing overhead for large payloads. Benchmark (micro scenario, 300 iterations, 30 warmup, parent-file swap): - createInlinedDataNodeStream only median: 0.456ms -> 0.415ms (+9.07% faster) - createInlinedDataNodeStream (utf8) median: 0.884ms -> 0.825ms (+6.73% faster) - Node continueDynamicHTMLResume median: 0.657ms -> 0.571ms (+12.98% faster)
Context: - Replace Array.shift in tee queue handling with index-based O(1) dequeue. - Preserve backpressure semantics and callback order. Benchmark (micro scenario, 300 iterations, 30 warmup, parent-file swap): - teeNodeReadable median: 0.072ms -> 0.071ms (+0.70% faster) - Node continueDynamicHTMLResume median: 0.586ms -> 0.571ms (+2.41% faster) - Node continueStaticPrerender median: 0.593ms -> 0.611ms (-3.15%)
1296685 to
99a39cf
Compare
69f2ea6 to
bca423c
Compare
Move detailed runtime internals documentation (experimental flags, pre-compiled bundles, DCE patterns, React vendoring) from always-loaded AGENTS.md into focused skills under .agents/skills/. AGENTS.md keeps one-liner guardrails and points to skills for deep-dive workflows. Skills added: - flags: feature-flag wiring end-to-end - dce-edge: DCE-safe require patterns and edge constraints - react-vendoring: entry-base boundaries and vendored React - runtime-debug: bundle regression diagnosis workflow - pr-status-triage: CI failure and PR review triage - authoring-skills: how to create and maintain skills AGENTS.md: 489 -> 426 lines (-13%), deep content now loads on demand.
Add `node-stream-helpers.ts` with Node.js native stream utilities that parallel the WhatWG stream helpers in `node-web-streams-helper.ts`. These are the foundational building blocks needed for the node-streams rendering effort (PRs vercel#89566, vercel#89859, vercel#89860, vercel#90500). Key functions: - `chainNodeStreams()` - chains multiple Readable streams sequentially - `createBufferedTransformNode()` - batches small chunks before flushing - `createInlinedDataNodeStream()` - inlines flight data into HTML stream - `pipeNodeReadableToResponse()` - pipes Readable directly to ServerResponse - `nodeStreamToBuffer()` / `nodeStreamToString()` - collection utilities ALS context propagation uses `bindSnapshot()` from the existing `async-local-storage.ts` module, which wraps `AsyncLocalStorage.bind()`. This addresses the review feedback from @lubieowoce on PR vercel#89859 where ALS context was incorrectly propagated by wrapping callback return values instead of binding the callbacks themselves. This PR adds only the helper utilities as new files. No existing files are modified. Wiring into the render pipeline is a separate step. Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
…eams flag Add `experimental.useNodeStreams` config flag that switches the stream operations module (stream-ops.ts) to load native Node.js implementations for hot-path functions: - chainStreams: uses chainNodeStreams (PassThrough-based sequential piping) - streamToBuffer: uses nodeStreamToBuffer (for-await on Node Readable) - streamToString: uses nodeStreamToString (streaming TextDecoder) - renderToFizzStream: uses renderToPipeableStream instead of renderToReadableStream, avoiding web→node conversion overhead in React Complex transform chains (continueFizzStream, prerender continuations) still delegate to the web implementation as a stopgap — the native buffering and data inlining transforms from node-stream-helpers will be wired in a follow-up once the web transform chain is decomposed. Includes the node-stream-helpers module from PR vercel#91580 which provides the underlying native stream utilities. References: vercel#91580, vercel#89566, vercel#90500 Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
Related work: standalone helpers + wiring PRs

We've created complementary PRs that contribute to this effort:
Why native streams > fast-webstreams

We also benchmarked `experimental-fast-webstreams` (#89686) on Node.js v25.7.0 and found it's -6.7% slower on complex routes (10 nested layouts). Node 25's native WhatWG streams have improved enough that the fast-webstreams interop layer adds overhead. Native Node.js streams bypass the WhatWG API entirely, which is the better long-term path.

Additional perf PRs (benchmarked +11.5% on web streams path)

We also have 14 micro-optimization PRs (#91559-#91577) targeting non-stream hotspots (structuredClone elimination in metadata resolution at 914ms, tracer early exit, pre-compiled regex, O(1) header lookups, etc.) that deliver +11.5% throughput independently of the stream implementation. |
Summary
Replaces WHATWG web stream APIs with Node.js native streams for server-side rendering on the Node.js runtime, gated behind
`experimental.useNodeStreams`. Core approach:
- `-nodestreams` runtime bundle variants to avoid size regression when the flag is off (same pattern as the `experimentalReact` channel)
- Direct `Readable`-to-`ServerResponse` piping, no web stream conversion overhead
- `process.env.__NEXT_USE_NODE_STREAMS` checks that eliminate branching in `app-render.tsx` (~35 call sites unified)
- Defaults to `false` (no `node:stream` on edge), with proper webpack DCE via `if/else` guards

Benchmark (bench/basic-app, force-dynamic, Turbopack prod)
Key changes
New files
- `process.env.__NEXT_USE_NODE_STREAMS`. Provides unified function names (`continueFizzStream`, `continueStaticPrerender`, `streamToBuffer`, `chainStreams`, `createInlinedDataStream`, `resumeAndAbort`, etc.) that resolve to web or node implementations at bundle time via DCE.
- `renderToPipeableStream` / `resumeToPipeableStream` / Flight pipeable in an async/await interface matching the web stream equivalents.
- `pipeNodeReadableToResponse()` for direct Node `Readable`-to-`ServerResponse` piping.

Modified files
- `useNodeStreams` / `useNodeStreamsPPR` local variables. Remaining ~11 inline `process.env.__NEXT_USE_NODE_STREAMS` checks are for Fizz render wrapping, stream tee+accumulate patterns, and dynamic render paths where web/node APIs differ too much to abstract.
- `ReactServerPrerenderResult` (`asFlightStream()`, `consumeAsFlightStream()`, `asUnclosingFlightStream()`) and `createReactServerPrerenderResultFromPrerender()` factory.
- `Readable` as response type.
- `chainNodeStreams` for use in the `app-page.ts` template (avoids relative require in user-bundled code).
- `-nodestreams` runtime bundle variants (including `experimental-nodestreams` combos).
- `__NEXT_USE_NODE_STREAMS` env var.
- `experimental.useNodeStreams` flag definition.
- `__NEXT_USE_NODE_STREAMS=false` for edge builds.
- `__NEXT_USE_NODE_STREAMS` env var support for CI testing.

Covered render paths
- (`renderToHTMLOrFlightImpl`): Flight RSC render, Fizz HTML render, dynamic HTML resume, error fallback
- (`prerenderToStream`): RSC prerender, client prerender (PPR + non-PPR + legacy), static/dynamic/fallback output, resume-and-abort
- (`selectRSCPrerenderForRuntimePrefetch`): RSC prerender with runtime prefetch transform
- `StagedRenderingController`: `validateStagedShell`, `validateNavigationShell`, `warmUpInstantValidationClient`

CI
- `test-node-streams-dev` (Turbopack) and `test-node-streams-prod` (Webpack) CI jobs
- `useNodeStreams` and `cacheComponents` enabled
- `test/use-node-streams-tests-manifest.json` (includes all app-dir e2e, integration, production, and development tests via glob rules)
- `bench/basic-app/benchmark.sh`

Test plan
- `pnpm --filter=next build` (both normal and nodestreams bundles)
- `bench/basic-app` builds and serves with `useNodeStreams: true`
- (`__NEXT_USE_NODE_STREAMS=false`)

Extracted Follow-up Stack