-
Notifications
You must be signed in to change notification settings - Fork 27.2k
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Introduce dynamic IO experimental configuration
In this mode prerenders must complete in a single render Task. When PPR is off if a prerender is incomplete after a single Render Task that page is considered dynamic. When PPR is on if a prerender is incomplete after a single Render Task then all the branches that are incomplete will postpone triggering the nearest parent suspense boundary. The Dynamic APIs that bail out of prerendering will no longer trigger a postpone via a throw but will instead abort the render synchronously. This is very aggressive and to make this mode useful we intend to alter these dynamic APIs in a way that will allow their use to only exclude their local sub-tree. If you experiment with this mode expect that many of your previously static pages will become dynamic and your mostly static PPR prerenders will become empty. React was bumped and now there is a new prerender method in the flight package. We can use this to implement an improved technique to ensure proper timing of task boundaries. This change switches to using prerender which also means that dynamicIO by itself must opt into experimental React.
- Loading branch information
Showing
62 changed files
with
3,263 additions
and
496 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,5 +1,9 @@ | ||
import type { NextConfig } from '../server/config-shared' | ||
|
||
export function needsExperimentalReact(config: NextConfig) { | ||
return Boolean(config.experimental?.ppr || config.experimental?.taint) | ||
return Boolean( | ||
config.experimental?.ppr || | ||
config.experimental?.taint || | ||
config.experimental?.dynamicIO | ||
) | ||
} |
226 changes: 226 additions & 0 deletions
226
packages/next/src/server/app-render/app-render-prerender-utils.ts
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,226 @@ | ||
/** | ||
* This utility function is extracted to make it easier to find places where we are doing | ||
* specific timing tricks to try to schedule work after React has rendered. This is especially | ||
* import at the moment because Next.js uses the edge builds of React which use setTimeout to | ||
* schedule work when you might expect that something like setImmediate would do the trick. | ||
* | ||
* Long term we should switch to the node versions of React rendering when possible and then | ||
* update this to use setImmediate rather than setTimeout | ||
* | ||
* A shorter term work around would be to patch React to use setImmediate instead of setTimeout | ||
* in the edge builds since this might also avoid setTimeout throttling. | ||
*/ | ||
export function waitAtLeastOneReactRenderTask(): Promise<void> { | ||
if (process.env.NEXT_RUNTIME === 'edge') { | ||
return new Promise((r) => setTimeout(r, 0)) | ||
} else { | ||
return new Promise((r) => setImmediate(r)) | ||
} | ||
} | ||
|
||
/** | ||
* This is a utility function to make scheduling sequential tasks that run back to back easier. | ||
* We schedule on the same queue (setImmediate) at the same time to ensure no other events can | ||
* sneak in between. We pass the return value from the first one to the second one to make typing | ||
* easier. | ||
* | ||
* We do not call the second function if the first errored. practiaclly spea | ||
*/ | ||
export function prerenderAndAbortInSequentialTasks<R>( | ||
prerender: () => Promise<R>, | ||
abort: () => void | ||
): Promise<R> { | ||
if (process.env.NEXT_RUNTIME === 'edge') { | ||
throw new Error( | ||
'schedulePrerendreWork should not be called in edge runtime. This is a bug in Next.js' | ||
) | ||
} | ||
return new Promise((resolve, reject) => { | ||
let pendingResult: Promise<R> | ||
setImmediate(() => { | ||
try { | ||
pendingResult = prerender() | ||
} catch (err) { | ||
reject(err) | ||
} | ||
}) | ||
setImmediate(() => { | ||
abort() | ||
resolve(pendingResult) | ||
}) | ||
}) | ||
} | ||
|
||
// React's RSC prerender function will emit an incomplete flight stream when using `prerender`. If the connection | ||
// closes then whatever hanging chunks exist will be errored. This is because prerender (an experimental feature) | ||
// has not yet implemented a concept of resume. For now we will simulate a paused connection by wrapping the stream | ||
// in one that doesn't close even when the underlying is complete. | ||
export class ReactServerResult { | ||
private _stream: null | ReadableStream<Uint8Array> | ||
|
||
constructor(stream: ReadableStream<Uint8Array>) { | ||
this._stream = stream | ||
} | ||
|
||
tee() { | ||
if (this._stream === null) { | ||
throw new Error( | ||
'Cannot tee a ReactServerResult that has already been consumed' | ||
) | ||
} | ||
const tee = this._stream.tee() | ||
this._stream = tee[0] | ||
return tee[1] | ||
} | ||
|
||
consume() { | ||
if (this._stream === null) { | ||
throw new Error( | ||
'Cannot consume a ReactServerResult that has already been consumed' | ||
) | ||
} | ||
const stream = this._stream | ||
this._stream = null | ||
return stream | ||
} | ||
} | ||
|
||
// A Promise that may also carry eagerly-readable `status`/`value`/`reason`
// fields — presumably mirroring React's internal thenable protocol; confirm
// against the react-server bindings if this shape becomes load-bearing.
type ReactPromise<T> = Promise<T> & {
  status?: string
  value?: T
  reason?: unknown
}
// Resolution type of React's experimental `prerender`: the (possibly
// incomplete) flight stream is exposed as `prelude`.
export type ReactServerPrerenderResolveToType = {
  prelude: ReadableStream<Uint8Array>
}
|
||
export async function createReactServerPrerenderResult( | ||
underlying: Promise<ReactServerPrerenderResolveToType> | ||
): Promise<ReactServerPrerenderResult> | ||
export async function createReactServerPrerenderResult( | ||
underlying: ReactPromise<ReactServerPrerenderResolveToType> | ||
): Promise<ReactServerPrerenderResult> { | ||
underlying.catch(() => {}) | ||
|
||
const chunks: Array<Uint8Array> = [] | ||
const { prelude } = await underlying | ||
await new Promise((resolve, reject) => { | ||
const reader = prelude.getReader() | ||
function progress(result: ReadableStreamReadResult<Uint8Array>) { | ||
if (result.done) { | ||
resolve(chunks) | ||
return | ||
} else { | ||
chunks.push(result.value) | ||
reader.read().then(progress, error) | ||
} | ||
} | ||
function error(e: unknown) { | ||
reader.cancel(e) | ||
reject(e) | ||
} | ||
reader.read().then(progress, error) | ||
}) | ||
return new ReactServerPrerenderResult(chunks) | ||
} | ||
|
||
export async function createReactServerPrerenderResultFromRender( | ||
underlying: ReadableStream<Uint8Array> | ||
): Promise<ReactServerPrerenderResult> { | ||
const chunks: Array<Uint8Array> = [] | ||
await new Promise((resolve, reject) => { | ||
const reader = underlying.getReader() | ||
function progress(result: ReadableStreamReadResult<Uint8Array>) { | ||
if (result.done) { | ||
resolve(chunks) | ||
return | ||
} else { | ||
chunks.push(result.value) | ||
reader.read().then(progress, error) | ||
} | ||
} | ||
function error(e: unknown) { | ||
reader.cancel(e) | ||
reject(e) | ||
} | ||
reader.read().then(progress, error) | ||
}) | ||
return new ReactServerPrerenderResult(chunks) | ||
} | ||
export class ReactServerPrerenderResult { | ||
private _chunks: null | Array<Uint8Array> | ||
|
||
private assertChunks(expression: string): Array<Uint8Array> { | ||
if (this._chunks === null) { | ||
throw new Error( | ||
`Cannot \`${expression}\` on a ReactServerPrerenderResult that has already been consumed` | ||
) | ||
} | ||
return this._chunks | ||
} | ||
|
||
private consumeChunks(expression: string): Array<Uint8Array> { | ||
const chunks = this.assertChunks(expression) | ||
this.consume() | ||
return chunks | ||
} | ||
|
||
consume(): void { | ||
this._chunks = null | ||
} | ||
|
||
constructor(chunks: Array<Uint8Array>) { | ||
this._chunks = chunks | ||
} | ||
|
||
asUnclosingStream(): ReadableStream<Uint8Array> { | ||
const chunks = this.assertChunks('asUnclosingStream()') | ||
return createUnclosingStream(chunks) | ||
} | ||
|
||
consumeAsUnclosingStream(): ReadableStream<Uint8Array> { | ||
const chunks = this.consumeChunks('consumeAsUnclosingStream()') | ||
return createUnclosingStream(chunks) | ||
} | ||
|
||
asStream(): ReadableStream<Uint8Array> { | ||
const chunks = this.assertChunks('asStream()') | ||
return createClosingStream(chunks) | ||
} | ||
|
||
consumeAsStream(): ReadableStream<Uint8Array> { | ||
const chunks = this.consumeChunks('consumeAsStream()') | ||
return createClosingStream(chunks) | ||
} | ||
} | ||
|
||
function createUnclosingStream( | ||
chunks: Array<Uint8Array> | ||
): ReadableStream<Uint8Array> { | ||
let i = 0 | ||
return new ReadableStream({ | ||
async pull(controller) { | ||
if (i < chunks.length) { | ||
controller.enqueue(chunks[i++]) | ||
} | ||
// we intentionally keep the stream open. The consumer will clear | ||
// out chunks once finished and the remaining memory will be GC'd | ||
// when this object goes out of scope | ||
}, | ||
}) | ||
} | ||
|
||
function createClosingStream( | ||
chunks: Array<Uint8Array> | ||
): ReadableStream<Uint8Array> { | ||
let i = 0 | ||
return new ReadableStream({ | ||
async pull(controller) { | ||
if (i < chunks.length) { | ||
controller.enqueue(chunks[i++]) | ||
} else { | ||
controller.close() | ||
} | ||
}, | ||
}) | ||
} |
Oops, something went wrong.