diff --git a/packages/next/errors.json b/packages/next/errors.json
index 4611f5fc0f9df5..d3d2352a662726 100644
--- a/packages/next/errors.json
+++ b/packages/next/errors.json
@@ -898,5 +898,6 @@
   "897": "Expected HTML document to start with doctype prefix",
   "898": "When using Cache Components, all `generateStaticParams` functions must return at least one result. This is to ensure that we can perform build-time validation that there is no other dynamic accesses that would cause a runtime error.\n\nLearn more: https://nextjs.org/docs/messages/empty-generate-static-params",
   "899": "Both \"%s\" and \"%s\" files are detected. Please use \"%s\" instead. Learn more: https://nextjs.org/docs/messages/middleware-to-proxy",
-  "900": "Both %s file \"./%s\" and %s file \"./%s\" are detected. Please use \"./%s\" only. Learn more: https://nextjs.org/docs/messages/middleware-to-proxy"
+  "900": "Both %s file \"./%s\" and %s file \"./%s\" are detected. Please use \"./%s\" only. Learn more: https://nextjs.org/docs/messages/middleware-to-proxy",
+  "901": "`accumulateStreamChunksToDeadline` received a chunk after deadline"
 }
diff --git a/packages/next/src/server/app-render/app-render-render-utils.ts b/packages/next/src/server/app-render/app-render-render-utils.ts
index ef1801cd359f0d..2645ca334325ff 100644
--- a/packages/next/src/server/app-render/app-render-render-utils.ts
+++ b/packages/next/src/server/app-render/app-render-render-utils.ts
@@ -38,7 +38,7 @@ export function scheduleInSequentialTasks<R>(
 export function pipelineInSequentialTasks<A, B, C>(
   one: () => A,
   two: (a: A) => B,
-  three: (b: B) => C | Promise<C>
+  three: (b: B) => C
 ): Promise<C> {
   if (process.env.NEXT_RUNTIME === 'edge') {
     throw new InvariantError(
@@ -46,18 +46,19 @@
     )
   } else {
     return new Promise<C>((resolve, reject) => {
-      let oneResult: A | undefined = undefined
+      let oneResult: A
       setTimeout(() => {
         try {
           oneResult = one()
         } catch (err) {
           clearTimeout(twoId)
           clearTimeout(threeId)
+          clearTimeout(fourId)
           reject(err)
         }
       }, 0)
 
-      let twoResult: B | undefined = undefined
+      let twoResult: B
       const twoId = setTimeout(() => {
         // if `one` threw, then this timeout would've been cleared,
         // so if we got here, we're guaranteed to have a value.
@@ -65,19 +66,27 @@
           twoResult = two(oneResult!)
         } catch (err) {
           clearTimeout(threeId)
+          clearTimeout(fourId)
           reject(err)
         }
       }, 0)
 
+      let threeResult: C
      const threeId = setTimeout(() => {
         // if `two` threw, then this timeout would've been cleared,
         // so if we got here, we're guaranteed to have a value.
         try {
-          resolve(three(twoResult!))
+          threeResult = three(twoResult!)
         } catch (err) {
+          clearTimeout(fourId)
           reject(err)
         }
       }, 0)
+
+      // We wait a task before resolving/rejecting
+      const fourId = setTimeout(() => {
+        resolve(threeResult)
+      }, 0)
     })
   }
 }
diff --git a/packages/next/src/server/app-render/app-render.tsx b/packages/next/src/server/app-render/app-render.tsx
index 6725d7ba7217e3..98a5c48342e73c 100644
--- a/packages/next/src/server/app-render/app-render.tsx
+++ b/packages/next/src/server/app-render/app-render.tsx
@@ -2495,6 +2495,8 @@ async function renderToStream(
   ) {
     const {
       stream: serverStream,
+      staticStream,
+      runtimeStream,
       debugChannel: returnedDebugChannel,
       requestStore: finalRequestStore,
     } = await renderWithRestartOnCacheMissInDev(
@@ -2505,6 +2507,19 @@
       serverComponentsErrorHandler
     )
 
+    consoleAsyncStorage.run(
+      { dim: true },
+      spawnStaticShellValidationInDev,
+      resolveValidation,
+      staticStream,
+      runtimeStream,
+      ctx,
+      clientReferenceManifest,
+      requestStore,
+      devValidatingFallbackParams,
+      returnedDebugChannel
+    )
+
     reactServerResult = new ReactServerResult(serverStream)
     requestStore = finalRequestStore
     debugChannel = returnedDebugChannel
@@ -2541,21 +2556,6 @@
         requestId
       )
     }
-
-    // TODO(restart-on-cache-miss):
-    // This can probably be optimized to do less work,
-    // because we've already made sure that we have warm caches.
-    consoleAsyncStorage.run(
-      { dim: true },
-      spawnDynamicValidationInDev,
-      resolveValidation,
-      tree,
-      ctx,
-      res.statusCode === 404,
-      clientReferenceManifest,
-      requestStore,
-      devValidatingFallbackParams
-    )
   } else {
     // This is a dynamic render. We don't do dynamic tracking because we're not prerendering
     const RSCPayload: RSCPayload & RSCPayloadDevProperties =
@@ -2997,6 +2997,9 @@ async function renderWithRestartOnCacheMissInDev(
   let debugChannel = setReactDebugChannel && createDebugChannel()
 
+  const staticChunks: Array<Uint8Array> = []
+  const runtimeChunks: Array<Uint8Array> = []
+
   const initialRscPayload = await getPayload(requestStore)
   const maybeInitialServerStream = await workUnitAsyncStorage.run(
     requestStore,
@@ -3024,10 +3027,7 @@
         return stream
       },
       (stream) => {
-        // Runtime stage
-        initialStageController.advanceStage(RenderStage.Runtime)
-
-        // If we had a cache miss in the static stage, we'll have to disard this stream
+        // If we had a cache miss in the static stage, we'll have to discard this stream
         // and render again once the caches are warm.
         if (cacheSignal.hasPendingReads()) {
           return null
         }
@@ -3035,35 +3035,64 @@
         // If there's no cache misses, we'll continue rendering,
         // and see if there's any cache misses in the runtime stage.
-        return stream
+        // We accumulate static chunks up to the stage deadline within this task
+        const [continuationStream, staticStream] = stream.tee()
+        accumulateStreamChunksToDeadline(
+          staticStream,
+          staticChunks,
+          initialStageController,
+          RenderStage.Runtime
+        )
+        return continuationStream
       },
-      async (maybeStream) => {
+      async (stream) => {
         // Dynamic stage
         // If we had cache misses in either of the previous stages,
         // then we'll only use this render for filling caches.
         // We won't advance the stage, and thus leave dynamic APIs hanging,
         // because they won't be cached anyway, so it'd be wasted work.
-        if (maybeStream === null || cacheSignal.hasPendingReads()) {
+        if (stream === null || cacheSignal.hasPendingReads()) {
           return null
         }
 
-        // If there's no cache misses, we'll use this render, so let it advance to the dynamic stage.
-        initialStageController.advanceStage(RenderStage.Dynamic)
-        return maybeStream
+        // If there were no cache misses we can just use this stream without
+        // restarting.
+        const [continuationStream, runtimeStream] = stream.tee()
+        accumulateStreamChunksToDeadline(
+          runtimeStream,
+          runtimeChunks,
+          initialStageController,
+          RenderStage.Dynamic
+        )
+        return continuationStream
       }
     )
   )
 
   if (maybeInitialServerStream !== null) {
+    let dec = new TextDecoder()
+    console.log('FROM INITIAL', {
+      staticChunks: staticChunks.map((c) => dec.decode(c)),
+    })
+    console.log('FROM INITIAL', {
+      runtimeChunks: runtimeChunks.map((c) => dec.decode(c)),
+    })
     // No cache misses. We can use the stream as is.
     return {
       stream: maybeInitialServerStream,
+      staticStream: unclosingStreamFromChunks(staticChunks),
+      runtimeStream: unclosingStreamFromChunks(runtimeChunks),
       debugChannel,
       requestStore,
     }
   }
 
+  // We had a cache miss and need to restart after filling caches. Let's clear out the
+  // staticChunks and runtimeChunks we previously accumulated
+  staticChunks.length = 0
+  runtimeChunks.length = 0
+
   if (process.env.NODE_ENV === 'development' && setCacheStatus) {
     setCacheStatus('filling', htmlRequestId, requestId)
   }
@@ -3125,29 +3154,103 @@
         )
       },
       (stream) => {
-        // Runtime stage
-        finalStageController.advanceStage(RenderStage.Runtime)
-        return stream
+        const [continuationStream, staticStream] = stream.tee()
+        accumulateStreamChunksToDeadline(
+          staticStream,
+          staticChunks,
+          finalStageController,
+          RenderStage.Runtime
+        )
+        return continuationStream
       },
       (stream) => {
-        // Dynamic stage
-        finalStageController.advanceStage(RenderStage.Dynamic)
-        return stream
+        const [continuationStream, runtimeStream] = stream.tee()
+        accumulateStreamChunksToDeadline(
+          runtimeStream,
+          runtimeChunks,
+          finalStageController,
+          RenderStage.Dynamic
+        )
+        return continuationStream
       }
     )
   )
 
+  let dec = new TextDecoder()
+  console.log('FROM SECOND', {
+    staticChunks: staticChunks.map((c) => dec.decode(c)),
+  })
+  console.log('FROM SECOND', {
+    runtimeChunks: runtimeChunks.map((c) => dec.decode(c)),
+  })
+
   if (process.env.NODE_ENV === 'development' && setCacheStatus) {
     setCacheStatus('filled', htmlRequestId, requestId)
   }
 
   return {
     stream: finalServerStream,
+    staticStream: unclosingStreamFromChunks(staticChunks),
+    runtimeStream: unclosingStreamFromChunks(runtimeChunks),
     debugChannel,
     requestStore,
   }
 }
 
+/**
+ * We call this at the beginning of the task callback that begins a new stage.
+ * We don't advance the stage synchronously because we don't want any work to get
+ * scheduled in React before we have had a chance to read all enqueued chunks.
+ *
+ * It is ok to await before calling this as long as that await is guaranteed to
+ * be microtasky.
+ *
+ * We wait a tick in a microtask which allows the currently enqueued chunks to flush.
+ * We then cancel the stream and advance the stage. This should be compatible with
+ * libraries that use techniques like data loader because we're in the first phase
+ * of a new task so there will be nothing on the microtask queue or tick queue that
+ * can schedule new work.
+ */
+async function accumulateStreamChunksToDeadline(
+  stream: ReadableStream<Uint8Array>,
+  target: Array<Uint8Array>,
+  stageController: StagedRenderingController,
+  stage: typeof RenderStage.Runtime | typeof RenderStage.Dynamic
+): Promise<void> {
+  const reader = stream.getReader()
+
+  let expired = false
+  Promise.resolve().then(() => {
+    process.nextTick(() => {
+      // Deadline reached: stop accumulating, cancel this branch, and advance the stage
+      expired = true
+      reader.cancel()
+      stageController.advanceStage(stage)
+    })
+  })
+
+  try {
+    while (true) {
+      const { done, value } = await reader.read()
+      if (done) {
+        console.log('stream done')
+        break
+      }
+      if (expired) {
+        console.log('stream expired')
+        throw new InvariantError(
+          '`accumulateStreamChunksToDeadline` received a chunk after deadline'
+        )
+      }
+      console.log('chunk enqueued')
+      target.push(value)
+    }
+  } catch (e) {
+    console.log('read rejected', { e })
+    // When we release the lock we may reject the read
+  }
+}
+
 function createAsyncApiPromisesInDev(
   stagedRendering: StagedRenderingController,
   cookies: RequestStore['cookies'],
@@ -3211,7 +3314,7 @@ function createDebugChannel(): DebugChannelPair | undefined {
   let readableController: ReadableStreamDefaultController<Uint8Array> | undefined
 
-  const clientSideReadable = new ReadableStream({
+  let clientSideReadable = new ReadableStream({
     start(controller) {
       readableController = controller
     },
@@ -3231,8 +3334,12 @@
         },
       }),
     },
-    clientSide: {
-      readable: clientSideReadable,
+    get clientSide() {
+      const [t1, t2] = clientSideReadable.tee()
+      clientSideReadable = t1
+      return {
+        readable: t2,
+      }
     },
   }
 }
@@ -3245,6 +3352,330 @@ function createValidationOutlet() {
   return [resolveValidation!, outlet] as const
 }
 
+/**
+ * This function is a fork of prerenderToStream cacheComponents branch.
+ * While it doesn't return a stream we want it to have identical
+ * prerender semantics to prerenderToStream and should update it
+ * in conjunction with any changes to that function.
+ */
+async function spawnStaticShellValidationInDev(
+  resolveValidation: (validatingElement: ReactNode) => void,
+  _staticServerStream: ReadableStream<Uint8Array>,
+  runtimeServerStream: ReadableStream<Uint8Array>,
+  ctx: AppRenderContext,
+  clientReferenceManifest: NonNullable<RenderOpts['clientReferenceManifest']>,
+  requestStore: RequestStore,
+  fallbackRouteParams: OpaqueFallbackRouteParams | null,
+  debugChannel: DebugChannelPair | undefined
+): Promise<void> {
+  console.log('WAITING 5 SECONDS', { debugChannel })
+  await new Promise((r) => setTimeout(r, 5000))
+  const {
+    componentMod: ComponentMod,
+    getDynamicParamFromSegment,
+    implicitTags,
+    nonce,
+    renderOpts,
+    workStore,
+  } = ctx
+
+  const { allowEmptyStaticShell = false } = renderOpts
+
+  // These values are placeholder values for this validating render
+  // that are provided during the actual prerenderToStream.
+  const preinitScripts = () => {}
+  const { ServerInsertedHTMLProvider } = createServerInsertedHTML()
+
+  const rootParams = getRootParams(
+    ComponentMod.routeModule.userland.loaderTree,
+    getDynamicParamFromSegment
+  )
+
+  const hmrRefreshHash = requestStore.cookies.get(
+    NEXT_HMR_REFRESH_HASH_COOKIE
+  )?.value
+
+  // The cacheSignal helps us track whether caches are still filling or we are
+  // ready to cut the render off.
+  const cacheSignal = new CacheSignal()
+
+  const captureOwnerStackClient = ReactClient.captureOwnerStack
+  const { createElement } = ComponentMod
+
+  // We don't need to continue the prerender process if we already
+  // detected invalid dynamic usage in the initial prerender phase.
+  const { invalidDynamicUsageError } = workStore
+  if (invalidDynamicUsageError) {
+    resolveValidation(
+      createElement(LogSafely, {
+        fn: () => {
+          console.error(invalidDynamicUsageError)
+        },
+      })
+    )
+    return
+  }
+
+  // Warmup SSR
+  const initialClientPrerenderController = new AbortController()
+  const initialClientReactController = new AbortController()
+  const initialClientRenderController = new AbortController()
+
+  const initialClientPrerenderStore: PrerenderStore = {
+    type: 'prerender-client',
+    phase: 'render',
+    rootParams,
+    fallbackRouteParams,
+    implicitTags,
+    renderSignal: initialClientRenderController.signal,
+    controller: initialClientPrerenderController,
+    // For HTML Generation the only cache tracked activity
+    // is module loading, which has its own cache signal
+    cacheSignal: null,
+    dynamicTracking: null,
+    allowEmptyStaticShell,
+    revalidate: INFINITE_CACHE,
+    expire: INFINITE_CACHE,
+    stale: INFINITE_CACHE,
+    tags: [...implicitTags.tags],
+    // TODO should this be removed from client stores?
+    prerenderResumeDataCache: null,
+    renderResumeDataCache: null,
+    hmrRefreshHash: undefined,
+    captureOwnerStack: captureOwnerStackClient,
+  }
+
+  const prerender = (
+    require('react-dom/static') as typeof import('react-dom/static')
+  ).prerender
+  const pendingInitialClientResult = workUnitAsyncStorage.run(
+    initialClientPrerenderStore,
+    prerender,
+    // eslint-disable-next-line @next/internal/no-ambiguous-jsx -- React Client
+    ,
+    {
+      signal: initialClientReactController.signal,
+      onError: (err) => {
+        const digest = getDigestForWellKnownError(err)
+
+        if (digest) {
+          return digest
+        }
+
+        if (isReactLargeShellError(err)) {
+          // TODO: Aggregate
+          console.error(err)
+          return undefined
+        }
+
+        if (initialClientReactController.signal.aborted) {
+          // These are expected errors that might error the prerender. we ignore them.
+        } else if (
+          process.env.NEXT_DEBUG_BUILD ||
+          process.env.__NEXT_VERBOSE_LOGGING
+        ) {
+          // We don't normally log these errors because we are going to retry anyway but
+          // it can be useful for debugging Next.js itself to get visibility here when needed
+          printDebugThrownValueForProspectiveRender(err, workStore.route)
+        }
+      },
+      // We don't need bootstrap scripts in this prerender
+      // bootstrapScripts: [bootstrapScript],
+    }
+  )
+
+  // The listener to abort our own render controller must be added after React
+  // has added its listener, to ensure that pending I/O is not
+  // aborted/rejected too early.
+  initialClientReactController.signal.addEventListener(
+    'abort',
+    () => {
+      initialClientRenderController.abort()
+    },
+    { once: true }
+  )
+
+  pendingInitialClientResult.catch((err) => {
+    if (
+      initialClientReactController.signal.aborted ||
+      isPrerenderInterruptedError(err)
+    ) {
+      // These are expected errors that might error the prerender. we ignore them.
+    } else if (
+      process.env.NEXT_DEBUG_BUILD ||
+      process.env.__NEXT_VERBOSE_LOGGING
+    ) {
+      // We don't normally log these errors because we are going to retry anyway but
+      // it can be useful for debugging Next.js itself to get visibility here when needed
+      printDebugThrownValueForProspectiveRender(err, workStore.route)
+    }
+  })
+
+  // This is mostly needed for dynamic `import()`s in client components.
+  // Promises passed to client were already awaited above (assuming that they came from cached functions)
+  trackPendingModules(cacheSignal)
+  await cacheSignal.cacheReady()
+  initialClientReactController.abort()
+
+  // END warmup
+
+  const clientDynamicTracking = createDynamicTrackingState(
+    false //isDebugDynamicAccesses
+  )
+  const finalClientReactController = new AbortController()
+  const finalClientRenderController = new AbortController()
+
+  const finalClientPrerenderStore: PrerenderStore = {
+    type: 'prerender-client',
+    phase: 'render',
+    rootParams,
+    fallbackRouteParams,
+    implicitTags,
+    renderSignal: finalClientRenderController.signal,
+    controller: finalClientReactController,
+    // No APIs require a cacheSignal through the workUnitStore during the HTML prerender
+    cacheSignal: null,
+    dynamicTracking: clientDynamicTracking,
+    allowEmptyStaticShell,
+    revalidate: INFINITE_CACHE,
+    expire: INFINITE_CACHE,
+    stale: INFINITE_CACHE,
+    tags: [...implicitTags.tags],
+    prerenderResumeDataCache: null,
+    renderResumeDataCache: null,
+    hmrRefreshHash,
+    captureOwnerStack: captureOwnerStackClient,
+  }
+
+  let runtimeDynamicValidation = createDynamicValidationState()
+
+  try {
+    let { prelude: unprocessedPrelude } =
+      await prerenderAndAbortInSequentialTasks(
+        () => {
+          const pendingFinalClientResult = workUnitAsyncStorage.run(
+            finalClientPrerenderStore,
+            prerender,
+            // eslint-disable-next-line @next/internal/no-ambiguous-jsx -- React Client
+            ,
+            {
+              signal: finalClientReactController.signal,
+              onError: (err: unknown, errorInfo: ErrorInfo) => {
+                if (
+                  isPrerenderInterruptedError(err) ||
+                  finalClientReactController.signal.aborted
+                ) {
+                  const componentStack = errorInfo.componentStack
+                  if (typeof componentStack === 'string') {
+                    trackAllowedDynamicAccess(
+                      workStore,
+                      componentStack,
+                      runtimeDynamicValidation,
+                      clientDynamicTracking
+                    )
+                  }
+                  return
+                }
+
+                if (isReactLargeShellError(err)) {
+                  // TODO: Aggregate
+                  console.error(err)
+                  return undefined
+                }
+
+                return getDigestForWellKnownError(err)
+              },
+              // We don't need bootstrap scripts in this prerender
+              // bootstrapScripts: [bootstrapScript],
+            }
+          )
+
+          // The listener to abort our own render controller must be added after
+          // React has added its listener, to ensure that pending I/O is not
+          // aborted/rejected too early.
+          finalClientReactController.signal.addEventListener(
+            'abort',
+            () => {
+              finalClientRenderController.abort()
+            },
+            { once: true }
+          )
+
+          return pendingFinalClientResult
+        },
+        () => {
+          finalClientReactController.abort()
+        }
+      )
+
+    const DUMMY_SERVER_DYNAMIC_TRACKING_REMOVE_ME = createDynamicTrackingState(
+      false // isDebugDynamicAccesses
+    )
+
+    const { preludeIsEmpty } = await processPrelude(unprocessedPrelude)
+    resolveValidation(
+      createElement(LogSafely, {
+        fn: throwIfDisallowedDynamic.bind(
+          null,
+          workStore,
+          preludeIsEmpty ? PreludeState.Empty : PreludeState.Full,
+          runtimeDynamicValidation,
+          DUMMY_SERVER_DYNAMIC_TRACKING_REMOVE_ME
+        ),
+      })
+    )
+  } catch (thrownValue) {
+    // Even if the root errors we still want to report any cache components errors
+    // that were discovered before the root errored.
+
+    const DUMMY_SERVER_DYNAMIC_TRACKING_REMOVE_ME = createDynamicTrackingState(
+      false // isDebugDynamicAccesses
+    )
+    let loggingFunction = throwIfDisallowedDynamic.bind(
+      null,
+      workStore,
+      PreludeState.Errored,
+      runtimeDynamicValidation,
+      DUMMY_SERVER_DYNAMIC_TRACKING_REMOVE_ME
+    )
+
+    if (process.env.NEXT_DEBUG_BUILD || process.env.__NEXT_VERBOSE_LOGGING) {
+      // We don't normally log these errors because we are going to retry anyway but
+      // it can be useful for debugging Next.js itself to get visibility here when needed
+      const originalLoggingFunction = loggingFunction
+      loggingFunction = () => {
+        console.error(
+          'During dynamic validation the root of the page errored. The next logged error is the thrown value. It may be a duplicate of errors reported during the normal development mode render.'
+        )
+        console.error(thrownValue)
+        originalLoggingFunction()
+      }
+    }
+
+    resolveValidation(
+      createElement(LogSafely, {
+        fn: loggingFunction,
+      })
+    )
+  }
+}
+
 /**
  * This function is a fork of prerenderToStream cacheComponents branch.
  * While it doesn't return a stream we want it to have identical
  * prerender semantics to prerenderToStream and should update it
  * in conjunction with any changes to that function.
  */
@@ -5449,3 +5880,15 @@ function WarnForBypassCachesInDev({ route }: { route: string }) {
   )
   return null
 }
+
+export function unclosingStreamFromChunks(
+  chunks: Uint8Array[]
+): ReadableStream<Uint8Array> {
+  return new ReadableStream({
+    start(controller) {
+      for (const chunk of chunks) {
+        controller.enqueue(chunk)
+      }
+    },
+  })
+}
diff --git a/test/e2e/app-dir/hello-world/app/page.tsx b/test/e2e/app-dir/hello-world/app/page.tsx
index ff7159d9149fee..89c4118f39237d 100644
--- a/test/e2e/app-dir/hello-world/app/page.tsx
+++ b/test/e2e/app-dir/hello-world/app/page.tsx
@@ -1,3 +1,35 @@
+import { cookies } from 'next/headers'
+import { Suspense } from 'react'
+
 export default function Page() {
-  return <p>hello world</p>
+  return (
+    <div>
+      <p>this is static</p>
+      <Suspense>
+        <Dynamic />
+      </Suspense>
+      <Suspense>
+        <Runtime />
+      </Suspense>
+      <Suspense>
+        <Cached />
+      </Suspense>
+    </div>
+  )
+}
+
+async function Dynamic() {
+  await fetch('https://example.com')
+  return <p>hello dynamic</p>
+}
+
+async function Runtime() {
+  await cookies()
+  return <p>hello runtime</p>
+}
+
+async function Cached() {
+  'use cache'
+  await new Promise((resolve) => setTimeout(resolve, 1000))
+  return <p>hello cached</p>
 }
diff --git a/test/e2e/app-dir/hello-world/next.config.js b/test/e2e/app-dir/hello-world/next.config.js
index 807126e4cf0bf5..12698eac9903db 100644
--- a/test/e2e/app-dir/hello-world/next.config.js
+++ b/test/e2e/app-dir/hello-world/next.config.js
@@ -1,6 +1,11 @@
 /**
  * @type {import('next').NextConfig}
  */
-const nextConfig = {}
+const nextConfig = {
+  cacheComponents: true,
+  experimental: {
+    reactDebugChannel: true,
+  },
+}
 
 module.exports = nextConfig
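---

Notes on the patterns in this patch (not part of the patch itself).

The `pipelineInSequentialTasks` change in app-render-render-utils.ts schedules each stage in its own macrotask and adds a fourth task so that anything `three` scheduled (microtasks, enqueued stream chunks) settles before the promise resolves. A minimal standalone sketch of that scheduling pattern, with an illustrative name (`runInSequentialTasks`, not the real helper):

```ts
// Sketch only — not the Next.js implementation.
function runInSequentialTasks<A, B, C>(
  one: () => A,
  two: (a: A) => B,
  three: (b: B) => C
): Promise<C> {
  return new Promise<C>((resolve, reject) => {
    let a!: A
    let b!: B
    let c!: C

    // Task 1: run `one`. If it throws, cancel every later task.
    setTimeout(() => {
      try {
        a = one()
      } catch (err) {
        clearTimeout(twoId)
        clearTimeout(threeId)
        clearTimeout(fourId)
        reject(err)
      }
    }, 0)

    // Task 2: run `two` with the result of `one`.
    const twoId = setTimeout(() => {
      try {
        b = two(a)
      } catch (err) {
        clearTimeout(threeId)
        clearTimeout(fourId)
        reject(err)
      }
    }, 0)

    // Task 3: run `three`, but do not resolve yet.
    const threeId = setTimeout(() => {
      try {
        c = three(b)
      } catch (err) {
        clearTimeout(fourId)
        reject(err)
      }
    }, 0)

    // Task 4: resolve one macrotask later, so work scheduled by `three`
    // (e.g. stream reads) settles before the caller observes the result.
    const fourId = setTimeout(() => resolve(c), 0)
  })
}
```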
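The stage callbacks above tee the RSC stream and hand one branch to `accumulateStreamChunksToDeadline`, which buffers chunks until a macrotask deadline and then advances the stage. A sketch of that read-until-deadline pattern, assuming the Node.js runtime (`process.nextTick`) and using illustrative names rather than the real helper:

```ts
// Sketch only: buffer chunks from a teed ReadableStream until a deadline task
// fires, then cancel the branch and hand control back via `onDeadline`.
async function bufferChunksUntilDeadline(
  stream: ReadableStream<Uint8Array>,
  target: Uint8Array[],
  onDeadline: () => void
): Promise<void> {
  const reader = stream.getReader()

  let expired = false
  // A microtask followed by a tick: chunks the producer has already enqueued
  // are read below before the deadline fires.
  Promise.resolve().then(() => {
    process.nextTick(() => {
      expired = true
      // Pending reads settle as done after cancel(), which ends the loop below.
      reader.cancel()
      onDeadline()
    })
  })

  while (true) {
    const { done, value } = await reader.read()
    if (done) break
    if (expired) {
      // A chunk after the deadline means the producer kept writing past the
      // stage boundary; surface that loudly instead of silently buffering it.
      throw new Error('received a chunk after the deadline')
    }
    target.push(value)
  }
}
```

The continuation branch of the tee keeps feeding the actual render, so the accumulator only observes what was produced before the stage boundary.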
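`unclosingStreamFromChunks` replays the buffered chunks on a stream that never closes, so the validation render sees the static (or runtime) prefix and then simply waits instead of observing an end-of-stream. A sketch of the same shape, with a hypothetical consumer added for illustration:

```ts
// Sketch only; mirrors the shape of `unclosingStreamFromChunks` in the diff.
function replayWithoutClosing(chunks: Uint8Array[]): ReadableStream<Uint8Array> {
  return new ReadableStream<Uint8Array>({
    start(controller) {
      for (const chunk of chunks) {
        controller.enqueue(chunk)
      }
      // Intentionally no controller.close(): a reader drains the buffered
      // prefix and then parks on the next read.
    },
  })
}

// Illustrative consumer: decode exactly the buffered prefix.
async function readPrefix(chunks: Uint8Array[]): Promise<string> {
  const reader = replayWithoutClosing(chunks).getReader()
  const decoder = new TextDecoder()
  let out = ''
  for (let i = 0; i < chunks.length; i++) {
    const { value } = await reader.read()
    if (value) out += decoder.decode(value, { stream: true })
  }
  return out
}
```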
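The `get clientSide()` change in `createDebugChannel` hands out a fresh tee branch on every access so each consumer gets an independent copy while the retained branch keeps all data. A sketch of that tee-on-access pattern under illustrative names:

```ts
// Sketch only: expose a readable via a getter that tees on each access.
function createFanOutSource(source: ReadableStream<Uint8Array>) {
  let retained = source
  return {
    get readable(): ReadableStream<Uint8Array> {
      // One branch is handed out; the other replaces `retained` so later
      // consumers still observe the full stream from the beginning.
      const [keep, giveAway] = retained.tee()
      retained = keep
      return giveAway
    },
  }
}
```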