Auto-commit 2026-04-29 16:31

This commit is contained in:
2026-04-29 16:31:27 -04:00
parent e8687bb6b2
commit 0495ee5bd2
19691 changed files with 3272886 additions and 138 deletions

View File

@@ -0,0 +1 @@
// Re-export the Edge build of ReactDOMServer (the build that provides renderToReadableStream).
export * from 'react-dom/server.edge'

16
node_modules/next/dist/server/ReactDOMServerPages.js generated vendored Normal file
View File

@@ -0,0 +1,16 @@
"use strict";
let ReactDOMServer;
try {
// TODO: Use Node.js build unless we're in an Edge runtime.
ReactDOMServer = require('react-dom/server.edge');
} catch (error) {
if (error.code !== 'MODULE_NOT_FOUND' && error.code !== 'ERR_PACKAGE_PATH_NOT_EXPORTED') {
throw error;
}
// In React versions without react-dom/server.edge, the browser build works in Node.js.
// The Node.js build does not support renderToReadableStream.
ReactDOMServer = require('react-dom/server.browser');
}
module.exports = ReactDOMServer;
//# sourceMappingURL=ReactDOMServerPages.js.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../src/server/ReactDOMServerPages.js"],"sourcesContent":["let ReactDOMServer\n\ntry {\n // TODO: Use Node.js build unless we're in an Edge runtime.\n ReactDOMServer = require('react-dom/server.edge')\n} catch (error) {\n if (\n error.code !== 'MODULE_NOT_FOUND' &&\n error.code !== 'ERR_PACKAGE_PATH_NOT_EXPORTED'\n ) {\n throw error\n }\n // In React versions without react-dom/server.edge, the browser build works in Node.js.\n // The Node.js build does not support renderToReadableStream.\n ReactDOMServer = require('react-dom/server.browser')\n}\n\nmodule.exports = ReactDOMServer\n"],"names":["ReactDOMServer","require","error","code","module","exports"],"mappings":";AAAA,IAAIA;AAEJ,IAAI;IACF,2DAA2D;IAC3DA,iBAAiBC,QAAQ;AAC3B,EAAE,OAAOC,OAAO;IACd,IACEA,MAAMC,IAAI,KAAK,sBACfD,MAAMC,IAAI,KAAK,iCACf;QACA,MAAMD;IACR;IACA,uFAAuF;IACvF,6DAA6D;IAC7DF,iBAAiBC,QAAQ;AAC3B;AAEAG,OAAOC,OAAO,GAAGL","ignoreList":[0]}

1
node_modules/next/dist/server/accept-header.d.ts generated vendored Normal file
View File

@@ -0,0 +1 @@
/**
 * Picks the best match from an `Accept-Language` header value.
 *
 * @param header - Raw `Accept-Language` header value (defaults to `''`).
 * @param preferences - Supported locales in server-preferred order.
 * @returns The preferred language tag, or `''` when nothing matches.
 */
export declare function acceptLanguage(header?: string, preferences?: readonly string[]): string;

132
node_modules/next/dist/server/accept-header.js generated vendored Normal file
View File

@@ -0,0 +1,132 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
Object.defineProperty(exports, "acceptLanguage", {
enumerable: true,
get: function() {
return acceptLanguage;
}
});
/**
 * Parses a quality-list header (`Accept-Language` style) and returns matching
 * tokens ordered by q-value, then server preference, then header position.
 *
 * @param raw         Raw header value; spaces and tabs are stripped.
 * @param preferences Optional server-preferred values. When given, the result
 *                    contains the original spellings of matched preferences.
 * @param options     `type` is used in error messages; `prefixMatch` also
 *                    indexes language-tag prefixes (e.g. `en` for `en-US`).
 * @throws Error (code "E77") on a malformed header entry.
 */
function parse(raw, preferences, options) {
    // Index of lowercased preferences (and optional prefixes) -> original
    // spelling plus priority rank. Later full preferences may overwrite a
    // prefix entry, but never the other way around.
    const prefIndex = new Map();
    const header = raw.replace(/[ \t]/g, '');
    if (preferences) {
        let rank = 0;
        for (const original of preferences) {
            const lowered = original.toLowerCase();
            prefIndex.set(lowered, {
                orig: original,
                pos: rank++
            });
            if (options.prefixMatch) {
                // Register each shorter prefix ("en-US" -> "en") unless already taken.
                const segments = lowered.split('-');
                segments.pop();
                while (segments.length > 0) {
                    const prefix = segments.join('-');
                    if (!prefIndex.has(prefix)) {
                        prefIndex.set(prefix, {
                            orig: original,
                            pos: rank++
                        });
                    }
                    segments.pop();
                }
            }
        }
    }
    // Builds the standard "invalid header" error carrying the Next error code.
    const invalid = () =>
        Object.defineProperty(new Error(`Invalid ${options.type} header`), "__NEXT_ERROR_CODE", {
            value: "E77",
            enumerable: false,
            configurable: true
        });
    const seen = new Set();
    const selections = [];
    const entries = header.split(',');
    for (let index = 0; index < entries.length; ++index) {
        const entry = entries[index];
        if (!entry) {
            continue;
        }
        const pieces = entry.split(';');
        // At most one parameter (the q-value) is allowed per entry.
        if (pieces.length > 2) {
            throw invalid();
        }
        const token = pieces[0].toLowerCase();
        if (!token) {
            throw invalid();
        }
        const selection = {
            token,
            pos: index,
            q: 1
        };
        if (preferences && prefIndex.has(token)) {
            selection.pref = prefIndex.get(token).pos;
        }
        // Tokens are recorded even when later skipped for q=0, so that "*"
        // expansion below will not resurrect explicitly-rejected values.
        seen.add(selection.token);
        if (pieces.length === 2) {
            const [key, value] = pieces[1].split('=');
            if (!value || (key !== 'q' && key !== 'Q')) {
                throw invalid();
            }
            const score = parseFloat(value);
            if (score === 0) {
                // q=0 means "not acceptable" — drop the entry entirely.
                continue;
            }
            if (Number.isFinite(score) && score <= 1 && score >= 0.001) {
                selection.q = score;
            }
        }
        selections.push(selection);
    }
    // Order: q descending, then preference rank (unranked last), then position.
    selections.sort((a, b) => {
        if (b.q !== a.q) {
            return b.q - a.q;
        }
        if (b.pref !== a.pref) {
            if (a.pref === undefined) {
                return 1;
            }
            if (b.pref === undefined) {
                return -1;
            }
            return a.pref - b.pref;
        }
        return a.pos - b.pos;
    });
    const tokens = selections.map((selection) => selection.token);
    // Without preferences the sorted tokens are the result.
    if (!preferences || !preferences.length) {
        return tokens;
    }
    const preferred = [];
    for (const token of tokens) {
        if (token === '*') {
            // Wildcard: accept every preference not already matched/rejected.
            for (const [key, value] of prefIndex) {
                if (!seen.has(key)) {
                    preferred.push(value.orig);
                }
            }
        } else if (prefIndex.has(token.toLowerCase())) {
            preferred.push(prefIndex.get(token.toLowerCase()).orig);
        }
    }
    return preferred;
}
/**
 * Resolves the single best language for an `Accept-Language` header.
 * Prefix matching is enabled, so e.g. `en` can satisfy a preference of `en-US`.
 *
 * @param header      Raw `Accept-Language` header value.
 * @param preferences Supported locales in server-preferred order.
 * @returns The best match, or the empty string when none is acceptable.
 */
function acceptLanguage(header = '', preferences) {
    const matched = parse(header, preferences, {
        prefixMatch: true,
        type: 'accept-language'
    });
    return matched.length > 0 ? matched[0] : '';
}
//# sourceMappingURL=accept-header.js.map

1
node_modules/next/dist/server/accept-header.js.map generated vendored Normal file

File diff suppressed because one or more lines are too long

21
node_modules/next/dist/server/after/after-context.d.ts generated vendored Normal file
View File

@@ -0,0 +1,21 @@
import type { RequestLifecycleOpts } from '../base-server';
import type { AfterTask } from './after';
/** Constructor options for {@link AfterContext}, derived from the request lifecycle hooks. */
export type AfterContextOpts = {
    waitUntil: RequestLifecycleOpts['waitUntil'] | undefined;
    onClose: RequestLifecycleOpts['onClose'];
    onTaskError: RequestLifecycleOpts['onAfterTaskError'] | undefined;
};
/**
 * Collects `after()` tasks for a single request: promises are kept alive via
 * `waitUntil`, and callbacks are queued and run once the response closes.
 */
export declare class AfterContext {
    private waitUntil;
    private onClose;
    private onTaskError;
    private runCallbacksOnClosePromise;
    private callbackQueue;
    private workUnitStores;
    constructor({ waitUntil, onClose, onTaskError }: AfterContextOpts);
    /** Schedules a promise to be awaited, or a callback to run after the response closes. */
    after(task: AfterTask): void;
    private addCallback;
    private runCallbacksOnClose;
    private runCallbacks;
    private reportTaskError;
}

140
node_modules/next/dist/server/after/after-context.js generated vendored Normal file
View File

@@ -0,0 +1,140 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
Object.defineProperty(exports, "AfterContext", {
enumerable: true,
get: function() {
return AfterContext;
}
});
const _pqueue = /*#__PURE__*/ _interop_require_default(require("next/dist/compiled/p-queue"));
const _invarianterror = require("../../shared/lib/invariant-error");
const _isthenable = require("../../shared/lib/is-thenable");
const _workasyncstorageexternal = require("../app-render/work-async-storage.external");
const _revalidationutils = require("../revalidation-utils");
const _asynclocalstorage = require("../app-render/async-local-storage");
const _workunitasyncstorageexternal = require("../app-render/work-unit-async-storage.external");
const _aftertaskasyncstorageexternal = require("../app-render/after-task-async-storage.external");
/**
 * ESM-to-CJS interop helper (SWC/Babel convention): returns ES modules
 * unchanged and wraps plain CommonJS exports (or primitives) as `{ default }`.
 */
function _interop_require_default(obj) {
    if (obj && obj.__esModule) {
        return obj;
    }
    return {
        default: obj
    };
}
class AfterContext {
    constructor({ waitUntil, onClose, onTaskError }){
        this.workUnitStores = new Set();
        this.waitUntil = waitUntil;
        this.onClose = onClose;
        this.onTaskError = onTaskError;
        // The queue starts paused; it is only started once the response has
        // closed (see runCallbacks), so callbacks never run during rendering.
        this.callbackQueue = new _pqueue.default();
        this.callbackQueue.pause();
    }
    // Entry point: a promise is kept alive via waitUntil; a function is queued
    // to run after the response closes. Anything else is rejected (code E50).
    after(task) {
        if ((0, _isthenable.isThenable)(task)) {
            if (!this.waitUntil) {
                errorWaitUntilNotAvailable();
            }
            this.waitUntil(task.catch((error)=>this.reportTaskError('promise', error)));
        } else if (typeof task === 'function') {
            // TODO(after): implement tracing
            this.addCallback(task);
        } else {
            throw Object.defineProperty(new Error('`after()`: Argument must be a promise or a function'), "__NEXT_ERROR_CODE", {
                value: "E50",
                enumerable: false,
                configurable: true
            });
        }
    }
    addCallback(callback) {
        // if something is wrong, throw synchronously, bubbling up to the `after` callsite.
        if (!this.waitUntil) {
            errorWaitUntilNotAvailable();
        }
        // Track the work-unit store so its phase can be flipped to 'after'
        // before callbacks run (see runCallbacks).
        const workUnitStore = _workunitasyncstorageexternal.workUnitAsyncStorage.getStore();
        if (workUnitStore) {
            this.workUnitStores.add(workUnitStore);
        }
        const afterTaskStore = _aftertaskasyncstorageexternal.afterTaskAsyncStorage.getStore();
        // This is used for checking if request APIs can be called inside `after`.
        // Note that we need to check the phase in which the *topmost* `after` was called (which should be "action"),
        // not the current phase (which might be "after" if we're in a nested after).
        // Otherwise, we might allow `after(() => headers())`, but not `after(() => after(() => headers()))`.
        const rootTaskSpawnPhase = afterTaskStore ? afterTaskStore.rootTaskSpawnPhase // nested after
         : workUnitStore == null ? void 0 : workUnitStore.phase // topmost after
        ;
        // this should only happen once.
        if (!this.runCallbacksOnClosePromise) {
            this.runCallbacksOnClosePromise = this.runCallbacksOnClose();
            this.waitUntil(this.runCallbacksOnClosePromise);
        }
        // Bind the callback to the current execution context (i.e. preserve all currently available ALS-es).
        // We do this because we want all of these to be equivalent in every regard except timing:
        // after(() => x())
        // after(x())
        // await x()
        const wrappedCallback = (0, _asynclocalstorage.bindSnapshot)(// WARNING: Don't make this a named function. It must be anonymous.
        // See: https://github.com/facebook/react/pull/34911
        async ()=>{
            try {
                await _aftertaskasyncstorageexternal.afterTaskAsyncStorage.run({
                    rootTaskSpawnPhase
                }, ()=>callback());
            } catch (error) {
                this.reportTaskError('function', error);
            }
        });
        this.callbackQueue.add(wrappedCallback);
    }
    // Waits for the response to close, then drains the callback queue.
    async runCallbacksOnClose() {
        await new Promise((resolve)=>this.onClose(resolve));
        return this.runCallbacks();
    }
    async runCallbacks() {
        if (this.callbackQueue.size === 0) return;
        // Flip all tracked work-unit stores into the 'after' phase so request
        // APIs invoked inside callbacks behave accordingly.
        for (const workUnitStore of this.workUnitStores){
            workUnitStore.phase = 'after';
        }
        const workStore = _workasyncstorageexternal.workAsyncStorage.getStore();
        if (!workStore) {
            throw Object.defineProperty(new _invarianterror.InvariantError('Missing workStore in AfterContext.runCallbacks'), "__NEXT_ERROR_CODE", {
                value: "E547",
                enumerable: false,
                configurable: true
            });
        }
        // Start the paused queue and wait for it to drain; the wrapper
        // presumably executes revalidations scheduled by callbacks — see
        // ../revalidation-utils for the exact contract.
        return (0, _revalidationutils.withExecuteRevalidates)(workStore, ()=>{
            this.callbackQueue.start();
            return this.callbackQueue.onIdle();
        });
    }
    reportTaskError(taskKind, error) {
        // TODO(after): this is fine for now, but will need better integration with our error reporting.
        // TODO(after): should we log this if we have a onTaskError callback?
        console.error(taskKind === 'promise' ? `A promise passed to \`after()\` rejected:` : `An error occurred in a function passed to \`after()\`:`, error);
        if (this.onTaskError) {
            // this is very defensive, but we really don't want anything to blow up in an error handler
            try {
                this.onTaskError == null ? void 0 : this.onTaskError.call(this, error);
            } catch (handlerError) {
                console.error(Object.defineProperty(new _invarianterror.InvariantError('`onTaskError` threw while handling an error thrown from an `after` task', {
                    cause: handlerError
                }), "__NEXT_ERROR_CODE", {
                    value: "E569",
                    enumerable: false,
                    configurable: true
                }));
            }
        }
    }
}
function errorWaitUntilNotAvailable() {
throw Object.defineProperty(new Error('`after()` will not work correctly, because `waitUntil` is not available in the current environment.'), "__NEXT_ERROR_CODE", {
value: "E91",
enumerable: false,
configurable: true
});
}
//# sourceMappingURL=after-context.js.map

File diff suppressed because one or more lines are too long

6
node_modules/next/dist/server/after/after.d.ts generated vendored Normal file
View File

@@ -0,0 +1,6 @@
/** A task accepted by `after()`: either an in-flight promise or a deferred callback. */
export type AfterTask<T = unknown> = Promise<T> | AfterCallback<T>;
/** A callback scheduled via `after()`; may be sync or async. */
export type AfterCallback<T = unknown> = () => T | Promise<T>;
/**
 * This function allows you to schedule callbacks to be executed after the current request finishes.
 *
 * @param task - A promise to keep the request alive for, or a callback to run
 *   once the response has finished.
 * @throws When called outside of a request scope.
 */
export declare function after<T>(task: AfterTask<T>): void;

26
node_modules/next/dist/server/after/after.js generated vendored Normal file
View File

@@ -0,0 +1,26 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
Object.defineProperty(exports, "after", {
enumerable: true,
get: function() {
return after;
}
});
const _workasyncstorageexternal = require("../app-render/work-async-storage.external");
/**
 * Schedules work to run after the current request finishes.
 * Must be called within a request scope (i.e. while a work store is active).
 *
 * @param task Promise to await, or callback to invoke, after the response closes.
 * @throws Error (code "E468") when no request-scoped work store is available.
 */
function after(task) {
    const workStore = _workasyncstorageexternal.workAsyncStorage.getStore();
    if (!workStore) {
        // TODO(after): the linked docs page talks about *dynamic* APIs, which after soon won't be anymore
        const error = new Error('`after` was called outside a request scope. Read more: https://nextjs.org/docs/messages/next-dynamic-api-wrong-context');
        Object.defineProperty(error, "__NEXT_ERROR_CODE", {
            value: "E468",
            enumerable: false,
            configurable: true
        });
        throw error;
    }
    return workStore.afterContext.after(task);
}
//# sourceMappingURL=after.js.map

1
node_modules/next/dist/server/after/after.js.map generated vendored Normal file
View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/server/after/after.ts"],"sourcesContent":["import { workAsyncStorage } from '../app-render/work-async-storage.external'\n\nexport type AfterTask<T = unknown> = Promise<T> | AfterCallback<T>\nexport type AfterCallback<T = unknown> = () => T | Promise<T>\n\n/**\n * This function allows you to schedule callbacks to be executed after the current request finishes.\n */\nexport function after<T>(task: AfterTask<T>): void {\n const workStore = workAsyncStorage.getStore()\n\n if (!workStore) {\n // TODO(after): the linked docs page talks about *dynamic* APIs, which after soon won't be anymore\n throw new Error(\n '`after` was called outside a request scope. Read more: https://nextjs.org/docs/messages/next-dynamic-api-wrong-context'\n )\n }\n\n const { afterContext } = workStore\n return afterContext.after(task)\n}\n"],"names":["after","task","workStore","workAsyncStorage","getStore","Error","afterContext"],"mappings":";;;;+BAQgBA;;;eAAAA;;;0CARiB;AAQ1B,SAASA,MAASC,IAAkB;IACzC,MAAMC,YAAYC,0CAAgB,CAACC,QAAQ;IAE3C,IAAI,CAACF,WAAW;QACd,kGAAkG;QAClG,MAAM,qBAEL,CAFK,IAAIG,MACR,2HADI,qBAAA;mBAAA;wBAAA;0BAAA;QAEN;IACF;IAEA,MAAM,EAAEC,YAAY,EAAE,GAAGJ;IACzB,OAAOI,aAAaN,KAAK,CAACC;AAC5B","ignoreList":[0]}

29
node_modules/next/dist/server/after/awaiter.d.ts generated vendored Normal file
View File

@@ -0,0 +1,29 @@
/**
 * Provides a `waitUntil` implementation which gathers promises to be awaited later (via {@link AwaiterMulti.awaiting}).
 * Unlike a simple `Promise.all`, {@link AwaiterMulti} works recursively --
 * if a promise passed to {@link AwaiterMulti.waitUntil} calls `waitUntil` again,
 * that second promise will also be awaited.
 */
export declare class AwaiterMulti {
    private promises;
    private onError;
    constructor({ onError }?: {
        onError?: (error: unknown) => void;
    });
    /** Registers a promise to be awaited; rejections are routed to `onError`. */
    waitUntil: (promise: Promise<unknown>) => void;
    /** Settles once every registered promise (including ones added while awaiting) has settled. */
    awaiting(): Promise<void>;
}
/**
 * Like {@link AwaiterMulti}, but can only be awaited once.
 * If {@link AwaiterOnce.waitUntil} is called after that, it will throw.
 */
export declare class AwaiterOnce {
    private awaiter;
    private done;
    private pending;
    constructor(options?: {
        onError?: (error: unknown) => void;
    });
    /** Registers a promise; throws once {@link AwaiterOnce.awaiting} has settled. */
    waitUntil: (promise: Promise<unknown>) => void;
    /** Awaits all registered promises; repeated calls share the same promise. */
    awaiting(): Promise<void>;
}

74
node_modules/next/dist/server/after/awaiter.js generated vendored Normal file
View File

@@ -0,0 +1,74 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
0 && (module.exports = {
AwaiterMulti: null,
AwaiterOnce: null
});
function _export(target, all) {
for(var name in all)Object.defineProperty(target, name, {
enumerable: true,
get: all[name]
});
}
_export(exports, {
AwaiterMulti: function() {
return AwaiterMulti;
},
AwaiterOnce: function() {
return AwaiterOnce;
}
});
const _invarianterror = require("../../shared/lib/invariant-error");
class AwaiterMulti {
constructor({ onError } = {}){
this.promises = new Set();
this.waitUntil = (promise)=>{
// if a promise settles before we await it, we should drop it --
// storing them indefinitely could result in a memory leak.
const cleanup = ()=>{
this.promises.delete(promise);
};
promise.then(cleanup, (err)=>{
cleanup();
this.onError(err);
});
this.promises.add(promise);
};
this.onError = onError ?? console.error;
}
async awaiting() {
while(this.promises.size > 0){
const promises = Array.from(this.promises);
this.promises.clear();
await Promise.allSettled(promises);
}
}
}
/**
 * Like AwaiterMulti, but single-use: once `awaiting()` has settled, any
 * further `waitUntil` call throws an InvariantError (code "E563").
 */
class AwaiterOnce {
    constructor(options = {}){
        this.done = false;
        this.awaiter = new AwaiterMulti(options);
        // Defined per-instance so the method can be passed around unbound.
        this.waitUntil = (promise)=>{
            if (this.done) {
                const error = new _invarianterror.InvariantError('Cannot call waitUntil() on an AwaiterOnce that was already awaited');
                Object.defineProperty(error, "__NEXT_ERROR_CODE", {
                    value: "E563",
                    enumerable: false,
                    configurable: true
                });
                throw error;
            }
            return this.awaiter.waitUntil(promise);
        };
    }
    async awaiting() {
        // Memoized: every caller shares one drain, and `done` only flips after it.
        if (!this.pending) {
            this.pending = this.awaiter.awaiting().finally(()=>{
                this.done = true;
            });
        }
        return this.pending;
    }
}
//# sourceMappingURL=awaiter.js.map

1
node_modules/next/dist/server/after/awaiter.js.map generated vendored Normal file
View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/server/after/awaiter.ts"],"sourcesContent":["import { InvariantError } from '../../shared/lib/invariant-error'\n\n/**\n * Provides a `waitUntil` implementation which gathers promises to be awaited later (via {@link AwaiterMulti.awaiting}).\n * Unlike a simple `Promise.all`, {@link AwaiterMulti} works recursively --\n * if a promise passed to {@link AwaiterMulti.waitUntil} calls `waitUntil` again,\n * that second promise will also be awaited.\n */\nexport class AwaiterMulti {\n private promises: Set<Promise<unknown>> = new Set()\n private onError: (error: unknown) => void\n\n constructor({ onError }: { onError?: (error: unknown) => void } = {}) {\n this.onError = onError ?? console.error\n }\n\n public waitUntil = (promise: Promise<unknown>): void => {\n // if a promise settles before we await it, we should drop it --\n // storing them indefinitely could result in a memory leak.\n const cleanup = () => {\n this.promises.delete(promise)\n }\n\n promise.then(cleanup, (err) => {\n cleanup()\n this.onError(err)\n })\n\n this.promises.add(promise)\n }\n\n public async awaiting(): Promise<void> {\n while (this.promises.size > 0) {\n const promises = Array.from(this.promises)\n this.promises.clear()\n await Promise.allSettled(promises)\n }\n }\n}\n\n/**\n * Like {@link AwaiterMulti}, but can only be awaited once.\n * If {@link AwaiterOnce.waitUntil} is called after that, it will throw.\n */\nexport class AwaiterOnce {\n private awaiter: AwaiterMulti\n private done: boolean = false\n private pending: Promise<void> | undefined\n\n constructor(options: { onError?: (error: unknown) => void } = {}) {\n this.awaiter = new AwaiterMulti(options)\n }\n\n public waitUntil = (promise: Promise<unknown>): void => {\n if (this.done) {\n throw new InvariantError(\n 'Cannot call waitUntil() on an AwaiterOnce that was already awaited'\n )\n }\n return this.awaiter.waitUntil(promise)\n }\n\n public async awaiting(): Promise<void> {\n if (!this.pending) 
{\n this.pending = this.awaiter.awaiting().finally(() => {\n this.done = true\n })\n }\n return this.pending\n }\n}\n"],"names":["AwaiterMulti","AwaiterOnce","constructor","onError","promises","Set","waitUntil","promise","cleanup","delete","then","err","add","console","error","awaiting","size","Array","from","clear","Promise","allSettled","options","done","InvariantError","awaiter","pending","finally"],"mappings":";;;;;;;;;;;;;;;IAQaA,YAAY;eAAZA;;IAoCAC,WAAW;eAAXA;;;gCA5CkB;AAQxB,MAAMD;IAIXE,YAAY,EAAEC,OAAO,EAA0C,GAAG,CAAC,CAAC,CAAE;aAH9DC,WAAkC,IAAIC;aAOvCC,YAAY,CAACC;YAClB,gEAAgE;YAChE,2DAA2D;YAC3D,MAAMC,UAAU;gBACd,IAAI,CAACJ,QAAQ,CAACK,MAAM,CAACF;YACvB;YAEAA,QAAQG,IAAI,CAACF,SAAS,CAACG;gBACrBH;gBACA,IAAI,CAACL,OAAO,CAACQ;YACf;YAEA,IAAI,CAACP,QAAQ,CAACQ,GAAG,CAACL;QACpB;QAhBE,IAAI,CAACJ,OAAO,GAAGA,WAAWU,QAAQC,KAAK;IACzC;IAiBA,MAAaC,WAA0B;QACrC,MAAO,IAAI,CAACX,QAAQ,CAACY,IAAI,GAAG,EAAG;YAC7B,MAAMZ,WAAWa,MAAMC,IAAI,CAAC,IAAI,CAACd,QAAQ;YACzC,IAAI,CAACA,QAAQ,CAACe,KAAK;YACnB,MAAMC,QAAQC,UAAU,CAACjB;QAC3B;IACF;AACF;AAMO,MAAMH;IAKXC,YAAYoB,UAAkD,CAAC,CAAC,CAAE;aAH1DC,OAAgB;aAOjBjB,YAAY,CAACC;YAClB,IAAI,IAAI,CAACgB,IAAI,EAAE;gBACb,MAAM,qBAEL,CAFK,IAAIC,8BAAc,CACtB,uEADI,qBAAA;2BAAA;gCAAA;kCAAA;gBAEN;YACF;YACA,OAAO,IAAI,CAACC,OAAO,CAACnB,SAAS,CAACC;QAChC;QAVE,IAAI,CAACkB,OAAO,GAAG,IAAIzB,aAAasB;IAClC;IAWA,MAAaP,WAA0B;QACrC,IAAI,CAAC,IAAI,CAACW,OAAO,EAAE;YACjB,IAAI,CAACA,OAAO,GAAG,IAAI,CAACD,OAAO,CAACV,QAAQ,GAAGY,OAAO,CAAC;gBAC7C,IAAI,CAACJ,IAAI,GAAG;YACd;QACF;QACA,OAAO,IAAI,CAACG,OAAO;IACrB;AACF","ignoreList":[0]}

View File

@@ -0,0 +1,17 @@
/** Reads the platform-provided request context (if any) from `globalThis`. */
export declare function getBuiltinRequestContext(): BuiltinRequestContextValue | undefined;
/** A request context provided by the platform. */
export type BuiltinRequestContext = {
    get(): BuiltinRequestContextValue | undefined;
};
/** A {@link BuiltinRequestContext} that can also establish a value for the duration of a callback. */
export type RunnableBuiltinRequestContext = BuiltinRequestContext & {
    run<T>(value: BuiltinRequestContextValue, callback: () => T): T;
};
export type BuiltinRequestContextValue = {
    waitUntil?: WaitUntil;
};
/** Keeps the request alive until the given promise settles. */
export type WaitUntil = (promise: Promise<any>) => void;
/** "@next/request-context" has a different signature from AsyncLocalStorage,
 * matching [AsyncContext.Variable](https://github.com/tc39/proposal-async-context).
 * We don't need a full AsyncContext adapter here, just having `.get()` is enough
 */
export declare function createLocalRequestContext(): RunnableBuiltinRequestContext;

View File

@@ -0,0 +1,38 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
0 && (module.exports = {
createLocalRequestContext: null,
getBuiltinRequestContext: null
});
function _export(target, all) {
for(var name in all)Object.defineProperty(target, name, {
enumerable: true,
get: all[name]
});
}
_export(exports, {
createLocalRequestContext: function() {
return createLocalRequestContext;
},
getBuiltinRequestContext: function() {
return getBuiltinRequestContext;
}
});
const _asynclocalstorage = require("../app-render/async-local-storage");
function getBuiltinRequestContext() {
const _globalThis = globalThis;
const ctx = _globalThis[NEXT_REQUEST_CONTEXT_SYMBOL];
return ctx == null ? void 0 : ctx.get();
}
const NEXT_REQUEST_CONTEXT_SYMBOL = Symbol.for('@next/request-context');
function createLocalRequestContext() {
const storage = (0, _asynclocalstorage.createAsyncLocalStorage)();
return {
get: ()=>storage.getStore(),
run: (value, callback)=>storage.run(value, callback)
};
}
//# sourceMappingURL=builtin-request-context.js.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/server/after/builtin-request-context.ts"],"sourcesContent":["import { createAsyncLocalStorage } from '../app-render/async-local-storage'\n\nexport function getBuiltinRequestContext():\n | BuiltinRequestContextValue\n | undefined {\n const _globalThis = globalThis as GlobalThisWithRequestContext\n const ctx = _globalThis[NEXT_REQUEST_CONTEXT_SYMBOL]\n return ctx?.get()\n}\n\nconst NEXT_REQUEST_CONTEXT_SYMBOL = Symbol.for('@next/request-context')\n\ntype GlobalThisWithRequestContext = typeof globalThis & {\n [NEXT_REQUEST_CONTEXT_SYMBOL]?: BuiltinRequestContext\n}\n\n/** A request context provided by the platform. */\nexport type BuiltinRequestContext = {\n get(): BuiltinRequestContextValue | undefined\n}\n\nexport type RunnableBuiltinRequestContext = BuiltinRequestContext & {\n run<T>(value: BuiltinRequestContextValue, callback: () => T): T\n}\n\nexport type BuiltinRequestContextValue = {\n waitUntil?: WaitUntil\n}\nexport type WaitUntil = (promise: Promise<any>) => void\n\n/** \"@next/request-context\" has a different signature from AsyncLocalStorage,\n * matching [AsyncContext.Variable](https://github.com/tc39/proposal-async-context).\n * We don't need a full AsyncContext adapter here, just having `.get()` is enough\n */\nexport function createLocalRequestContext(): RunnableBuiltinRequestContext {\n const storage = createAsyncLocalStorage<BuiltinRequestContextValue>()\n return {\n get: () => storage.getStore(),\n run: (value, callback) => storage.run(value, callback),\n 
}\n}\n"],"names":["createLocalRequestContext","getBuiltinRequestContext","_globalThis","globalThis","ctx","NEXT_REQUEST_CONTEXT_SYMBOL","get","Symbol","for","storage","createAsyncLocalStorage","getStore","run","value","callback"],"mappings":";;;;;;;;;;;;;;;IAkCgBA,yBAAyB;eAAzBA;;IAhCAC,wBAAwB;eAAxBA;;;mCAFwB;AAEjC,SAASA;IAGd,MAAMC,cAAcC;IACpB,MAAMC,MAAMF,WAAW,CAACG,4BAA4B;IACpD,OAAOD,uBAAAA,IAAKE,GAAG;AACjB;AAEA,MAAMD,8BAA8BE,OAAOC,GAAG,CAAC;AAwBxC,SAASR;IACd,MAAMS,UAAUC,IAAAA,0CAAuB;IACvC,OAAO;QACLJ,KAAK,IAAMG,QAAQE,QAAQ;QAC3BC,KAAK,CAACC,OAAOC,WAAaL,QAAQG,GAAG,CAACC,OAAOC;IAC/C;AACF","ignoreList":[0]}

1
node_modules/next/dist/server/after/index.d.ts generated vendored Normal file
View File

@@ -0,0 +1 @@
// Barrel file: re-export the public `after` API.
export * from './after';

21
node_modules/next/dist/server/after/index.js generated vendored Normal file
View File

@@ -0,0 +1,21 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
0 && __export(require("./after"));
_export_star(require("./after"), exports);
function _export_star(from, to) {
Object.keys(from).forEach(function(k) {
if (k !== "default" && !Object.prototype.hasOwnProperty.call(to, k)) {
Object.defineProperty(to, k, {
enumerable: true,
get: function() {
return from[k];
}
});
}
});
return from;
}
//# sourceMappingURL=index.js.map

1
node_modules/next/dist/server/after/index.js.map generated vendored Normal file
View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/server/after/index.ts"],"sourcesContent":["export * from './after'\n"],"names":[],"mappings":";;;;;qBAAc","ignoreList":[0]}

View File

@@ -0,0 +1,14 @@
import type { AfterContextOpts } from './after-context';
/** Non-optional lifecycle hooks suitable for constructing an `AfterContext`. */
type Ctx = {
    waitUntil: NonNullable<AfterContextOpts['waitUntil']>;
    onClose: NonNullable<AfterContextOpts['onClose']>;
    onTaskError: NonNullable<AfterContextOpts['onTaskError']>;
};
/**
 * Self-contained driver for `after()` work: exposes lifecycle hooks via
 * {@link AfterRunner.context}, and {@link AfterRunner.executeAfter} settles
 * once all registered work finishes (rejecting on the first reported error).
 */
export declare class AfterRunner {
    private awaiter;
    private closeController;
    private finishedWithoutErrors;
    readonly context: Ctx;
    executeAfter(): Promise<void>;
}
export {};

35
node_modules/next/dist/server/after/run-with-after.js generated vendored Normal file
View File

@@ -0,0 +1,35 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
Object.defineProperty(exports, "AfterRunner", {
enumerable: true,
get: function() {
return AfterRunner;
}
});
const _detachedpromise = require("../../lib/detached-promise");
const _webonclose = require("../web/web-on-close");
const _awaiter = require("./awaiter");
class AfterRunner {
    // Dispatches "close", waits for all registered work to settle, and then
    // resolves — or rejects with the first error reported via `onTaskError`.
    async executeAfter() {
        this.closeController.dispatchClose();
        await this.awaiter.awaiting();
        // if we got an error while running the callbacks,
        // then this is a noop, because the promise is already rejected
        this.finishedWithoutErrors.resolve();
        return this.finishedWithoutErrors.promise;
    }
    constructor(){
        this.awaiter = new _awaiter.AwaiterOnce();
        this.closeController = new _webonclose.CloseController();
        this.finishedWithoutErrors = new _detachedpromise.DetachedPromise();
        // Lifecycle hooks for an AfterContext, bound to this runner's state.
        this.context = {
            waitUntil: this.awaiter.waitUntil.bind(this.awaiter),
            onClose: this.closeController.onClose.bind(this.closeController),
            onTaskError: (error)=>this.finishedWithoutErrors.reject(error)
        };
    }
}
//# sourceMappingURL=run-with-after.js.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/server/after/run-with-after.ts"],"sourcesContent":["import { DetachedPromise } from '../../lib/detached-promise'\nimport { CloseController } from '../web/web-on-close'\nimport type { AfterContextOpts } from './after-context'\nimport { AwaiterOnce } from './awaiter'\n\ntype Ctx = {\n waitUntil: NonNullable<AfterContextOpts['waitUntil']>\n onClose: NonNullable<AfterContextOpts['onClose']>\n onTaskError: NonNullable<AfterContextOpts['onTaskError']>\n}\n\nexport class AfterRunner {\n private awaiter = new AwaiterOnce()\n private closeController = new CloseController()\n private finishedWithoutErrors = new DetachedPromise<void>()\n\n readonly context: Ctx = {\n waitUntil: this.awaiter.waitUntil.bind(this.awaiter),\n onClose: this.closeController.onClose.bind(this.closeController),\n onTaskError: (error) => this.finishedWithoutErrors.reject(error),\n }\n\n public async executeAfter() {\n this.closeController.dispatchClose()\n await this.awaiter.awaiting()\n\n // if we got an error while running the callbacks,\n // thenthis is a noop, because the promise is already rejected\n this.finishedWithoutErrors.resolve()\n\n return this.finishedWithoutErrors.promise\n 
}\n}\n"],"names":["AfterRunner","executeAfter","closeController","dispatchClose","awaiter","awaiting","finishedWithoutErrors","resolve","promise","AwaiterOnce","CloseController","DetachedPromise","context","waitUntil","bind","onClose","onTaskError","error","reject"],"mappings":";;;;+BAWaA;;;eAAAA;;;iCAXmB;4BACA;yBAEJ;AAQrB,MAAMA;IAWX,MAAaC,eAAe;QAC1B,IAAI,CAACC,eAAe,CAACC,aAAa;QAClC,MAAM,IAAI,CAACC,OAAO,CAACC,QAAQ;QAE3B,kDAAkD;QAClD,8DAA8D;QAC9D,IAAI,CAACC,qBAAqB,CAACC,OAAO;QAElC,OAAO,IAAI,CAACD,qBAAqB,CAACE,OAAO;IAC3C;;aAnBQJ,UAAU,IAAIK,oBAAW;aACzBP,kBAAkB,IAAIQ,2BAAe;aACrCJ,wBAAwB,IAAIK,gCAAe;aAE1CC,UAAe;YACtBC,WAAW,IAAI,CAACT,OAAO,CAACS,SAAS,CAACC,IAAI,CAAC,IAAI,CAACV,OAAO;YACnDW,SAAS,IAAI,CAACb,eAAe,CAACa,OAAO,CAACD,IAAI,CAAC,IAAI,CAACZ,eAAe;YAC/Dc,aAAa,CAACC,QAAU,IAAI,CAACX,qBAAqB,CAACY,MAAM,CAACD;QAC5D;;AAYF","ignoreList":[0]}

View File

@@ -0,0 +1,8 @@
import type { NextApiRequestCookies } from '.';
/**
 * Parse cookies from the `headers` of request
 * @param headers request headers (the `cookie` entry is parsed lazily, on call)
 * @returns a function that parses and returns the request's cookies
 */
export declare function getCookieParser(headers: {
    [key: string]: string | string[] | null | undefined;
}): () => NextApiRequestCookies;

View File

@@ -0,0 +1,22 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
Object.defineProperty(exports, "getCookieParser", {
enumerable: true,
get: function() {
return getCookieParser;
}
});
/**
 * Builds a lazy cookie parser over a request's `headers`.
 * The cookie library is only loaded when a `cookie` header is present.
 *
 * @param headers request headers map
 * @returns a function that parses and returns the request's cookies
 */
function getCookieParser(headers) {
    return function parseCookie() {
        const cookieHeader = headers.cookie;
        if (!cookieHeader) {
            return {};
        }
        const { parse: parseCookieFn } = require('next/dist/compiled/cookie');
        const joined = Array.isArray(cookieHeader) ? cookieHeader.join('; ') : cookieHeader;
        return parseCookieFn(joined);
    };
}
//# sourceMappingURL=get-cookie-parser.js.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/server/api-utils/get-cookie-parser.ts"],"sourcesContent":["import type { NextApiRequestCookies } from '.'\n\n/**\n * Parse cookies from the `headers` of request\n * @param req request object\n */\n\nexport function getCookieParser(headers: {\n [key: string]: string | string[] | null | undefined\n}): () => NextApiRequestCookies {\n return function parseCookie(): NextApiRequestCookies {\n const { cookie } = headers\n\n if (!cookie) {\n return {}\n }\n\n const { parse: parseCookieFn } =\n require('next/dist/compiled/cookie') as typeof import('next/dist/compiled/cookie')\n return parseCookieFn(Array.isArray(cookie) ? cookie.join('; ') : cookie)\n }\n}\n"],"names":["getCookieParser","headers","parseCookie","cookie","parse","parseCookieFn","require","Array","isArray","join"],"mappings":";;;;+BAOgBA;;;eAAAA;;;AAAT,SAASA,gBAAgBC,OAE/B;IACC,OAAO,SAASC;QACd,MAAM,EAAEC,MAAM,EAAE,GAAGF;QAEnB,IAAI,CAACE,QAAQ;YACX,OAAO,CAAC;QACV;QAEA,MAAM,EAAEC,OAAOC,aAAa,EAAE,GAC5BC,QAAQ;QACV,OAAOD,cAAcE,MAAMC,OAAO,CAACL,UAAUA,OAAOM,IAAI,CAAC,QAAQN;IACnE;AACF","ignoreList":[0]}

65
node_modules/next/dist/server/api-utils/index.d.ts generated vendored Normal file
View File

@@ -0,0 +1,65 @@
import type { IncomingMessage } from 'http';
import type { BaseNextRequest } from '../base-http';
import type { NextApiResponse } from '../../shared/lib/utils';
/** Cookies parsed from a request, keyed by cookie name. */
export type NextApiRequestCookies = Partial<{
    [key: string]: string;
}>;
/** Query-string values parsed from a request URL. */
export type NextApiRequestQuery = Partial<{
    [key: string]: string | string[];
}>;
/** Secrets used for Preview Mode cookies (id, encryption, and signing keys). */
export type __ApiPreviewProps = {
    previewModeId: string;
    previewModeEncryptionKey: string;
    previewModeSigningKey: string;
};
/** Wraps an API route handler; the returned handler has the same signature. */
export declare function wrapApiHandler<T extends (...args: any[]) => any>(page: string, handler: T): T;
/**
 *
 * @param res response object
 * @param statusCode `HTTP` status code of response
 */
export declare function sendStatusCode(res: NextApiResponse, statusCode: number): NextApiResponse<any>;
/**
 *
 * @param res response object
 * @param [statusOrUrl] `HTTP` status code of redirect
 * @param url URL of redirect
 */
export declare function redirect(res: NextApiResponse, statusOrUrl: string | number, url?: string): NextApiResponse<any>;
/** Inspects a request's preview credentials to detect an on-demand revalidation request. */
export declare function checkIsOnDemandRevalidate(req: Request | IncomingMessage | BaseNextRequest, previewProps: __ApiPreviewProps): {
    isOnDemandRevalidate: boolean;
    revalidateOnlyGenerated: boolean;
};
export declare const COOKIE_NAME_PRERENDER_BYPASS = "__prerender_bypass";
export declare const COOKIE_NAME_PRERENDER_DATA = "__next_preview_data";
export declare const RESPONSE_LIMIT_DEFAULT: number;
export declare const SYMBOL_PREVIEW_DATA: unique symbol;
export declare const SYMBOL_CLEARED_COOKIES: unique symbol;
/** Clears the Preview Mode cookies on the response; `options.path` presumably scopes the cookie path — confirm against the implementation. */
export declare function clearPreviewData<T>(res: NextApiResponse<T>, options?: {
    path?: string;
}): NextApiResponse<T>;
/**
 * Custom error class
 */
export declare class ApiError extends Error {
    readonly statusCode: number;
    constructor(statusCode: number, message: string);
}
/**
 * Sends error in `response`
 * @param res response object
 * @param statusCode of response
 * @param message of response
 */
export declare function sendError(res: NextApiResponse, statusCode: number, message: string): void;
interface LazyProps {
    req: IncomingMessage;
}
/**
 * Execute getter function only if it's needed
 * @param lazyProps object containing the `req` to attach the lazy property to
 * @param prop name of property
 * @param getter function to get data
 */
export declare function setLazyProp<T>({ req }: LazyProps, prop: string, getter: () => T): void;
export {};

201
node_modules/next/dist/server/api-utils/index.js generated vendored Normal file
View File

@@ -0,0 +1,201 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
0 && (module.exports = {
ApiError: null,
COOKIE_NAME_PRERENDER_BYPASS: null,
COOKIE_NAME_PRERENDER_DATA: null,
RESPONSE_LIMIT_DEFAULT: null,
SYMBOL_CLEARED_COOKIES: null,
SYMBOL_PREVIEW_DATA: null,
checkIsOnDemandRevalidate: null,
clearPreviewData: null,
redirect: null,
sendError: null,
sendStatusCode: null,
setLazyProp: null,
wrapApiHandler: null
});
function _export(target, all) {
for(var name in all)Object.defineProperty(target, name, {
enumerable: true,
get: all[name]
});
}
_export(exports, {
ApiError: function() {
return ApiError;
},
COOKIE_NAME_PRERENDER_BYPASS: function() {
return COOKIE_NAME_PRERENDER_BYPASS;
},
COOKIE_NAME_PRERENDER_DATA: function() {
return COOKIE_NAME_PRERENDER_DATA;
},
RESPONSE_LIMIT_DEFAULT: function() {
return RESPONSE_LIMIT_DEFAULT;
},
SYMBOL_CLEARED_COOKIES: function() {
return SYMBOL_CLEARED_COOKIES;
},
SYMBOL_PREVIEW_DATA: function() {
return SYMBOL_PREVIEW_DATA;
},
checkIsOnDemandRevalidate: function() {
return checkIsOnDemandRevalidate;
},
clearPreviewData: function() {
return clearPreviewData;
},
redirect: function() {
return redirect;
},
sendError: function() {
return sendError;
},
sendStatusCode: function() {
return sendStatusCode;
},
setLazyProp: function() {
return setLazyProp;
},
wrapApiHandler: function() {
return wrapApiHandler;
}
});
const _headers = require("../web/spec-extension/adapters/headers");
const _constants = require("../../lib/constants");
const _tracer = require("../lib/trace/tracer");
const _constants1 = require("../lib/trace/constants");
/**
 * Wraps a Pages Router API handler so each invocation is traced: the route is
 * recorded on the root span and the handler body runs inside its own span.
 */
function wrapApiHandler(page, handler) {
    return function(...handlerArgs) {
        (0, _tracer.getTracer)().setRootSpanAttribute('next.route', page);
        const spanOptions = {
            spanName: `executing api route (pages) ${page}`
        };
        // Execute the user handler inside a NodeSpan.runHandler span.
        return (0, _tracer.getTracer)().trace(_constants1.NodeSpan.runHandler, spanOptions, ()=>handler(...handlerArgs));
    };
}
function sendStatusCode(res, statusCode) {
res.statusCode = statusCode;
return res;
}
function redirect(res, statusOrUrl, url) {
if (typeof statusOrUrl === 'string') {
url = statusOrUrl;
statusOrUrl = 307;
}
if (typeof statusOrUrl !== 'number' || typeof url !== 'string') {
throw Object.defineProperty(new Error(`Invalid redirect arguments. Please use a single argument URL, e.g. res.redirect('/destination') or use a status code and URL, e.g. res.redirect(307, '/destination').`), "__NEXT_ERROR_CODE", {
value: "E389",
enumerable: false,
configurable: true
});
}
res.writeHead(statusOrUrl, {
Location: url
});
res.write(url);
res.end();
return res;
}
/**
 * Determines whether this request is an on-demand ISR revalidation request.
 * The revalidate header must carry the current build's previewModeId to be
 * trusted; the "only generated" header restricts it to already-built paths.
 */
function checkIsOnDemandRevalidate(req, previewProps) {
    const requestHeaders = _headers.HeadersAdapter.from(req.headers);
    const suppliedId = requestHeaders.get(_constants.PRERENDER_REVALIDATE_HEADER);
    return {
        isOnDemandRevalidate: suppliedId === previewProps.previewModeId,
        revalidateOnlyGenerated: requestHeaders.has(_constants.PRERENDER_REVALIDATE_ONLY_GENERATED_HEADER)
    };
}
// Cookie that bypasses the prerender cache while preview/draft mode is active.
const COOKIE_NAME_PRERENDER_BYPASS = `__prerender_bypass`;
// Cookie carrying the signed (and encrypted) preview-mode payload.
const COOKIE_NAME_PRERENDER_DATA = `__next_preview_data`;
// Default API response size limit: 4 MB.
const RESPONSE_LIMIT_DEFAULT = 4 * 1024 * 1024;
// Symbols used to cache state directly on req/res objects without adding
// enumerable keys that could leak into serialization or iteration.
const SYMBOL_PREVIEW_DATA = Symbol(COOKIE_NAME_PRERENDER_DATA);
const SYMBOL_CLEARED_COOKIES = Symbol(COOKIE_NAME_PRERENDER_BYPASS);
// Expires both preview-mode cookies on the response. Idempotent: a
// non-enumerable symbol marks the response so repeated calls do not append
// duplicate Set-Cookie headers.
function clearPreviewData(res, options = {}) {
    if (SYMBOL_CLEARED_COOKIES in res) {
        return res;
    }
    const { serialize } = require('next/dist/compiled/cookie');
    // Preserve any Set-Cookie header(s) already queued on the response.
    const previous = res.getHeader('Set-Cookie');
    res.setHeader(`Set-Cookie`, [
        ...typeof previous === 'string' ? [
            previous
        ] : Array.isArray(previous) ? previous : [],
        serialize(COOKIE_NAME_PRERENDER_BYPASS, '', {
            // To delete a cookie, set `expires` to a date in the past:
            // https://tools.ietf.org/html/rfc6265#section-4.1.1
            // `Max-Age: 0` is not valid, thus ignored, and the cookie is persisted.
            expires: new Date(0),
            httpOnly: true,
            sameSite: process.env.NODE_ENV !== 'development' ? 'none' : 'lax',
            secure: process.env.NODE_ENV !== 'development',
            path: '/',
            ...options.path !== undefined ? {
                path: options.path
            } : undefined
        }),
        serialize(COOKIE_NAME_PRERENDER_DATA, '', {
            // To delete a cookie, set `expires` to a date in the past:
            // https://tools.ietf.org/html/rfc6265#section-4.1.1
            // `Max-Age: 0` is not valid, thus ignored, and the cookie is persisted.
            expires: new Date(0),
            httpOnly: true,
            sameSite: process.env.NODE_ENV !== 'development' ? 'none' : 'lax',
            secure: process.env.NODE_ENV !== 'development',
            path: '/',
            ...options.path !== undefined ? {
                path: options.path
            } : undefined
        })
    ]);
    // Non-enumerable marker flagging that the cookies were already cleared.
    Object.defineProperty(res, SYMBOL_CLEARED_COOKIES, {
        value: true,
        enumerable: false
    });
    return res;
}
// Error subclass carrying an HTTP status code; thrown by API helpers
// (e.g. parse-body) and mapped to a response by sendError in apiResolver.
class ApiError extends Error {
    constructor(statusCode, message){
        super(message);
        this.statusCode = statusCode;
    }
}
function sendError(res, statusCode, message) {
res.statusCode = statusCode;
res.statusMessage = message;
res.end(message);
}
function setLazyProp({ req }, prop, getter) {
const opts = {
configurable: true,
enumerable: true
};
const optsReset = {
...opts,
writable: true
};
Object.defineProperty(req, prop, {
...opts,
get: ()=>{
const value = getter();
// we set the property on the object to avoid recalculating it
Object.defineProperty(req, prop, {
...optsReset,
value
});
return value;
},
set: (value)=>{
Object.defineProperty(req, prop, {
...optsReset,
value
});
}
});
}
//# sourceMappingURL=index.js.map

1
node_modules/next/dist/server/api-utils/index.js.map generated vendored Normal file

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,14 @@
import type { IncomingMessage, ServerResponse } from 'http';
import type { __ApiPreviewProps } from '../.';
import type { RevalidateFn } from '../../lib/router-utils/router-server-context';
import type { InstrumentationOnRequestError } from '../../instrumentation/types';
/** Preview-mode secrets plus the server context needed to resolve a Pages API route. */
type ApiContext = __ApiPreviewProps & {
    trustHostHeader?: boolean;
    allowedRevalidateHeaderKeys?: string[];
    hostname?: string;
    multiZoneDraftMode?: boolean;
    dev: boolean;
    internalRevalidate?: RevalidateFn;
};
/** Resolves a Pages Router API request against the route module's handler. */
export declare function apiResolver(req: IncomingMessage, res: ServerResponse, query: any, resolverModule: any, apiContext: ApiContext, propagateError: boolean, dev?: boolean, page?: string, onError?: InstrumentationOnRequestError): Promise<void>;
export {};

View File

@@ -0,0 +1,395 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
Object.defineProperty(exports, "apiResolver", {
enumerable: true,
get: function() {
return apiResolver;
}
});
const _bytes = /*#__PURE__*/ _interop_require_default(require("next/dist/compiled/bytes"));
const _etag = require("../../lib/etag");
const _sendpayload = require("../../send-payload");
const _stream = require("stream");
const _iserror = /*#__PURE__*/ _interop_require_default(require("../../../lib/is-error"));
const _utils = require("../../../shared/lib/utils");
const _interopdefault = require("../../../lib/interop-default");
const _index = require("./../index");
const _getcookieparser = require("./../get-cookie-parser");
const _constants = require("../../../lib/constants");
const _trygetpreviewdata = require("./try-get-preview-data");
const _parsebody = require("./parse-body");
function _interop_require_default(obj) {
return obj && obj.__esModule ? obj : {
default: obj
};
}
/**
 * Resolves the `config.api.responseLimit` value into a byte count.
 * Booleans (and other falsy values) fall back to the 4 MB default; strings
 * like '8mb' or numbers are parsed by the `bytes` package.
 */
function getMaxContentLength(responseLimit) {
    if (!responseLimit || typeof responseLimit === 'boolean') {
        return _index.RESPONSE_LIMIT_DEFAULT;
    }
    return _bytes.default.parse(responseLimit);
}
/**
 * Send `any` body to response. Handles 204/304 bodiless statuses, streams,
 * Buffers, JSON-like values (object/number/boolean) and plain strings, and
 * short-circuits with a 304 when the client's ETag matches.
 * @param req request object
 * @param res response object
 * @param body of response
 */ function sendData(req, res, body) {
    if (body === null || body === undefined) {
        res.end();
        return;
    }
    // strip irrelevant headers/body
    if (res.statusCode === 204 || res.statusCode === 304) {
        res.removeHeader('Content-Type');
        res.removeHeader('Content-Length');
        res.removeHeader('Transfer-Encoding');
        if (process.env.NODE_ENV === 'development' && body) {
            console.warn(`A body was attempted to be set with a 204 statusCode for ${req.url}, this is invalid and the body was ignored.\n` + `See more info here https://nextjs.org/docs/messages/invalid-api-status-body`);
        }
        res.end();
        return;
    }
    const contentType = res.getHeader('Content-Type');
    // Streams are piped through untouched (no ETag / Content-Length possible).
    if (body instanceof _stream.Stream) {
        if (!contentType) {
            res.setHeader('Content-Type', 'application/octet-stream');
        }
        body.pipe(res);
        return;
    }
    const isJSONLike = [
        'object',
        'number',
        'boolean'
    ].includes(typeof body);
    const stringifiedBody = isJSONLike ? JSON.stringify(body) : body;
    // Respond 304 without a body when the client's If-None-Match matches.
    const etag = (0, _etag.generateETag)(stringifiedBody);
    if ((0, _sendpayload.sendEtagResponse)(req, res, etag)) {
        return;
    }
    if (Buffer.isBuffer(body)) {
        if (!contentType) {
            res.setHeader('Content-Type', 'application/octet-stream');
        }
        res.setHeader('Content-Length', body.length);
        res.end(body);
        return;
    }
    if (isJSONLike) {
        res.setHeader('Content-Type', _constants.JSON_CONTENT_TYPE_HEADER);
    }
    // byteLength, not string length: multibyte characters matter here.
    res.setHeader('Content-Length', Buffer.byteLength(stringifiedBody));
    res.end(stringifiedBody);
}
/**
 * Send a `JSON` response: stringifies `jsonBody` and delegates to `res.send`
 * (which handles ETag/Content-Length) with a JSON content type.
 * @param res response object
 * @param jsonBody of data
 */ function sendJson(res, jsonBody) {
    // Set header to application/json
    res.setHeader('Content-Type', _constants.JSON_CONTENT_TYPE_HEADER);
    // Use send to handle request
    res.send(JSON.stringify(jsonBody));
}
/**
 * Validates a preview-mode secret: must be a string of at least 16 chars.
 */
function isValidData(str) {
    if (typeof str !== 'string') {
        return false;
    }
    return str.length >= 16;
}
// Enables or disables Draft Mode by setting/expiring the bypass cookie.
// `options.enable === false` expires the cookie via an epoch `expires` date.
function setDraftMode(res, options) {
    if (!isValidData(options.previewModeId)) {
        throw Object.defineProperty(new Error('invariant: invalid previewModeId'), "__NEXT_ERROR_CODE", {
            value: "E169",
            enumerable: false,
            configurable: true
        });
    }
    const expires = options.enable ? undefined : new Date(0);
    // To delete a cookie, set `expires` to a date in the past:
    // https://tools.ietf.org/html/rfc6265#section-4.1.1
    // `Max-Age: 0` is not valid, thus ignored, and the cookie is persisted.
    const { serialize } = require('next/dist/compiled/cookie');
    // Append to any Set-Cookie header(s) already queued on the response.
    const previous = res.getHeader('Set-Cookie');
    res.setHeader(`Set-Cookie`, [
        ...typeof previous === 'string' ? [
            previous
        ] : Array.isArray(previous) ? previous : [],
        serialize(_index.COOKIE_NAME_PRERENDER_BYPASS, options.previewModeId, {
            httpOnly: true,
            sameSite: process.env.NODE_ENV !== 'development' ? 'none' : 'lax',
            secure: process.env.NODE_ENV !== 'development',
            path: '/',
            expires
        })
    ]);
    return res;
}
// Enables Preview Mode: encrypts `data`, signs it as a JWT, and sets both the
// bypass cookie and the data cookie. Throws when the preview secrets from the
// build are malformed or the resulting payload is too large for a cookie.
function setPreviewData(res, data, options) {
    if (!isValidData(options.previewModeId)) {
        throw Object.defineProperty(new Error('invariant: invalid previewModeId'), "__NEXT_ERROR_CODE", {
            value: "E169",
            enumerable: false,
            configurable: true
        });
    }
    if (!isValidData(options.previewModeEncryptionKey)) {
        throw Object.defineProperty(new Error('invariant: invalid previewModeEncryptionKey'), "__NEXT_ERROR_CODE", {
            value: "E334",
            enumerable: false,
            configurable: true
        });
    }
    if (!isValidData(options.previewModeSigningKey)) {
        throw Object.defineProperty(new Error('invariant: invalid previewModeSigningKey'), "__NEXT_ERROR_CODE", {
            value: "E436",
            enumerable: false,
            configurable: true
        });
    }
    const jsonwebtoken = require('next/dist/compiled/jsonwebtoken');
    const { encryptWithSecret } = require('../../crypto-utils');
    // The preview data is encrypted, then the ciphertext is signed (HS256) so
    // the client cannot read or forge it.
    const payload = jsonwebtoken.sign({
        data: encryptWithSecret(Buffer.from(options.previewModeEncryptionKey), JSON.stringify(data))
    }, options.previewModeSigningKey, {
        algorithm: 'HS256',
        ...options.maxAge !== undefined ? {
            expiresIn: options.maxAge
        } : undefined
    });
    // limit preview mode cookie to 2KB since we shouldn't store too much
    // data here and browsers drop cookies over 4KB
    if (payload.length > 2048) {
        throw Object.defineProperty(new Error(`Preview data is limited to 2KB currently, reduce how much data you are storing as preview data to continue`), "__NEXT_ERROR_CODE", {
            value: "E465",
            enumerable: false,
            configurable: true
        });
    }
    const { serialize } = require('next/dist/compiled/cookie');
    // Append to any Set-Cookie header(s) already queued on the response.
    const previous = res.getHeader('Set-Cookie');
    res.setHeader(`Set-Cookie`, [
        ...typeof previous === 'string' ? [
            previous
        ] : Array.isArray(previous) ? previous : [],
        serialize(_index.COOKIE_NAME_PRERENDER_BYPASS, options.previewModeId, {
            httpOnly: true,
            sameSite: process.env.NODE_ENV !== 'development' ? 'none' : 'lax',
            secure: process.env.NODE_ENV !== 'development',
            path: '/',
            ...options.maxAge !== undefined ? {
                maxAge: options.maxAge
            } : undefined,
            ...options.path !== undefined ? {
                path: options.path
            } : undefined
        }),
        serialize(_index.COOKIE_NAME_PRERENDER_DATA, payload, {
            httpOnly: true,
            sameSite: process.env.NODE_ENV !== 'development' ? 'none' : 'lax',
            secure: process.env.NODE_ENV !== 'development',
            path: '/',
            ...options.maxAge !== undefined ? {
                maxAge: options.maxAge
            } : undefined,
            ...options.path !== undefined ? {
                path: options.path
            } : undefined
        })
    ]);
    return res;
}
// Performs on-demand revalidation of a prerendered path (res.revalidate()).
// Prefers the in-process router-server hook; otherwise, when the host header
// is trusted (hosted deployments), issues a HEAD request back to the site.
async function revalidate(urlPath, opts, req, context) {
    if (typeof urlPath !== 'string' || !urlPath.startsWith('/')) {
        throw Object.defineProperty(new Error(`Invalid urlPath provided to revalidate(), must be a path e.g. /blog/post-1, received ${urlPath}`), "__NEXT_ERROR_CODE", {
            value: "E153",
            enumerable: false,
            configurable: true
        });
    }
    // The revalidate header carries the previewModeId so the receiving server
    // can authenticate the request (see checkIsOnDemandRevalidate).
    const headers = {
        [_constants.PRERENDER_REVALIDATE_HEADER]: context.previewModeId,
        ...opts.unstable_onlyGenerated ? {
            [_constants.PRERENDER_REVALIDATE_ONLY_GENERATED_HEADER]: '1'
        } : {}
    };
    // Only an allow-listed subset of the incoming headers is forwarded.
    const allowedRevalidateHeaderKeys = [
        ...context.allowedRevalidateHeaderKeys || []
    ];
    if (context.trustHostHeader || context.dev) {
        allowedRevalidateHeaderKeys.push('cookie');
    }
    if (context.trustHostHeader) {
        allowedRevalidateHeaderKeys.push('x-vercel-protection-bypass');
    }
    for (const key of Object.keys(req.headers)){
        if (allowedRevalidateHeaderKeys.includes(key)) {
            headers[key] = req.headers[key];
        }
    }
    const internalRevalidate = context.internalRevalidate;
    try {
        // We use the revalidate in router-server if available.
        // If we are operating without router-server (serverless)
        // we must go through network layer with fetch request
        if (internalRevalidate) {
            return await internalRevalidate({
                urlPath,
                headers,
                opts
            });
        }
        if (context.trustHostHeader) {
            const res = await fetch(`https://${req.headers.host}${urlPath}`, {
                method: 'HEAD',
                headers
            });
            // we use the cache header to determine successful revalidate as
            // a non-200 status code can be returned from a successful revalidate
            // e.g. notFound: true returns 404 status code but is successful
            const cacheHeader = res.headers.get('x-vercel-cache') || res.headers.get('x-nextjs-cache');
            if ((cacheHeader == null ? void 0 : cacheHeader.toUpperCase()) !== 'REVALIDATED' && res.status !== 200 && !(res.status === 404 && opts.unstable_onlyGenerated)) {
                throw Object.defineProperty(new Error(`Invalid response ${res.status}`), "__NEXT_ERROR_CODE", {
                    value: "E175",
                    enumerable: false,
                    configurable: true
                });
            }
        } else {
            throw Object.defineProperty(new Error(`Invariant: missing internal router-server-methods this is an internal bug`), "__NEXT_ERROR_CODE", {
                value: "E676",
                enumerable: false,
                configurable: true
            });
        }
    } catch (err) {
        // Wrap any failure with the path for a more actionable message.
        throw Object.defineProperty(new Error(`Failed to revalidate ${urlPath}: ${(0, _iserror.default)(err) ? err.message : err}`), "__NEXT_ERROR_CODE", {
            value: "E240",
            enumerable: false,
            configurable: true
        });
    }
}
// Resolves a Pages Router API request: augments req/res with the Next.js API
// helpers (cookies, previewData, status/send/json/redirect/revalidate, ...),
// parses the body, enforces the response size limit, then invokes the route
// module's handler. Errors are reported via `onError` and either rethrown
// (dev / propagateError) or converted to an HTTP error response.
async function apiResolver(req, res, query, resolverModule, apiContext, propagateError, dev, page, onError) {
    const apiReq = req;
    const apiRes = res;
    try {
        var _config_api, _config_api1, _config_api2;
        if (!resolverModule) {
            res.statusCode = 404;
            res.end('Not Found');
            return;
        }
        // Per-route `export const config = { api: { ... } }` options.
        const config = resolverModule.config || {};
        const bodyParser = ((_config_api = config.api) == null ? void 0 : _config_api.bodyParser) !== false;
        const responseLimit = ((_config_api1 = config.api) == null ? void 0 : _config_api1.responseLimit) ?? true;
        const externalResolver = ((_config_api2 = config.api) == null ? void 0 : _config_api2.externalResolver) || false;
        // Parsing of cookies
        (0, _index.setLazyProp)({
            req: apiReq
        }, 'cookies', (0, _getcookieparser.getCookieParser)(req.headers));
        // Ensure req.query is a writable, enumerable property by using Object.defineProperty.
        // This addresses Express 5.x, which defines query as a getter only (read-only).
        Object.defineProperty(apiReq, 'query', {
            value: {
                ...query
            },
            writable: true,
            enumerable: true,
            configurable: true
        });
        // Parsing preview data
        (0, _index.setLazyProp)({
            req: apiReq
        }, 'previewData', ()=>(0, _trygetpreviewdata.tryGetPreviewData)(req, res, apiContext, !!apiContext.multiZoneDraftMode));
        // Checking if preview mode is enabled
        (0, _index.setLazyProp)({
            req: apiReq
        }, 'preview', ()=>apiReq.previewData !== false ? true : undefined);
        // Set draftMode to the same value as preview
        (0, _index.setLazyProp)({
            req: apiReq
        }, 'draftMode', ()=>apiReq.preview);
        // Parsing of body
        if (bodyParser && !apiReq.body) {
            apiReq.body = await (0, _parsebody.parseBody)(apiReq, config.api && config.api.bodyParser && config.api.bodyParser.sizeLimit ? config.api.bodyParser.sizeLimit : '1mb');
        }
        // Wrap write/end to count outgoing bytes and warn once the configured
        // response size limit is exceeded.
        let contentLength = 0;
        const maxContentLength = getMaxContentLength(responseLimit);
        const writeData = apiRes.write;
        const endResponse = apiRes.end;
        apiRes.write = (...args)=>{
            contentLength += Buffer.byteLength(args[0] || '');
            return writeData.apply(apiRes, args);
        };
        apiRes.end = (...args)=>{
            if (args.length && typeof args[0] !== 'function') {
                contentLength += Buffer.byteLength(args[0] || '');
            }
            if (responseLimit && contentLength >= maxContentLength) {
                console.warn(`API response for ${req.url} exceeds ${_bytes.default.format(maxContentLength)}. API Routes are meant to respond quickly. https://nextjs.org/docs/messages/api-routes-response-size-limit`);
            }
            return endResponse.apply(apiRes, args);
        };
        // Attach the NextApiResponse helper methods.
        apiRes.status = (statusCode)=>(0, _index.sendStatusCode)(apiRes, statusCode);
        apiRes.send = (data)=>sendData(apiReq, apiRes, data);
        apiRes.json = (data)=>sendJson(apiRes, data);
        apiRes.redirect = (statusOrUrl, url)=>(0, _index.redirect)(apiRes, statusOrUrl, url);
        apiRes.setDraftMode = (options = {
            enable: true
        })=>setDraftMode(apiRes, Object.assign({}, apiContext, options));
        apiRes.setPreviewData = (data, options = {})=>setPreviewData(apiRes, data, Object.assign({}, apiContext, options));
        apiRes.clearPreviewData = (options = {})=>(0, _index.clearPreviewData)(apiRes, options);
        apiRes.revalidate = (urlPath, opts)=>revalidate(urlPath, opts || {}, req, apiContext);
        const resolver = (0, _interopdefault.interopDefault)(resolverModule);
        let wasPiped = false;
        if (process.env.NODE_ENV !== 'production') {
            // listen for pipe event and don't show resolve warning
            res.once('pipe', ()=>wasPiped = true);
        }
        const apiRouteResult = await resolver(req, res);
        if (process.env.NODE_ENV !== 'production') {
            if (typeof apiRouteResult !== 'undefined') {
                if (apiRouteResult instanceof Response) {
                    throw Object.defineProperty(new Error('API route returned a Response object in the Node.js runtime, this is not supported. Please use `runtime: "edge"` instead: https://nextjs.org/docs/api-routes/edge-api-routes'), "__NEXT_ERROR_CODE", {
                        value: "E36",
                        enumerable: false,
                        configurable: true
                    });
                }
                console.warn(`API handler should not return a value, received ${typeof apiRouteResult}.`);
            }
            if (!externalResolver && !(0, _utils.isResSent)(res) && !wasPiped) {
                console.warn(`API resolved without sending a response for ${req.url}, this may result in stalled requests.`);
            }
        }
    } catch (err) {
        // Report to instrumentation before deciding how to surface the error.
        await (onError == null ? void 0 : onError(err, {
            method: req.method || 'GET',
            headers: req.headers,
            path: req.url || '/'
        }, {
            routerKind: 'Pages Router',
            routePath: page || '',
            routeType: 'route',
            revalidateReason: undefined
        }));
        if (err instanceof _index.ApiError) {
            (0, _index.sendError)(apiRes, err.statusCode, err.message);
        } else {
            if (dev) {
                if ((0, _iserror.default)(err)) {
                    err.page = page;
                }
                throw err;
            }
            console.error(err);
            if (propagateError) {
                throw err;
            }
            (0, _index.sendError)(apiRes, 500, 'Internal Server Error');
        }
    }
}
//# sourceMappingURL=api-resolver.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,7 @@
import type { IncomingMessage } from 'http';
import type { SizeLimit } from '../../../types';
/**
 * Parse incoming message like `json` or `urlencoded`
 * @param req request object
 * @param limit maximum allowed body size (e.g. '1mb')
 */
export declare function parseBody(req: IncomingMessage, limit: SizeLimit): Promise<any>;

View File

@@ -0,0 +1,79 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
Object.defineProperty(exports, "parseBody", {
enumerable: true,
get: function() {
return parseBody;
}
});
const _contenttype = require("next/dist/compiled/content-type");
const _iserror = /*#__PURE__*/ _interop_require_default(require("../../../lib/is-error"));
const _index = require("../index");
function _interop_require_default(obj) {
return obj && obj.__esModule ? obj : {
default: obj
};
}
/**
 * Parses a JSON request body. The empty string is treated as `{}` (a common
 * client-side mistake); malformed input is surfaced as a 400 ApiError.
 * @param str `JSON` string
 */
function parseJson(str) {
    if (!str.length) {
        // special-case empty json body, as it's a common client-side mistake
        return {};
    }
    try {
        return JSON.parse(str);
    } catch (e) {
        throw Object.defineProperty(new _index.ApiError(400, 'Invalid JSON'), "__NEXT_ERROR_CODE", {
            value: "E394",
            enumerable: false,
            configurable: true
        });
    }
}
// Reads and decodes the request body according to its Content-Type:
// JSON (and ld+json) is parsed, urlencoded is decoded via querystring, and
// anything else is returned as a raw string. Enforces the size `limit`
// (413 on overflow, 400 on other read failures).
async function parseBody(req, limit) {
    let contentType;
    try {
        contentType = (0, _contenttype.parse)(req.headers['content-type'] || 'text/plain');
    } catch {
        // Malformed Content-Type header: fall back to text/plain.
        contentType = (0, _contenttype.parse)('text/plain');
    }
    const { type, parameters } = contentType;
    const encoding = parameters.charset || 'utf-8';
    let buffer;
    try {
        const getRawBody = require('next/dist/compiled/raw-body');
        buffer = await getRawBody(req, {
            encoding,
            limit
        });
    } catch (e) {
        if ((0, _iserror.default)(e) && e.type === 'entity.too.large') {
            throw Object.defineProperty(new _index.ApiError(413, `Body exceeded ${limit} limit`), "__NEXT_ERROR_CODE", {
                value: "E394",
                enumerable: false,
                configurable: true
            });
        } else {
            throw Object.defineProperty(new _index.ApiError(400, 'Invalid body'), "__NEXT_ERROR_CODE", {
                value: "E394",
                enumerable: false,
                configurable: true
            });
        }
    }
    const body = buffer.toString();
    if (type === 'application/json' || type === 'application/ld+json') {
        return parseJson(body);
    } else if (type === 'application/x-www-form-urlencoded') {
        const qs = require('querystring');
        return qs.decode(body);
    } else {
        return body;
    }
}
//# sourceMappingURL=parse-body.js.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/server/api-utils/node/parse-body.ts"],"sourcesContent":["import type { IncomingMessage } from 'http'\n\nimport { parse } from 'next/dist/compiled/content-type'\nimport isError from '../../../lib/is-error'\nimport type { SizeLimit } from '../../../types'\nimport { ApiError } from '../index'\n\n/**\n * Parse `JSON` and handles invalid `JSON` strings\n * @param str `JSON` string\n */\nfunction parseJson(str: string): object {\n if (str.length === 0) {\n // special-case empty json body, as it's a common client-side mistake\n return {}\n }\n\n try {\n return JSON.parse(str)\n } catch (e) {\n throw new ApiError(400, 'Invalid JSON')\n }\n}\n\n/**\n * Parse incoming message like `json` or `urlencoded`\n * @param req request object\n */\nexport async function parseBody(\n req: IncomingMessage,\n limit: SizeLimit\n): Promise<any> {\n let contentType\n try {\n contentType = parse(req.headers['content-type'] || 'text/plain')\n } catch {\n contentType = parse('text/plain')\n }\n const { type, parameters } = contentType\n const encoding = parameters.charset || 'utf-8'\n\n let buffer\n\n try {\n const getRawBody =\n require('next/dist/compiled/raw-body') as typeof import('next/dist/compiled/raw-body')\n buffer = await getRawBody(req, { encoding, limit })\n } catch (e) {\n if (isError(e) && e.type === 'entity.too.large') {\n throw new ApiError(413, `Body exceeded ${limit} limit`)\n } else {\n throw new ApiError(400, 'Invalid body')\n }\n }\n\n const body = buffer.toString()\n\n if (type === 'application/json' || type === 'application/ld+json') {\n return parseJson(body)\n } else if (type === 'application/x-www-form-urlencoded') {\n const qs = require('querystring') as typeof import('querystring')\n return qs.decode(body)\n } else {\n return body\n 
}\n}\n"],"names":["parseBody","parseJson","str","length","JSON","parse","e","ApiError","req","limit","contentType","headers","type","parameters","encoding","charset","buffer","getRawBody","require","isError","body","toString","qs","decode"],"mappings":";;;;+BA4BsBA;;;eAAAA;;;6BA1BA;gEACF;uBAEK;;;;;;AAEzB;;;CAGC,GACD,SAASC,UAAUC,GAAW;IAC5B,IAAIA,IAAIC,MAAM,KAAK,GAAG;QACpB,qEAAqE;QACrE,OAAO,CAAC;IACV;IAEA,IAAI;QACF,OAAOC,KAAKC,KAAK,CAACH;IACpB,EAAE,OAAOI,GAAG;QACV,MAAM,qBAAiC,CAAjC,IAAIC,eAAQ,CAAC,KAAK,iBAAlB,qBAAA;mBAAA;wBAAA;0BAAA;QAAgC;IACxC;AACF;AAMO,eAAeP,UACpBQ,GAAoB,EACpBC,KAAgB;IAEhB,IAAIC;IACJ,IAAI;QACFA,cAAcL,IAAAA,kBAAK,EAACG,IAAIG,OAAO,CAAC,eAAe,IAAI;IACrD,EAAE,OAAM;QACND,cAAcL,IAAAA,kBAAK,EAAC;IACtB;IACA,MAAM,EAAEO,IAAI,EAAEC,UAAU,EAAE,GAAGH;IAC7B,MAAMI,WAAWD,WAAWE,OAAO,IAAI;IAEvC,IAAIC;IAEJ,IAAI;QACF,MAAMC,aACJC,QAAQ;QACVF,SAAS,MAAMC,WAAWT,KAAK;YAAEM;YAAUL;QAAM;IACnD,EAAE,OAAOH,GAAG;QACV,IAAIa,IAAAA,gBAAO,EAACb,MAAMA,EAAEM,IAAI,KAAK,oBAAoB;YAC/C,MAAM,qBAAiD,CAAjD,IAAIL,eAAQ,CAAC,KAAK,CAAC,cAAc,EAAEE,MAAM,MAAM,CAAC,GAAhD,qBAAA;uBAAA;4BAAA;8BAAA;YAAgD;QACxD,OAAO;YACL,MAAM,qBAAiC,CAAjC,IAAIF,eAAQ,CAAC,KAAK,iBAAlB,qBAAA;uBAAA;4BAAA;8BAAA;YAAgC;QACxC;IACF;IAEA,MAAMa,OAAOJ,OAAOK,QAAQ;IAE5B,IAAIT,SAAS,sBAAsBA,SAAS,uBAAuB;QACjE,OAAOX,UAAUmB;IACnB,OAAO,IAAIR,SAAS,qCAAqC;QACvD,MAAMU,KAAKJ,QAAQ;QACnB,OAAOI,GAAGC,MAAM,CAACH;IACnB,OAAO;QACL,OAAOA;IACT;AACF","ignoreList":[0]}

View File

@@ -0,0 +1,5 @@
import type { IncomingMessage, ServerResponse } from 'http';
import type { __ApiPreviewProps } from '../.';
import type { BaseNextRequest, BaseNextResponse } from '../../base-http';
import type { PreviewData } from '../../../types';
/** Returns the decoded preview data for the request, or `false` when preview mode is not active or its cookies are invalid. */
export declare function tryGetPreviewData(req: IncomingMessage | BaseNextRequest | Request, res: ServerResponse | BaseNextResponse, options: __ApiPreviewProps, multiZoneDraftMode: boolean): PreviewData;

View File

@@ -0,0 +1,86 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
Object.defineProperty(exports, "tryGetPreviewData", {
enumerable: true,
get: function() {
return tryGetPreviewData;
}
});
const _ = require("../.");
const _index = require("../index");
const _cookies = require("../../web/spec-extension/cookies");
const _headers = require("../../web/spec-extension/adapters/headers");
// Decodes the preview-mode cookies for this request. Returns the decrypted
// preview data object, `{}` for Draft Mode (bypass cookie only), or `false`
// when preview mode is inactive or its cookies are invalid/stale. Invalid
// cookies are cleared unless running in multi-zone draft mode.
function tryGetPreviewData(req, res, options, multiZoneDraftMode) {
    var _cookies_get, _cookies_get1;
    // if an On-Demand revalidation is being done preview mode
    // is disabled
    if (options && (0, _.checkIsOnDemandRevalidate)(req, options).isOnDemandRevalidate) {
        return false;
    }
    // Read cached preview data if present
    // TODO: use request metadata instead of a symbol
    if (_index.SYMBOL_PREVIEW_DATA in req) {
        return req[_index.SYMBOL_PREVIEW_DATA];
    }
    const headers = _headers.HeadersAdapter.from(req.headers);
    const cookies = new _cookies.RequestCookies(headers);
    const previewModeId = (_cookies_get = cookies.get(_index.COOKIE_NAME_PRERENDER_BYPASS)) == null ? void 0 : _cookies_get.value;
    const tokenPreviewData = (_cookies_get1 = cookies.get(_index.COOKIE_NAME_PRERENDER_DATA)) == null ? void 0 : _cookies_get1.value;
    // Case: preview mode cookie set but data cookie is not set
    if (previewModeId && !tokenPreviewData && previewModeId === options.previewModeId) {
        // This is "Draft Mode" which doesn't use
        // previewData, so we return an empty object
        // for backwards compat with "Preview Mode".
        const data = {};
        Object.defineProperty(req, _index.SYMBOL_PREVIEW_DATA, {
            value: data,
            enumerable: false
        });
        return data;
    }
    // Case: neither cookie is set.
    if (!previewModeId && !tokenPreviewData) {
        return false;
    }
    // Case: one cookie is set, but not the other.
    if (!previewModeId || !tokenPreviewData) {
        if (!multiZoneDraftMode) {
            (0, _index.clearPreviewData)(res);
        }
        return false;
    }
    // Case: preview session is for an old build.
    if (previewModeId !== options.previewModeId) {
        if (!multiZoneDraftMode) {
            (0, _index.clearPreviewData)(res);
        }
        return false;
    }
    // Verify the JWT signature before trusting the payload.
    let encryptedPreviewData;
    try {
        const jsonwebtoken = require('next/dist/compiled/jsonwebtoken');
        encryptedPreviewData = jsonwebtoken.verify(tokenPreviewData, options.previewModeSigningKey);
    } catch {
        // TODO: warn
        (0, _index.clearPreviewData)(res);
        return false;
    }
    const { decryptWithSecret } = require('../../crypto-utils');
    const decryptedPreviewData = decryptWithSecret(Buffer.from(options.previewModeEncryptionKey), encryptedPreviewData.data);
    try {
        // TODO: strict runtime type checking
        const data = JSON.parse(decryptedPreviewData);
        // Cache lookup
        Object.defineProperty(req, _index.SYMBOL_PREVIEW_DATA, {
            value: data,
            enumerable: false
        });
        return data;
    } catch {
        return false;
    }
}
//# sourceMappingURL=try-get-preview-data.js.map

File diff suppressed because one or more lines are too long

1
node_modules/next/dist/server/api-utils/web.d.ts generated vendored Normal file
View File

@@ -0,0 +1 @@
/** UTF-8 byte length of `payload` (Edge-runtime stand-in for `Buffer.byteLength`). */
export declare function byteLength(payload: string): number;

17
node_modules/next/dist/server/api-utils/web.js generated vendored Normal file
View File

@@ -0,0 +1,17 @@
// Buffer.byteLength polyfill in the Edge runtime, with only utf8 strings
// supported at the moment.
"use strict";
Object.defineProperty(exports, "__esModule", {
    value: true
});
Object.defineProperty(exports, "byteLength", {
    enumerable: true,
    get: function() {
        return byteLength;
    }
});
/**
 * Returns the UTF-8 byte length of `payload`.
 *
 * Uses the Uint8Array view's own `byteLength` rather than the backing
 * ArrayBuffer's (`.buffer.byteLength`): the spec does not guarantee that the
 * buffer returned by TextEncoder.encode() is exactly sized, so reading the
 * buffer length could over-report on some implementations.
 */ function byteLength(payload) {
    return new TextEncoder().encode(payload).byteLength;
}
//# sourceMappingURL=web.js.map

1
node_modules/next/dist/server/api-utils/web.js.map generated vendored Normal file
View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/server/api-utils/web.ts"],"sourcesContent":["// Buffer.byteLength polyfill in the Edge runtime, with only utf8 strings\n// supported at the moment.\nexport function byteLength(payload: string): number {\n return new TextEncoder().encode(payload).buffer.byteLength\n}\n"],"names":["byteLength","payload","TextEncoder","encode","buffer"],"mappings":"AAAA,yEAAyE;AACzE,2BAA2B;;;;;+BACXA;;;eAAAA;;;AAAT,SAASA,WAAWC,OAAe;IACxC,OAAO,IAAIC,cAAcC,MAAM,CAACF,SAASG,MAAM,CAACJ,UAAU;AAC5D","ignoreList":[0]}

View File

@@ -0,0 +1,2 @@
import type { ActionAsyncStorage } from './action-async-storage.external';
/** Shared AsyncLocalStorage instance backing the action store. */
export declare const actionAsyncStorageInstance: ActionAsyncStorage;

View File

@@ -0,0 +1,14 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
Object.defineProperty(exports, "actionAsyncStorageInstance", {
enumerable: true,
get: function() {
return actionAsyncStorageInstance;
}
});
const _asynclocalstorage = require("./async-local-storage");
const actionAsyncStorageInstance = (0, _asynclocalstorage.createAsyncLocalStorage)();
//# sourceMappingURL=action-async-storage-instance.js.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/server/app-render/action-async-storage-instance.ts"],"sourcesContent":["import type { ActionAsyncStorage } from './action-async-storage.external'\nimport { createAsyncLocalStorage } from './async-local-storage'\n\nexport const actionAsyncStorageInstance: ActionAsyncStorage =\n createAsyncLocalStorage()\n"],"names":["actionAsyncStorageInstance","createAsyncLocalStorage"],"mappings":";;;;+BAGaA;;;eAAAA;;;mCAF2B;AAEjC,MAAMA,6BACXC,IAAAA,0CAAuB","ignoreList":[0]}

View File

@@ -0,0 +1,8 @@
import type { AsyncLocalStorage } from 'async_hooks';
import { actionAsyncStorageInstance } from './action-async-storage-instance';
export interface ActionStore {
readonly isAction?: boolean;
readonly isAppRoute?: boolean;
}
export type ActionAsyncStorage = AsyncLocalStorage<ActionStore>;
export { actionAsyncStorageInstance as actionAsyncStorage };

View File

@@ -0,0 +1,13 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
Object.defineProperty(exports, "actionAsyncStorage", {
enumerable: true,
get: function() {
return _actionasyncstorageinstance.actionAsyncStorageInstance;
}
});
const _actionasyncstorageinstance = require("./action-async-storage-instance");
//# sourceMappingURL=action-async-storage.external.js.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/server/app-render/action-async-storage.external.ts"],"sourcesContent":["import type { AsyncLocalStorage } from 'async_hooks'\n\n// Share the instance module in the next-shared layer\nimport { actionAsyncStorageInstance } from './action-async-storage-instance' with { 'turbopack-transition': 'next-shared' }\nexport interface ActionStore {\n readonly isAction?: boolean\n readonly isAppRoute?: boolean\n}\n\nexport type ActionAsyncStorage = AsyncLocalStorage<ActionStore>\n\nexport { actionAsyncStorageInstance as actionAsyncStorage }\n"],"names":["actionAsyncStorage","actionAsyncStorageInstance"],"mappings":";;;;+BAWuCA;;;eAA9BC,sDAA0B;;;4CARQ","ignoreList":[0]}

View File

@@ -0,0 +1,42 @@
import type { IncomingHttpHeaders } from 'node:http';
import type { SizeLimit } from '../../types';
import type { RequestStore } from '../app-render/work-unit-async-storage.external';
import type { AppRenderContext, GenerateFlight } from './app-render';
import type { AppPageModule } from '../route-modules/app-page/module';
import type { BaseNextRequest, BaseNextResponse } from '../base-http';
import RenderResult, { type AppPageRenderResultMetadata } from '../render-result';
import type { WorkStore } from '../app-render/work-async-storage.external';
declare const enum HostType {
XForwardedHost = "x-forwarded-host",
Host = "host"
}
export declare function parseHostHeader(headers: IncomingHttpHeaders, originDomain?: string): {
type: HostType;
value: string;
} | undefined;
type ServerActionsConfig = {
bodySizeLimit?: SizeLimit;
allowedOrigins?: string[];
};
type HandleActionResult = {
/** An MPA action threw notFound(), and we need to render the appropriate HTML */
type: 'not-found';
} | {
type: 'done';
result: RenderResult | undefined;
formState?: any;
}
/** The request turned out not to be a server action. */
| null;
export declare function handleAction({ req, res, ComponentMod, generateFlight, workStore, requestStore, serverActions, ctx, metadata, }: {
req: BaseNextRequest;
res: BaseNextResponse;
ComponentMod: AppPageModule;
generateFlight: GenerateFlight;
workStore: WorkStore;
requestStore: RequestStore;
serverActions?: ServerActionsConfig;
ctx: AppRenderContext;
metadata: AppPageRenderResultMetadata;
}): Promise<HandleActionResult>;
export {};

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,2 @@
import type { AfterTaskAsyncStorage } from './after-task-async-storage.external';
export declare const afterTaskAsyncStorageInstance: AfterTaskAsyncStorage;

View File

@@ -0,0 +1,14 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
Object.defineProperty(exports, "afterTaskAsyncStorageInstance", {
enumerable: true,
get: function() {
return afterTaskAsyncStorageInstance;
}
});
const _asynclocalstorage = require("./async-local-storage");
const afterTaskAsyncStorageInstance = (0, _asynclocalstorage.createAsyncLocalStorage)();
//# sourceMappingURL=after-task-async-storage-instance.js.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/server/app-render/after-task-async-storage-instance.ts"],"sourcesContent":["import type { AfterTaskAsyncStorage } from './after-task-async-storage.external'\nimport { createAsyncLocalStorage } from './async-local-storage'\n\nexport const afterTaskAsyncStorageInstance: AfterTaskAsyncStorage =\n createAsyncLocalStorage()\n"],"names":["afterTaskAsyncStorageInstance","createAsyncLocalStorage"],"mappings":";;;;+BAGaA;;;eAAAA;;;mCAF2B;AAEjC,MAAMA,gCACXC,IAAAA,0CAAuB","ignoreList":[0]}

View File

@@ -0,0 +1,13 @@
import type { AsyncLocalStorage } from 'async_hooks';
import { afterTaskAsyncStorageInstance as afterTaskAsyncStorage } from './after-task-async-storage-instance';
import type { WorkUnitStore } from './work-unit-async-storage.external';
export interface AfterTaskStore {
/** The phase in which the topmost `after` was called.
*
* NOTE: Can be undefined when running `generateStaticParams`,
* where we only have a `workStore`, no `workUnitStore`.
*/
readonly rootTaskSpawnPhase: WorkUnitStore['phase'] | undefined;
}
export type AfterTaskAsyncStorage = AsyncLocalStorage<AfterTaskStore>;
export { afterTaskAsyncStorage };

View File

@@ -0,0 +1,13 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
Object.defineProperty(exports, "afterTaskAsyncStorage", {
enumerable: true,
get: function() {
return _aftertaskasyncstorageinstance.afterTaskAsyncStorageInstance;
}
});
const _aftertaskasyncstorageinstance = require("./after-task-async-storage-instance");
//# sourceMappingURL=after-task-async-storage.external.js.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/server/app-render/after-task-async-storage.external.ts"],"sourcesContent":["import type { AsyncLocalStorage } from 'async_hooks'\n\n// Share the instance module in the next-shared layer\nimport { afterTaskAsyncStorageInstance as afterTaskAsyncStorage } from './after-task-async-storage-instance' with { 'turbopack-transition': 'next-shared' }\nimport type { WorkUnitStore } from './work-unit-async-storage.external'\n\nexport interface AfterTaskStore {\n /** The phase in which the topmost `after` was called.\n *\n * NOTE: Can be undefined when running `generateStaticParams`,\n * where we only have a `workStore`, no `workUnitStore`.\n */\n readonly rootTaskSpawnPhase: WorkUnitStore['phase'] | undefined\n}\n\nexport type AfterTaskAsyncStorage = AsyncLocalStorage<AfterTaskStore>\n\nexport { afterTaskAsyncStorage }\n"],"names":["afterTaskAsyncStorage"],"mappings":";;;;+BAiBSA;;;eAAAA,4DAAqB;;;+CAdyC","ignoreList":[0]}

View File

@@ -0,0 +1,26 @@
export declare class ReactServerResult {
private _stream;
constructor(stream: ReadableStream<Uint8Array>);
tee(): ReadableStream<Uint8Array<ArrayBufferLike>>;
consume(): ReadableStream<Uint8Array<ArrayBufferLike>>;
}
export type ReactServerPrerenderResolveToType = {
prelude: ReadableStream<Uint8Array>;
};
export declare function createReactServerPrerenderResult(underlying: Promise<ReactServerPrerenderResolveToType>): Promise<ReactServerPrerenderResult>;
export declare function createReactServerPrerenderResultFromRender(underlying: ReadableStream<Uint8Array>): Promise<ReactServerPrerenderResult>;
export declare class ReactServerPrerenderResult {
private _chunks;
private assertChunks;
private consumeChunks;
consume(): void;
constructor(chunks: Array<Uint8Array>);
asUnclosingStream(): ReadableStream<Uint8Array>;
consumeAsUnclosingStream(): ReadableStream<Uint8Array>;
asStream(): ReadableStream<Uint8Array>;
consumeAsStream(): ReadableStream<Uint8Array>;
}
export declare function processPrelude(unprocessedPrelude: ReadableStream<Uint8Array>): Promise<{
prelude: ReadableStream<Uint8Array<ArrayBufferLike>>;
preludeIsEmpty: boolean;
}>;

View File

@@ -0,0 +1,167 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
0 && (module.exports = {
ReactServerPrerenderResult: null,
ReactServerResult: null,
createReactServerPrerenderResult: null,
createReactServerPrerenderResultFromRender: null,
processPrelude: null
});
function _export(target, all) {
for(var name in all)Object.defineProperty(target, name, {
enumerable: true,
get: all[name]
});
}
_export(exports, {
ReactServerPrerenderResult: function() {
return ReactServerPrerenderResult;
},
ReactServerResult: function() {
return ReactServerResult;
},
createReactServerPrerenderResult: function() {
return createReactServerPrerenderResult;
},
createReactServerPrerenderResultFromRender: function() {
return createReactServerPrerenderResultFromRender;
},
processPrelude: function() {
return processPrelude;
}
});
const _invarianterror = require("../../shared/lib/invariant-error");
class ReactServerResult {
constructor(stream){
this._stream = stream;
}
tee() {
if (this._stream === null) {
throw Object.defineProperty(new Error('Cannot tee a ReactServerResult that has already been consumed'), "__NEXT_ERROR_CODE", {
value: "E106",
enumerable: false,
configurable: true
});
}
const tee = this._stream.tee();
this._stream = tee[0];
return tee[1];
}
consume() {
if (this._stream === null) {
throw Object.defineProperty(new Error('Cannot consume a ReactServerResult that has already been consumed'), "__NEXT_ERROR_CODE", {
value: "E470",
enumerable: false,
configurable: true
});
}
const stream = this._stream;
this._stream = null;
return stream;
}
}
/**
 * Awaits a prerender operation, drains its `prelude` stream fully into
 * memory, and wraps the collected chunks in a ReactServerPrerenderResult.
 */
async function createReactServerPrerenderResult(underlying) {
    const { prelude } = await underlying;
    const reader = prelude.getReader();
    const collected = [];
    for(;;){
        const { done, value } = await reader.read();
        if (done) {
            return new ReactServerPrerenderResult(collected);
        }
        collected.push(value);
    }
}
/**
 * Drains a render's ReadableStream fully into memory and wraps the
 * collected chunks in a ReactServerPrerenderResult.
 */
async function createReactServerPrerenderResultFromRender(underlying) {
    const collected = [];
    const reader = underlying.getReader();
    let next = await reader.read();
    while (!next.done) {
        collected.push(next.value);
        next = await reader.read();
    }
    return new ReactServerPrerenderResult(collected);
}
/**
 * Holds the fully-buffered chunks of a prerendered React server payload and
 * replays them as streams. The `consume*` variants transfer ownership: after
 * any of them (or `consume()`), further access throws an InvariantError.
 */
class ReactServerPrerenderResult {
    constructor(chunks){
        this._chunks = chunks;
    }
    // Returns the chunks, or throws if this result was already consumed.
    assertChunks(expression) {
        const chunks = this._chunks;
        if (chunks === null) {
            throw Object.defineProperty(new _invarianterror.InvariantError(`Cannot \`${expression}\` on a ReactServerPrerenderResult that has already been consumed.`), "__NEXT_ERROR_CODE", {
                value: "E593",
                enumerable: false,
                configurable: true
            });
        }
        return chunks;
    }
    // Like assertChunks, but also marks this result as consumed.
    consumeChunks(expression) {
        const chunks = this.assertChunks(expression);
        this.consume();
        return chunks;
    }
    consume() {
        this._chunks = null;
    }
    asUnclosingStream() {
        return createUnclosingStream(this.assertChunks('asUnclosingStream()'));
    }
    consumeAsUnclosingStream() {
        return createUnclosingStream(this.consumeChunks('consumeAsUnclosingStream()'));
    }
    asStream() {
        return createClosingStream(this.assertChunks('asStream()'));
    }
    consumeAsStream() {
        return createClosingStream(this.consumeChunks('consumeAsStream()'));
    }
}
function createUnclosingStream(chunks) {
let i = 0;
return new ReadableStream({
async pull (controller) {
if (i < chunks.length) {
controller.enqueue(chunks[i++]);
}
// we intentionally keep the stream open. The consumer will clear
// out chunks once finished and the remaining memory will be GC'd
// when this object goes out of scope
}
});
}
function createClosingStream(chunks) {
let i = 0;
return new ReadableStream({
async pull (controller) {
if (i < chunks.length) {
controller.enqueue(chunks[i++]);
} else {
controller.close();
}
}
});
}
/**
 * Tees the prelude stream and peeks one read off the second branch to
 * determine whether the prelude is empty, returning the untouched first
 * branch alongside that flag.
 */
async function processPrelude(unprocessedPrelude) {
    const [prelude, peek] = unprocessedPrelude.tee();
    const peekReader = peek.getReader();
    const { done } = await peekReader.read();
    peekReader.cancel();
    return {
        prelude,
        preludeIsEmpty: done === true
    };
}
//# sourceMappingURL=app-render-prerender-utils.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,8 @@
/**
* This is a utility function to make scheduling sequential tasks that run back to back easier.
* We schedule on the same queue (setTimeout) at the same time to ensure no other events can sneak in between.
*
* The first function runs in the first task. Each subsequent function runs in its own task.
* The returned promise resolves after the last task completes.
*/
export declare function runInSequentialTasks<R>(first: () => R, ...rest: Array<() => void>): Promise<Awaited<R>>;

View File

@@ -0,0 +1,76 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
Object.defineProperty(exports, "runInSequentialTasks", {
enumerable: true,
get: function() {
return runInSequentialTasks;
}
});
const _invarianterror = require("../../shared/lib/invariant-error");
const _apprenderscheduling = require("./app-render-scheduling");
const _fastsetimmediateexternal = require("../node-environment-extensions/fast-set-immediate.external");
const _isthenable = require("../../shared/lib/is-thenable");
function noop() {}
/**
 * Schedules `first` and each function in `rest` as back-to-back macrotasks
 * that are guaranteed to run in the same event-loop iteration (via
 * `createAtomicTimerGroup`). Resolves, one task after the last callback,
 * with the value returned by `first`. If any callback throws, the remaining
 * scheduled tasks are cancelled and the returned promise rejects.
 * Throws an InvariantError when invoked in the edge runtime, where this
 * timer coordination is unavailable.
 */
function runInSequentialTasks(first, ...rest) {
    if (process.env.NEXT_RUNTIME === 'edge') {
        throw Object.defineProperty(new _invarianterror.InvariantError('`runInSequentialTasks` should not be called in edge runtime.'), "__NEXT_ERROR_CODE", {
            value: "E1054",
            enumerable: false,
            configurable: true
        });
    } else {
        return new Promise((resolve, reject)=>{
            // All timers scheduled through this group share one `_idleStart`,
            // so they fire in the same timer phase with nothing in between.
            const scheduleTimeout = (0, _apprenderscheduling.createAtomicTimerGroup)();
            const ids = [];
            let result;
            // Task 1: run `first` and remember its result for the final task.
            ids.push(scheduleTimeout(()=>{
                try {
                    (0, _fastsetimmediateexternal.DANGEROUSLY_runPendingImmediatesAfterCurrentTask)();
                    result = first();
                    // If the first function returns a thenable, suppress unhandled
                    // rejections. A later task in the sequence (e.g. an abort) may
                    // cause the promise to reject, and we don't want that to surface
                    // as an unhandled rejection — the caller will observe the
                    // rejection when they await the returned promise.
                    if ((0, _isthenable.isThenable)(result)) {
                        result.then(noop, noop);
                    }
                } catch (err) {
                    // Cancel every other scheduled task (index 0 is this one).
                    for(let i = 1; i < ids.length; i++){
                        clearTimeout(ids[i]);
                    }
                    reject(err);
                }
            }));
            // One task per `rest` callback, in order.
            for(let i = 0; i < rest.length; i++){
                const fn = rest[i];
                let index = ids.length;
                ids.push(scheduleTimeout(()=>{
                    try {
                        (0, _fastsetimmediateexternal.DANGEROUSLY_runPendingImmediatesAfterCurrentTask)();
                        fn();
                    } catch (err) {
                        // clear remaining timeouts
                        while(++index < ids.length){
                            clearTimeout(ids[index]);
                        }
                        reject(err);
                    }
                }));
            }
            // We wait a task before resolving
            ids.push(scheduleTimeout(()=>{
                try {
                    (0, _fastsetimmediateexternal.expectNoPendingImmediates)();
                    resolve(result);
                } catch (err) {
                    reject(err);
                }
            }));
        });
    }
}
//# sourceMappingURL=app-render-render-utils.js.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/server/app-render/app-render-render-utils.ts"],"sourcesContent":["import { InvariantError } from '../../shared/lib/invariant-error'\nimport { createAtomicTimerGroup } from './app-render-scheduling'\nimport {\n DANGEROUSLY_runPendingImmediatesAfterCurrentTask,\n expectNoPendingImmediates,\n} from '../node-environment-extensions/fast-set-immediate.external'\nimport { isThenable } from '../../shared/lib/is-thenable'\n\nfunction noop() {}\n\n/**\n * This is a utility function to make scheduling sequential tasks that run back to back easier.\n * We schedule on the same queue (setTimeout) at the same time to ensure no other events can sneak in between.\n *\n * The first function runs in the first task. Each subsequent function runs in its own task.\n * The returned promise resolves after the last task completes.\n */\nexport function runInSequentialTasks<R>(\n first: () => R,\n ...rest: Array<() => void>\n): Promise<Awaited<R>> {\n if (process.env.NEXT_RUNTIME === 'edge') {\n throw new InvariantError(\n '`runInSequentialTasks` should not be called in edge runtime.'\n )\n } else {\n return new Promise((resolve, reject) => {\n const scheduleTimeout = createAtomicTimerGroup()\n const ids: ReturnType<typeof scheduleTimeout>[] = []\n\n let result: R\n ids.push(\n scheduleTimeout(() => {\n try {\n DANGEROUSLY_runPendingImmediatesAfterCurrentTask()\n result = first()\n // If the first function returns a thenable, suppress unhandled\n // rejections. A later task in the sequence (e.g. 
an abort) may\n // cause the promise to reject, and we don't want that to surface\n // as an unhandled rejection — the caller will observe the\n // rejection when they await the returned promise.\n if (isThenable(result)) {\n result.then(noop, noop)\n }\n } catch (err) {\n for (let i = 1; i < ids.length; i++) {\n clearTimeout(ids[i])\n }\n reject(err)\n }\n })\n )\n\n for (let i = 0; i < rest.length; i++) {\n const fn = rest[i]\n let index = ids.length\n\n ids.push(\n scheduleTimeout(() => {\n try {\n DANGEROUSLY_runPendingImmediatesAfterCurrentTask()\n fn()\n } catch (err) {\n // clear remaining timeouts\n while (++index < ids.length) {\n clearTimeout(ids[index])\n }\n reject(err)\n }\n })\n )\n }\n\n // We wait a task before resolving\n ids.push(\n scheduleTimeout(() => {\n try {\n expectNoPendingImmediates()\n resolve(result as Awaited<R>)\n } catch (err) {\n reject(err)\n }\n })\n )\n })\n }\n}\n"],"names":["runInSequentialTasks","noop","first","rest","process","env","NEXT_RUNTIME","InvariantError","Promise","resolve","reject","scheduleTimeout","createAtomicTimerGroup","ids","result","push","DANGEROUSLY_runPendingImmediatesAfterCurrentTask","isThenable","then","err","i","length","clearTimeout","fn","index","expectNoPendingImmediates"],"mappings":";;;;+BAiBgBA;;;eAAAA;;;gCAjBe;qCACQ;0CAIhC;4BACoB;AAE3B,SAASC,QAAQ;AASV,SAASD,qBACdE,KAAc,EACd,GAAGC,IAAuB;IAE1B,IAAIC,QAAQC,GAAG,CAACC,YAAY,KAAK,QAAQ;QACvC,MAAM,qBAEL,CAFK,IAAIC,8BAAc,CACtB,iEADI,qBAAA;mBAAA;wBAAA;0BAAA;QAEN;IACF,OAAO;QACL,OAAO,IAAIC,QAAQ,CAACC,SAASC;YAC3B,MAAMC,kBAAkBC,IAAAA,2CAAsB;YAC9C,MAAMC,MAA4C,EAAE;YAEpD,IAAIC;YACJD,IAAIE,IAAI,CACNJ,gBAAgB;gBACd,IAAI;oBACFK,IAAAA,0EAAgD;oBAChDF,SAASZ;oBACT,+DAA+D;oBAC/D,+DAA+D;oBAC/D,iEAAiE;oBACjE,0DAA0D;oBAC1D,kDAAkD;oBAClD,IAAIe,IAAAA,sBAAU,EAACH,SAAS;wBACtBA,OAAOI,IAAI,CAACjB,MAAMA;oBACpB;gBACF,EAAE,OAAOkB,KAAK;oBACZ,IAAK,IAAIC,IAAI,GAAGA,IAAIP,IAAIQ,MAAM,EAAED,IAAK;wBACnCE,aAAaT,GAAG,CAACO,EAAE;oBACrB;oBACAV,OAAOS;gBACT;YACF;YAGF,IAAK,IAAIC,IAAI,GAAGA,IAAIjB
,KAAKkB,MAAM,EAAED,IAAK;gBACpC,MAAMG,KAAKpB,IAAI,CAACiB,EAAE;gBAClB,IAAII,QAAQX,IAAIQ,MAAM;gBAEtBR,IAAIE,IAAI,CACNJ,gBAAgB;oBACd,IAAI;wBACFK,IAAAA,0EAAgD;wBAChDO;oBACF,EAAE,OAAOJ,KAAK;wBACZ,2BAA2B;wBAC3B,MAAO,EAAEK,QAAQX,IAAIQ,MAAM,CAAE;4BAC3BC,aAAaT,GAAG,CAACW,MAAM;wBACzB;wBACAd,OAAOS;oBACT;gBACF;YAEJ;YAEA,kCAAkC;YAClCN,IAAIE,IAAI,CACNJ,gBAAgB;gBACd,IAAI;oBACFc,IAAAA,mDAAyB;oBACzBhB,QAAQK;gBACV,EAAE,OAAOK,KAAK;oBACZT,OAAOS;gBACT;YACF;QAEJ;IACF;AACF","ignoreList":[0]}

View File

@@ -0,0 +1,8 @@
/**
* Allows scheduling multiple timers (equivalent to `setTimeout(cb, delayMs)`)
* that are guaranteed to run in the same iteration of the event loop.
*
* @param delayMs - the delay to pass to `setTimeout`. (default: 0)
*
* */
export declare function createAtomicTimerGroup(delayMs?: number): (callback: () => void) => NodeJS.Timeout;

View File

@@ -0,0 +1,188 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
Object.defineProperty(exports, "createAtomicTimerGroup", {
enumerable: true,
get: function() {
return createAtomicTimerGroup;
}
});
const _invarianterror = require("../../shared/lib/invariant-error");
const _fastsetimmediateexternal = require("../node-environment-extensions/fast-set-immediate.external");
/*
==========================
| Background |
==========================
Node.js does not guarantee that two timers scheduled back to back will run
on the same iteration of the event loop:
```ts
setTimeout(one, 0)
setTimeout(two, 0)
```
Internally, each timer is assigned a `_idleStart` property that holds
an internal libuv timestamp in millisecond resolution.
This will be used to determine if the timer is already "expired" and should be executed.
However, even in sync code, it's possible for two timers to get different `_idleStart` values.
This can cause one of the timers to be executed, and the other to be delayed until the next timer phase.
The delaying happens [here](https://github.com/nodejs/node/blob/c208ffc66bb9418ff026c4e3fa82e5b4387bd147/lib/internal/timers.js#L556-L564).
and can be debugged by running node with `NODE_DEBUG=timer`.
The easiest way to observe it is to run this program in a loop until it exits with status 1:
```
// test.js
let immediateRan = false
const t1 = setTimeout(() => {
console.log('timeout 1')
setImmediate(() => {
console.log('immediate 1')
immediateRan = true
})
})
const t2 = setTimeout(() => {
console.log('timeout 2')
if (immediateRan) {
console.log('immediate ran before the second timeout!')
console.log(
`t1._idleStart: ${t1._idleStart}, t2_idleStart: ${t2._idleStart}`
);
process.exit(1)
}
})
```
```bash
#!/usr/bin/env bash
i=1;
while true; do
output="$(NODE_DEBUG=timer node test.js 2>&1)";
if [ "$?" -eq 1 ]; then
echo "failed after $i iterations";
echo "$output";
break;
fi;
i=$((i+1));
done
```
If `t2` is deferred to the next iteration of the event loop,
then the immediate scheduled from inside `t1` will run first.
When this occurs, `_idleStart` is reliably different between `t1` and `t2`.
==========================
| Solution |
==========================
We can guarantee that multiple timers (with the same delay, usually `0`)
run together without any delays by making sure that their `_idleStart`s are the same,
because that's what's used to determine if a timer should be deferred or not.
Luckily, this property is currently exposed to userland and mutable,
so we can patch it.
Another related trick we could potentially apply is making
a timer immediately be considered expired by doing `timer._idleStart -= 2`.
(the value must be more than `1`, the delay that actually gets set for `setTimeout(cb, 0)`).
This makes node view this timer as "a 1ms timer scheduled 2ms ago",
meaning that it should definitely run in the next timer phase.
However, I'm not confident we know all the side effects of doing this,
so for now, simply ensuring coordination is enough.
*/ let shouldAttemptPatching = true;
// One-time-per-failure warning emitted when the runtime's setTimeout
// internals don't support the `_idleStart` patching that atomic timer
// groups rely on (see the module comment above for the background).
function warnAboutTimers() {
    console.warn("Next.js cannot guarantee that Cache Components will run as expected due to the current runtime's implementation of `setTimeout()`.\nPlease report a github issue here: https://github.com/vercel/next.js/issues/new/");
}
/**
 * Returns a `scheduleTimeout(callback)` function whose timers are all
 * guaranteed to fire in the same iteration of the event loop. It does this
 * by copying the first timer's internal `_idleStart` onto every subsequent
 * timer (see the module comment above). If the runtime doesn't expose
 * `_idleStart` (e.g. Bun), or an interleaved immediate proves the guarantee
 * failed, patching is disabled globally and a warning is emitted — the
 * timers still run, just without the atomicity guarantee.
 * Throws an InvariantError in the edge runtime.
 *
 * @param delayMs delay passed to `setTimeout` (default 0)
 */
function createAtomicTimerGroup(delayMs = 0) {
    if (process.env.NEXT_RUNTIME === 'edge') {
        throw Object.defineProperty(new _invarianterror.InvariantError('createAtomicTimerGroup cannot be called in the edge runtime'), "__NEXT_ERROR_CODE", {
            value: "E934",
            enumerable: false,
            configurable: true
        });
    } else {
        let isFirstCallback = true;
        // `_idleStart` captured from the first scheduled timer; copied onto
        // all later timers so they expire together.
        let firstTimerIdleStart = null;
        let didFirstTimerRun = false;
        // As a sanity check, we schedule an immediate from the first timeout
        // to check if the execution was interrupted (i.e. if it ran between the timeouts).
        // Note that we're deliberately bypassing the "fast setImmediate" patch here --
        // otherwise, this check would always fail, because the immediate
        // would always run before the second timeout.
        let didImmediateRun = false;
        function runFirstCallback(callback) {
            didFirstTimerRun = true;
            if (shouldAttemptPatching) {
                (0, _fastsetimmediateexternal.unpatchedSetImmediate)(()=>{
                    didImmediateRun = true;
                });
            }
            return callback();
        }
        function runSubsequentCallback(callback) {
            if (shouldAttemptPatching) {
                if (didImmediateRun) {
                    // If the immediate managed to run between the timers, then we're not
                    // able to provide the guarantees that we're supposed to
                    shouldAttemptPatching = false;
                    warnAboutTimers();
                }
            }
            return callback();
        }
        return function scheduleTimeout(callback) {
            // Once the first timer fired, adding more timers could no longer
            // share its iteration of the event loop.
            if (didFirstTimerRun) {
                throw Object.defineProperty(new _invarianterror.InvariantError('Cannot schedule more timers into a group that already executed'), "__NEXT_ERROR_CODE", {
                    value: "E935",
                    enumerable: false,
                    configurable: true
                });
            }
            const timer = setTimeout(isFirstCallback ? runFirstCallback : runSubsequentCallback, delayMs, callback);
            isFirstCallback = false;
            if (!shouldAttemptPatching) {
                // We already tried patching some timers, and it didn't work.
                // No point trying again.
                return timer;
            }
            // NodeJS timers have a `_idleStart` property, but it doesn't exist e.g. in Bun.
            // If it's not present, we'll warn and try to continue.
            try {
                if ('_idleStart' in timer && typeof timer._idleStart === 'number') {
                    // If this is the first timer that was scheduled, save its `_idleStart`.
                    // We'll copy it onto subsequent timers to guarantee that they'll all be
                    // considered expired in the same iteration of the event loop
                    // and thus will all be executed in the same timer phase.
                    if (firstTimerIdleStart === null) {
                        firstTimerIdleStart = timer._idleStart;
                    } else {
                        timer._idleStart = firstTimerIdleStart;
                    }
                } else {
                    shouldAttemptPatching = false;
                    warnAboutTimers();
                }
            } catch (err) {
                // This should never fail in current Node, but it might start failing in the future.
                // We might be okay even without tweaking the timers, so warn and try to continue.
                console.error(Object.defineProperty(new _invarianterror.InvariantError('An unexpected error occurred while adjusting `_idleStart` on an atomic timer', {
                    cause: err
                }), "__NEXT_ERROR_CODE", {
                    value: "E933",
                    enumerable: false,
                    configurable: true
                }));
                shouldAttemptPatching = false;
                warnAboutTimers();
            }
            return timer;
        };
    }
}
//# sourceMappingURL=app-render-scheduling.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,90 @@
import type { RenderOpts, PreloadCallbacks } from './types';
import type { ActionResult, DynamicParamTypesShort, DynamicSegmentTuple, FlightRouterState, CacheNodeSeedData } from '../../shared/lib/app-router-types';
import { type WorkStore } from '../app-render/work-async-storage.external';
import type { RequestStore } from '../app-render/work-unit-async-storage.external';
import type { NextParsedUrlQuery } from '../request-meta';
import type { LoaderTree } from '../lib/app-dir-module';
import type { AppPageModule } from '../route-modules/app-page/module';
import type { BaseNextRequest, BaseNextResponse } from '../base-http';
import RenderResult, { type AppPageRenderResultMetadata } from '../render-result';
import { type ImplicitTags } from '../lib/implicit-tags';
import { parseRelativeUrl } from '../../shared/lib/router/utils/parse-relative-url';
import type { ServerComponentsHmrCache } from '../response-cache';
import { type OpaqueFallbackRouteParams } from '../request/fallback-params';
import type { Params } from '../request/params';
export type GetDynamicParamFromSegment = (loaderTree: LoaderTree) => DynamicParam | null;
export type DynamicParam = {
param: string;
value: string | string[] | null;
treeSegment: DynamicSegmentTuple;
type: DynamicParamTypesShort;
};
export type GenerateFlight = typeof generateDynamicFlightRenderResult;
export type AppSharedContext = {
buildId: string;
deploymentId: string;
clientAssetToken: string;
};
export type AppRenderContext = {
sharedContext: AppSharedContext;
workStore: WorkStore;
url: ReturnType<typeof parseRelativeUrl>;
componentMod: AppPageModule;
renderOpts: RenderOpts;
parsedRequestHeaders: ParsedRequestHeaders;
getDynamicParamFromSegment: GetDynamicParamFromSegment;
interpolatedParams: Params;
query: NextParsedUrlQuery;
isPrefetch: boolean;
isPossibleServerAction: boolean;
requestTimestamp: number;
appUsingSizeAdjustment: boolean;
flightRouterState?: FlightRouterState;
requestId: string;
htmlRequestId: string;
pagePath: string;
assetPrefix: string;
isNotFoundPath: boolean;
nonce: string | undefined;
res: BaseNextResponse;
/**
* For now, the implicit tags are common for the whole route. If we ever start
* rendering/revalidating segments independently, they need to move to the
* work unit store.
*/
implicitTags: ImplicitTags;
};
interface ParsedRequestHeaders {
/**
* Router state provided from the client-side router. Used to handle rendering
* from the common layout down. This value will be undefined if the request is
* not a client-side navigation request, or if the request is a prefetch
* request.
*/
readonly flightRouterState: FlightRouterState | undefined;
readonly isPrefetchRequest: boolean;
readonly isRuntimePrefetchRequest: boolean;
readonly isRouteTreePrefetchRequest: boolean;
readonly isHmrRefresh: boolean;
readonly isRSCRequest: boolean;
readonly nonce: string | undefined;
readonly previouslyRevalidatedTags: string[];
readonly requestId: string | undefined;
readonly htmlRequestId: string | undefined;
}
/**
* Produces a RenderResult containing the Flight data for the given request. See
* `generateDynamicRSCPayload` for information on the contents of the render result.
*/
declare function generateDynamicFlightRenderResult(req: BaseNextRequest, ctx: AppRenderContext, requestStore: RequestStore, options?: {
actionResult: ActionResult;
skipPageRendering: boolean;
componentTree?: CacheNodeSeedData;
preloadCallbacks?: PreloadCallbacks;
temporaryReferences?: WeakMap<any, string>;
waitUntil?: Promise<unknown>;
}): Promise<RenderResult>;
export type BinaryStreamOf<T> = ReadableStream<Uint8Array>;
export type AppPageRender = (req: BaseNextRequest, res: BaseNextResponse, pagePath: string, query: NextParsedUrlQuery, fallbackRouteParams: OpaqueFallbackRouteParams | null, renderOpts: RenderOpts, serverComponentsHmrCache: ServerComponentsHmrCache | undefined, sharedContext: AppSharedContext) => Promise<RenderResult<AppPageRenderResultMetadata>>;
export declare const renderToHTMLOrFlight: AppPageRender;
export {};

4469
node_modules/next/dist/server/app-render/app-render.js generated vendored Normal file

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,4 @@
import type { AsyncLocalStorage } from 'async_hooks';
export declare function createAsyncLocalStorage<Store extends {}>(): AsyncLocalStorage<Store>;
export declare function bindSnapshot<T>(fn: T): T;
export declare function createSnapshot(): <R, TArgs extends any[]>(fn: (...args: TArgs) => R, ...args: TArgs) => R;

View File

@@ -0,0 +1,76 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
0 && (module.exports = {
bindSnapshot: null,
createAsyncLocalStorage: null,
createSnapshot: null
});
function _export(target, all) {
for(var name in all)Object.defineProperty(target, name, {
enumerable: true,
get: all[name]
});
}
_export(exports, {
bindSnapshot: function() {
return bindSnapshot;
},
createAsyncLocalStorage: function() {
return createAsyncLocalStorage;
},
createSnapshot: function() {
return createSnapshot;
}
});
const sharedAsyncLocalStorageNotAvailableError = Object.defineProperty(new Error('Invariant: AsyncLocalStorage accessed in runtime where it is not available'), "__NEXT_ERROR_CODE", {
value: "E504",
enumerable: false,
configurable: true
});
/**
 * Stand-in used when the runtime provides no global AsyncLocalStorage.
 * Reads always yield `undefined`; any attempt to establish or tear down a
 * context throws the shared "not available" error.
 */
class FakeAsyncLocalStorage {
    getStore() {
        // This fake implementation of AsyncLocalStorage always returns `undefined`.
        return undefined;
    }
    disable() {
        throw sharedAsyncLocalStorageNotAvailableError;
    }
    run() {
        throw sharedAsyncLocalStorageNotAvailableError;
    }
    exit() {
        throw sharedAsyncLocalStorageNotAvailableError;
    }
    enterWith() {
        throw sharedAsyncLocalStorageNotAvailableError;
    }
    static bind(fn) {
        // With no context tracking there is nothing to bind; return as-is.
        return fn;
    }
}
// The host-provided AsyncLocalStorage constructor, if this runtime exposes one
// on globalThis; otherwise falsy.
const maybeGlobalAsyncLocalStorage = typeof globalThis !== 'undefined' && globalThis.AsyncLocalStorage;
// Instantiates a real AsyncLocalStorage when available, else the throwing stub.
function createAsyncLocalStorage() {
    const Implementation = maybeGlobalAsyncLocalStorage
        ? maybeGlobalAsyncLocalStorage
        : FakeAsyncLocalStorage;
    return new Implementation();
}
// WARNING: Don't pass a named function to this argument! See: https://github.com/facebook/react/pull/34911
function bindSnapshot(fn) {
    // With a real ALS, capture the current async context around `fn`;
    // the fallback's static bind is the identity function.
    if (!maybeGlobalAsyncLocalStorage) {
        return FakeAsyncLocalStorage.bind(fn);
    }
    return maybeGlobalAsyncLocalStorage.bind(fn);
}
// Returns a function that replays callbacks inside the async context that was
// current when the snapshot was taken. Without a real AsyncLocalStorage the
// "snapshot" degenerates to a plain invoker.
function createSnapshot() {
    if (!maybeGlobalAsyncLocalStorage) {
        return (fn, ...args)=>fn(...args);
    }
    return maybeGlobalAsyncLocalStorage.snapshot();
}
//# sourceMappingURL=async-local-storage.js.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/server/app-render/async-local-storage.ts"],"sourcesContent":["import type { AsyncLocalStorage } from 'async_hooks'\n\nconst sharedAsyncLocalStorageNotAvailableError = new Error(\n 'Invariant: AsyncLocalStorage accessed in runtime where it is not available'\n)\n\nclass FakeAsyncLocalStorage<Store extends {}>\n implements AsyncLocalStorage<Store>\n{\n disable(): void {\n throw sharedAsyncLocalStorageNotAvailableError\n }\n\n getStore(): Store | undefined {\n // This fake implementation of AsyncLocalStorage always returns `undefined`.\n return undefined\n }\n\n run<R>(): R {\n throw sharedAsyncLocalStorageNotAvailableError\n }\n\n exit<R>(): R {\n throw sharedAsyncLocalStorageNotAvailableError\n }\n\n enterWith(): void {\n throw sharedAsyncLocalStorageNotAvailableError\n }\n\n static bind<T>(fn: T): T {\n return fn\n }\n}\n\nconst maybeGlobalAsyncLocalStorage =\n typeof globalThis !== 'undefined' && (globalThis as any).AsyncLocalStorage\n\nexport function createAsyncLocalStorage<\n Store extends {},\n>(): AsyncLocalStorage<Store> {\n if (maybeGlobalAsyncLocalStorage) {\n return new maybeGlobalAsyncLocalStorage()\n }\n return new FakeAsyncLocalStorage()\n}\n\nexport function bindSnapshot<T>(\n // WARNING: Don't pass a named function to this argument! 
See: https://github.com/facebook/react/pull/34911\n fn: T\n): T {\n if (maybeGlobalAsyncLocalStorage) {\n return maybeGlobalAsyncLocalStorage.bind(fn)\n }\n return FakeAsyncLocalStorage.bind(fn)\n}\n\nexport function createSnapshot(): <R, TArgs extends any[]>(\n fn: (...args: TArgs) => R,\n ...args: TArgs\n) => R {\n if (maybeGlobalAsyncLocalStorage) {\n return maybeGlobalAsyncLocalStorage.snapshot()\n }\n return function (fn: any, ...args: any[]) {\n return fn(...args)\n }\n}\n"],"names":["bindSnapshot","createAsyncLocalStorage","createSnapshot","sharedAsyncLocalStorageNotAvailableError","Error","FakeAsyncLocalStorage","disable","getStore","undefined","run","exit","enterWith","bind","fn","maybeGlobalAsyncLocalStorage","globalThis","AsyncLocalStorage","snapshot","args"],"mappings":";;;;;;;;;;;;;;;;IA+CgBA,YAAY;eAAZA;;IATAC,uBAAuB;eAAvBA;;IAmBAC,cAAc;eAAdA;;;AAvDhB,MAAMC,2CAA2C,qBAEhD,CAFgD,IAAIC,MACnD,+EAD+C,qBAAA;WAAA;gBAAA;kBAAA;AAEjD;AAEA,MAAMC;IAGJC,UAAgB;QACd,MAAMH;IACR;IAEAI,WAA8B;QAC5B,4EAA4E;QAC5E,OAAOC;IACT;IAEAC,MAAY;QACV,MAAMN;IACR;IAEAO,OAAa;QACX,MAAMP;IACR;IAEAQ,YAAkB;QAChB,MAAMR;IACR;IAEA,OAAOS,KAAQC,EAAK,EAAK;QACvB,OAAOA;IACT;AACF;AAEA,MAAMC,+BACJ,OAAOC,eAAe,eAAe,AAACA,WAAmBC,iBAAiB;AAErE,SAASf;IAGd,IAAIa,8BAA8B;QAChC,OAAO,IAAIA;IACb;IACA,OAAO,IAAIT;AACb;AAEO,SAASL,aACd,2GAA2G;AAC3Ga,EAAK;IAEL,IAAIC,8BAA8B;QAChC,OAAOA,6BAA6BF,IAAI,CAACC;IAC3C;IACA,OAAOR,sBAAsBO,IAAI,CAACC;AACpC;AAEO,SAASX;IAId,IAAIY,8BAA8B;QAChC,OAAOA,6BAA6BG,QAAQ;IAC9C;IACA,OAAO,SAAUJ,EAAO,EAAE,GAAGK,IAAW;QACtC,OAAOL,MAAMK;IACf;AACF","ignoreList":[0]}

View File

@@ -0,0 +1,34 @@
/**
 * This class is used to detect when all cache reads for a given render are settled.
 * We do this to allow for cache warming the prerender without having to continue rendering
 * the remainder of the page. This feature is really only useful when the cacheComponents flag is on
 * and should only be used in codepaths gated with this feature.
 */
export declare class CacheSignal {
    /** Number of cache reads currently in flight (beginRead++ / endRead--). */
    private count;
    /** Resolvers for `inputReady()` promises; flushed as soon as reads hit zero. */
    private earlyListeners;
    /** Resolvers for `cacheReady()` promises; flushed one immediate+timeout later. */
    private listeners;
    /** True while a settle check is already scheduled, to avoid double-scheduling. */
    private tickPending;
    /** Cancels the pending immediate/timeout settle check, if any. */
    private pendingTimeoutCleanup;
    /** Other CacheSignals mirroring this signal's begin/end read calls, if any. */
    private subscribedSignals;
    constructor();
    private noMorePendingCaches;
    private invokeListenersIfNoPendingReads;
    /**
     * This promise waits until there are no more in progress cache reads but no later.
     * This allows for adding more cache reads after to delay cacheReady.
     */
    inputReady(): Promise<void>;
    /**
     * If there are inflight cache reads this Promise can resolve in a microtask however
     * if there are no inflight cache reads then we wait at least one task to allow initial
     * cache reads to be initiated.
     */
    cacheReady(): Promise<void>;
    /** Marks the start of a cache read; cancels any pending settle check. */
    beginRead(): void;
    /** Marks the end of a cache read; throws if not paired with a beginRead(). */
    endRead(): void;
    /** True while at least one tracked cache read is in flight. */
    hasPendingReads(): boolean;
    /** Wraps `promise` in a beginRead/endRead pair and returns it unchanged. */
    trackRead<T>(promise: Promise<T>): Promise<T>;
    /** Mirrors this signal's reads onto `subscriber`; returns an unsubscribe function. */
    subscribeToReads(subscriber: CacheSignal): () => void;
    unsubscribeFromReads(subscriber: CacheSignal): void;
}

View File

@@ -0,0 +1,181 @@
/**
 * This class is used to detect when all cache reads for a given render are settled.
 * We do this to allow for cache warming the prerender without having to continue rendering
 * the remainder of the page. This feature is really only useful when the cacheComponents flag is on
 * and should only be used in codepaths gated with this feature.
 */ "use strict";
Object.defineProperty(exports, "__esModule", {
    value: true
});
Object.defineProperty(exports, "CacheSignal", {
    enumerable: true,
    get: function() {
        return CacheSignal;
    }
});
const _invarianterror = require("../../shared/lib/invariant-error");
class CacheSignal {
    constructor(){
        // Number of cache reads currently in flight.
        this.count = 0;
        // Resolvers for inputReady(): flushed on the nextTick after count hits 0.
        this.earlyListeners = [];
        // Resolvers for cacheReady(): flushed only after an immediate + timeout
        // pass with count still 0 (see invokeListenersIfNoPendingReads).
        this.listeners = [];
        this.tickPending = false;
        this.pendingTimeoutCleanup = null;
        this.subscribedSignals = null;
        this.invokeListenersIfNoPendingReads = ()=>{
            this.pendingTimeoutCleanup = null;
            // Only flush if no new read started while the immediate/timeout was pending.
            if (this.count === 0) {
                for(let i = 0; i < this.listeners.length; i++){
                    this.listeners[i]();
                }
                this.listeners.length = 0;
            }
        };
        if (process.env.NEXT_RUNTIME === 'edge') {
            // we rely on `process.nextTick`, which is not supported in edge
            throw Object.defineProperty(new _invarianterror.InvariantError('CacheSignal cannot be used in the edge runtime, because `cacheComponents` does not support it.'), "__NEXT_ERROR_CODE", {
                value: "E728",
                enumerable: false,
                configurable: true
            });
        }
    }
    // Schedules the settle checks that run once no reads are pending:
    // earlyListeners on the next tick, listeners after an immediate + timeout.
    noMorePendingCaches() {
        if (!this.tickPending) {
            this.tickPending = true;
            queueMicrotask(()=>process.nextTick(()=>{
                this.tickPending = false;
                if (this.count === 0) {
                    for(let i = 0; i < this.earlyListeners.length; i++){
                        this.earlyListeners[i]();
                    }
                    this.earlyListeners.length = 0;
                }
            }));
        }
        // After a cache resolves, React will schedule new rendering work:
        // - in a microtask (when prerendering)
        // - in setImmediate (when rendering)
        // To cover both of these, we have to make sure that we let immediates execute at least once after each cache resolved.
        // We don't know when the pending timeout was scheduled (and if it's about to resolve),
        // so by scheduling a new one, we can be sure that we'll go around the event loop at least once.
        if (this.pendingTimeoutCleanup) {
            // We cancel the timeout in beginRead, so this shouldn't ever be active here,
            // but we still cancel it defensively.
            this.pendingTimeoutCleanup();
        }
        this.pendingTimeoutCleanup = scheduleImmediateAndTimeoutWithCleanup(this.invokeListenersIfNoPendingReads);
    }
    /**
     * This promise waits until there are no more in progress cache reads but no later.
     * This allows for adding more cache reads after to delay cacheReady.
     */ inputReady() {
        return new Promise((resolve)=>{
            this.earlyListeners.push(resolve);
            if (this.count === 0) {
                this.noMorePendingCaches();
            }
        });
    }
    /**
     * If there are inflight cache reads this Promise can resolve in a microtask however
     * if there are no inflight cache reads then we wait at least one task to allow initial
     * cache reads to be initiated.
     */ cacheReady() {
        return new Promise((resolve)=>{
            this.listeners.push(resolve);
            if (this.count === 0) {
                this.noMorePendingCaches();
            }
        });
    }
    // Marks the start of a cache read and mirrors it to any subscribed signals.
    beginRead() {
        this.count++;
        // There's a new pending cache, so if there's a `noMorePendingCaches` timeout running,
        // we should cancel it.
        if (this.pendingTimeoutCleanup) {
            this.pendingTimeoutCleanup();
            this.pendingTimeoutCleanup = null;
        }
        if (this.subscribedSignals !== null) {
            for (const subscriber of this.subscribedSignals){
                subscriber.beginRead();
            }
        }
    }
    // Marks the end of a cache read; must be paired 1:1 with beginRead().
    endRead() {
        if (this.count === 0) {
            throw Object.defineProperty(new _invarianterror.InvariantError('CacheSignal got more endRead() calls than beginRead() calls'), "__NEXT_ERROR_CODE", {
                value: "E678",
                enumerable: false,
                configurable: true
            });
        }
        // If this is the last read we need to wait a task before we can claim the cache is settled.
        // The cache read will likely ping a Server Component which can read from the cache again and this
        // will play out in a microtask so we need to only resolve pending listeners if we're still at 0
        // after at least one task.
        // We only want one task scheduled at a time so when we hit count 1 we don't decrement the counter immediately.
        // If intervening reads happen before the scheduled task runs they will never observe count 1 preventing reentrency
        this.count--;
        if (this.count === 0) {
            this.noMorePendingCaches();
        }
        if (this.subscribedSignals !== null) {
            for (const subscriber of this.subscribedSignals){
                subscriber.endRead();
            }
        }
    }
    hasPendingReads() {
        return this.count > 0;
    }
    // Tracks `promise` as a cache read; resolves/rejections both end the read.
    trackRead(promise) {
        this.beginRead();
        // `promise.finally()` still rejects, so don't use it here to avoid unhandled rejections
        const onFinally = this.endRead.bind(this);
        promise.then(onFinally, onFinally);
        return promise;
    }
    // Mirrors every begin/end read on this signal onto `subscriber`.
    subscribeToReads(subscriber) {
        if (subscriber === this) {
            throw Object.defineProperty(new _invarianterror.InvariantError('A CacheSignal cannot subscribe to itself'), "__NEXT_ERROR_CODE", {
                value: "E679",
                enumerable: false,
                configurable: true
            });
        }
        if (this.subscribedSignals === null) {
            this.subscribedSignals = new Set();
        }
        this.subscribedSignals.add(subscriber);
        // we'll notify the subscriber of each endRead() on this signal,
        // so we need to give it a corresponding beginRead() for each read we have in flight now.
        for(let i = 0; i < this.count; i++){
            subscriber.beginRead();
        }
        return this.unsubscribeFromReads.bind(this, subscriber);
    }
    unsubscribeFromReads(subscriber) {
        if (!this.subscribedSignals) {
            return;
        }
        this.subscribedSignals.delete(subscriber);
        // we don't need to set the set back to `null` if it's empty --
        // if other signals are subscribing to this one, it'll likely get more subscriptions later,
        // so we'd have to allocate a fresh set again when that happens.
    }
}
// Runs `cb` after one pass through the immediate queue followed by one pass
// through the timer queue. Returns a canceller that clears whichever of the
// two scheduled handles (first the immediate, then the timeout) is still
// outstanding at the moment it is invoked.
function scheduleImmediateAndTimeoutWithCleanup(cb) {
    let cancelCurrent;
    const immediateHandle = setImmediate(()=>{
        // The immediate fired: hand off to a zero-delay timeout and retarget
        // the canceller at that timeout instead.
        const timeoutHandle = setTimeout(cb, 0);
        cancelCurrent = ()=>clearTimeout(timeoutHandle);
    });
    cancelCurrent = ()=>clearImmediate(immediateHandle);
    return ()=>cancelCurrent();
}
//# sourceMappingURL=cache-signal.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,71 @@
import type { DynamicParamTypesShort, PrefetchHints } from '../../shared/lib/app-router-types';
import type { ManifestNode } from '../../build/webpack/plugins/flight-manifest-plugin';
import { type SegmentRequestKey } from '../../shared/lib/segment-cache/segment-value-encoding';
/** Response shape for the `/_tree` route-tree prefetch: the tree plus its stale time. */
export type RootTreePrefetch = {
    buildId?: string;
    tree: TreePrefetch;
    staleTime: number;
};
/** Dynamic-param metadata on a tree node; semantics of `key`/`siblings` are defined by the router — TODO confirm against app-router-types. */
export type TreePrefetchParam = {
    type: DynamicParamTypesShort;
    key: string | null;
    siblings: readonly string[] | null;
};
/** One node of the route-tree prefetch: segment name, optional param info, and child slots keyed by parallel route. */
export type TreePrefetch = {
    name: string;
    param: TreePrefetchParam | null;
    slots: null | {
        [parallelRouteKey: string]: TreePrefetch;
    };
    /** Bitmask of PrefetchHint flags for this segment and its subtree */
    prefetchHints: number;
};
/** Per-segment prefetch response: the RSC payload plus cache-control metadata. */
export type SegmentPrefetch = {
    buildId?: string;
    rsc: React.ReactNode | null;
    /** True when the segment's data is incomplete (contains dynamic holes). */
    isPartial: boolean;
    staleTime: number;
    /**
     * The set of params that this segment's output depends on. Used by the client
     * cache to determine which entries can be reused across different param
     * values.
     * - `null` means vary params were not tracked (conservative: assume all
     *   params matter)
     * - Empty set means no params were accessed (segment is reusable for any
     *   param values)
     */
    varyParams: Set<string> | null;
};
/**
 * A node in the inlined prefetch tree. Wraps a SegmentPrefetch with child
 * slots so all segments for a route can be bundled into a single response.
 *
 * This is a separate type from SegmentPrefetch because the inlined flow is
 * still gated behind a feature flag. Eventually inlining will always be
 * enabled, and the per-segment and inlined paths will merge.
 */
export type InlinedSegmentPrefetch = {
    segment: SegmentPrefetch;
    slots: null | {
        [parallelRouteKey: string]: InlinedSegmentPrefetch;
    };
};
/**
 * The response shape for the /_inlined prefetch endpoint. Contains all segment
 * data for a route bundled into a single tree structure, plus the head segment.
 */
export type InlinedPrefetchResponse = {
    tree: InlinedSegmentPrefetch;
    head: SegmentPrefetch;
};
/**
 * Walks a prerendered page's Flight data and produces one prefetch response
 * buffer per segment (keyed by SegmentRequestKey), plus the special `/_tree`
 * and `/_full` entries.
 */
export declare function collectSegmentData(isCacheComponentsEnabled: boolean, fullPageDataBuffer: Buffer, staleTime: number, clientModules: ManifestNode, serverConsumerManifest: any, prefetchInlining: boolean, hints: PrefetchHints | null): Promise<Map<SegmentRequestKey, Buffer>>;
/**
 * Compute prefetch hints for a route by measuring segment sizes and deciding
 * which segments should be inlined. Only runs at build time. The results are
 * written to prefetch-hints.json and loaded at server startup.
 *
 * This is a separate pass from collectSegmentData so that the inlining
 * decisions can be fed back into collectSegmentData to control which segments
 * are output as separate entries vs. inlined into their parent.
 */
export declare function collectPrefetchHints(fullPageDataBuffer: Buffer, staleTime: number, clientModules: ManifestNode, serverConsumerManifest: any, maxSize: number, maxBundleSize: number): Promise<PrefetchHints>;

View File

@@ -0,0 +1,600 @@
/* eslint-disable @next/internal/no-ambiguous-jsx -- Bundled in entry-base so it gets the right JSX runtime. */ "use strict";
Object.defineProperty(exports, "__esModule", {
    value: true
});
// Never executed at runtime (`0 &&` short-circuits) — presumably kept so
// static named-export scanners can detect the export names; TODO confirm.
0 && (module.exports = {
    collectPrefetchHints: null,
    collectSegmentData: null
});
// Installs each entry of `all` on `target` as an enumerable lazy getter.
function _export(target, all) {
    for(var name in all)Object.defineProperty(target, name, {
        enumerable: true,
        get: all[name]
    });
}
_export(exports, {
    collectPrefetchHints: function() {
        return collectPrefetchHints;
    },
    collectSegmentData: function() {
        return collectSegmentData;
    }
});
const _jsxruntime = require("react/jsx-runtime");
const _approutertypes = require("../../shared/lib/app-router-types");
const _varyparamsdecoding = require("../../shared/lib/segment-cache/vary-params-decoding");
const _segment = require("../../shared/lib/segment");
const _client = require("react-server-dom-webpack/client");
const _static = require("react-server-dom-webpack/static");
const _nodewebstreamshelper = require("../stream-utils/node-web-streams-helper");
const _scheduler = require("../../lib/scheduler");
const _segmentvalueencoding = require("../../shared/lib/segment-cache/segment-value-encoding");
const _createerrorhandler = require("./create-error-handler");
const _prospectiverenderutils = require("./prospective-render-utils");
const _workasyncstorageexternal = require("./work-async-storage.external");
// Source-map helpers are development-only; both are `undefined` in production.
const filterStackFrame = process.env.NODE_ENV !== 'production' ? require('../lib/source-maps').filterStackFrameDEV : undefined;
const findSourceMapURL = process.env.NODE_ENV !== 'production' ? require('../lib/source-maps').findSourceMapURLDEV : undefined;
// Error handler for segment prerendering. Well-known errors resolve to their
// digest; everything else is only surfaced in debug/verbose builds, because
// the original full-page Flight render already logged it.
function onSegmentPrerenderError(error) {
    const knownDigest = (0, _createerrorhandler.getDigestForWellKnownError)(error);
    if (knownDigest) {
        return knownDigest;
    }
    const verbose = process.env.NEXT_DEBUG_BUILD || process.env.__NEXT_VERBOSE_LOGGING;
    if (!verbose) {
        return;
    }
    const workStore = _workasyncstorageexternal.workAsyncStorage.getStore();
    const route = (workStore == null ? void 0 : workStore.route) ?? 'unknown route';
    (0, _prospectiverenderutils.printDebugThrownValueForProspectiveRender)(error, route, _prospectiverenderutils.Phase.SegmentCollection);
}
/**
 * Extract the FlightRouterState, seed data, and head from a prerendered
 * InitialRSCPayload. Returns null if the payload doesn't match the expected
 * shape (single path with 3 elements).
 */ function extractFlightData(initialRSCPayload) {
    const flightDataPaths = initialRSCPayload.f;
    // FlightDataPath is an unsound type, hence the additional checks.
    // BUGFIX: this guard previously used `&&`, which only rejected payloads
    // failing BOTH checks — e.g. two paths whose first path had 3 elements, or
    // a single path with the wrong arity, slipped through. Either violation of
    // the documented "single path with 3 elements" shape must return null.
    if (flightDataPaths.length !== 1 || flightDataPaths[0].length !== 3) {
        console.error('Internal Next.js error: InitialRSCPayload does not match the expected ' + 'shape for a prerendered page during segment prefetch generation.');
        return null;
    }
    return {
        buildId: initialRSCPayload.b,
        flightRouterState: flightDataPaths[0][0],
        seedData: flightDataPaths[0][1],
        head: flightDataPaths[0][2]
    };
}
// Walks the prerendered page's Flight payload and produces one prefetch
// response buffer per segment, keyed by request key, including the special
// `/_tree` (route tree) and `/_full` (entire page data) entries.
async function collectSegmentData(isCacheComponentsEnabled, fullPageDataBuffer, staleTime, clientModules, serverConsumerManifest, prefetchInlining, hints) {
    // Traverse the router tree and generate a prefetch response for each segment.
    // A mutable map to collect the results as we traverse the route tree.
    const resultMap = new Map();
    // Before we start, warm up the module cache by decoding the page data once.
    // Then we can assume that any remaining async tasks that occur the next time
    // are due to hanging promises caused by dynamic data access. Note we only
    // have to do this once per page, not per individual segment.
    //
    try {
        await (0, _client.createFromReadableStream)((0, _nodewebstreamshelper.streamFromBuffer)(fullPageDataBuffer), {
            findSourceMapURL,
            serverConsumerManifest
        });
        await (0, _scheduler.waitAtLeastOneReactRenderTask)();
    } catch {}
    // Create an abort controller that we'll use to stop the stream.
    const abortController = new AbortController();
    const onCompletedProcessingRouteTree = async ()=>{
        // Since all we're doing is decoding and re-encoding a cached prerender, if
        // serializing the stream takes longer than a microtask, it must because of
        // hanging promises caused by dynamic data.
        await (0, _scheduler.waitAtLeastOneReactRenderTask)();
        abortController.abort();
    };
    // Generate a stream for the route tree prefetch. While we're walking the
    // tree, we'll also spawn additional tasks to generate the segment prefetches.
    // The promises for these tasks are pushed to a mutable array that we will
    // await once the route tree is fully rendered.
    const segmentTasks = [];
    const { prelude: treeStream } = await (0, _static.prerender)(// RootTreePrefetch is not a valid return type for a React component, but
    // we need to use a component so that when we decode the original stream
    // inside of it, the side effects are transferred to the new stream.
    // @ts-expect-error
    /*#__PURE__*/ (0, _jsxruntime.jsx)(PrefetchTreeData, {
        isClientParamParsingEnabled: isCacheComponentsEnabled,
        fullPageDataBuffer: fullPageDataBuffer,
        serverConsumerManifest: serverConsumerManifest,
        clientModules: clientModules,
        staleTime: staleTime,
        segmentTasks: segmentTasks,
        onCompletedProcessingRouteTree: onCompletedProcessingRouteTree,
        prefetchInlining: prefetchInlining,
        hints: hints
    }), clientModules, {
        filterStackFrame,
        signal: abortController.signal,
        onError: onSegmentPrerenderError
    });
    // Write the route tree to a special `/_tree` segment.
    const treeBuffer = await (0, _nodewebstreamshelper.streamToBuffer)(treeStream);
    resultMap.set('/_tree', treeBuffer);
    // Also output the entire full page data response
    resultMap.set('/_full', fullPageDataBuffer);
    // Now that we've finished rendering the route tree, all the segment tasks
    // should have been spawned. Await them in parallel and write the segment
    // prefetches to the result map.
    for (const [segmentPath, buffer] of (await Promise.all(segmentTasks))){
        resultMap.set(segmentPath, buffer);
    }
    return resultMap;
}
// Build-time pass that measures gzip sizes across the route tree and computes
// which segments (and the head) should be inlined. Returns the root hint node.
async function collectPrefetchHints(fullPageDataBuffer, staleTime, clientModules, serverConsumerManifest, maxSize, maxBundleSize) {
    // Warm up the module cache, same as collectSegmentData.
    try {
        await (0, _client.createFromReadableStream)((0, _nodewebstreamshelper.streamFromBuffer)(fullPageDataBuffer), {
            findSourceMapURL,
            serverConsumerManifest
        });
        await (0, _scheduler.waitAtLeastOneReactRenderTask)();
    } catch {}
    // Decode the Flight data to walk the route tree.
    const initialRSCPayload = await (0, _client.createFromReadableStream)(createUnclosingPrefetchStream((0, _nodewebstreamshelper.streamFromBuffer)(fullPageDataBuffer)), {
        findSourceMapURL,
        serverConsumerManifest
    });
    const flightData = extractFlightData(initialRSCPayload);
    if (flightData === null) {
        // Payload shape was unexpected: fall back to an empty hint tree.
        return {
            hints: 0,
            slots: null
        };
    }
    const { buildId, flightRouterState, seedData, head } = flightData;
    // Measure the head (metadata/viewport) gzip size so the main traversal
    // can decide whether to inline it into a page's bundle.
    const headVaryParamsThenable = initialRSCPayload.h;
    const headVaryParams = headVaryParamsThenable !== null ? (0, _varyparamsdecoding.readVaryParams)(headVaryParamsThenable) : null;
    const [, headBuffer] = await renderSegmentPrefetch(buildId, staleTime, head, _segmentvalueencoding.HEAD_REQUEST_KEY, headVaryParams, clientModules);
    const headGzipSize = await getGzipSize(headBuffer);
    // Mutable accumulator: the first page leaf that can fit the head sets
    // this to true. Once set, subsequent leaves skip the check.
    const headInlineState = {
        inlined: false
    };
    // Walk the tree with the parent-first, child-decides algorithm.
    const { node } = await collectPrefetchHintsImpl(flightRouterState, buildId, staleTime, seedData, clientModules, _segmentvalueencoding.ROOT_SEGMENT_REQUEST_KEY, null, maxSize, maxBundleSize, headGzipSize, headInlineState);
    if (!headInlineState.inlined) {
        // No page could accept the head. Set HeadOutlined on the root so the
        // client knows to fetch the head separately.
        node.hints |= _approutertypes.PrefetchHint.HeadOutlined;
    }
    return node;
}
// Measure a segment's gzip size and decide whether it should be inlined.
//
// These hints are computed once during build and never change for the
// lifetime of that deployment. The client can assume that hints delivered as
// part of one request will be the same during a subsequent request, given
// the same build ID. There's no skew to worry about as long as the build
// itself is consistent.
//
// In the Segment Cache, we split page prefetches into multiple requests so
// that each one can be cached and deduped independently. However, some
// segments are small enough that the potential caching benefits are not worth
// the additional network overhead. For these, we inline a parent's data into
// one of its children's responses, avoiding a separate request. The parent
// is inlined into the child (not the other way around) because the parent's
// response is more likely to be shared across multiple pages. The child's
// response is already page-specific, so adding the parent's data there
// doesn't meaningfully reduce deduplication. It's similar to how JS bundlers
// decide whether to inline a module into a chunk.
//
// The algorithm is parent-first, child-decides: the parent measures itself
// and passes its gzip size down. Each child decides whether to accept. A
// child rejects if the parent exceeds maxSize or if accepting would push
// the cumulative inlined bytes past maxBundleSize. This produces
// both ParentInlinedIntoSelf (on the child) and InlinedIntoChild (on the
// parent) in a single pass.
// Recursive worker for collectPrefetchHints. Returns { node, inlinedBytes }:
// `node` is the hint subtree for this segment, `inlinedBytes` the gzipped
// bytes already inlined along this branch (used to enforce maxBundleSize).
async function collectPrefetchHintsImpl(route, buildId, staleTime, seedData, clientModules, // TODO: Consider persisting the computed requestKey into the hints output
// so it doesn't need to be recomputed during the build. This might also
// suggest renaming prefetch-hints.json to something like
// segment-manifest.json, since it would contain more than just hints.
requestKey, parentGzipSize, maxSize, maxBundleSize, headGzipSize, headInlineState) {
    // Render current segment and measure its gzip size.
    let currentGzipSize = null;
    if (seedData !== null) {
        const varyParamsThenable = seedData[4];
        const varyParams = varyParamsThenable !== null ? (0, _varyparamsdecoding.readVaryParams)(varyParamsThenable) : null;
        const [, buffer] = await renderSegmentPrefetch(buildId, staleTime, seedData[0], requestKey, varyParams, clientModules);
        currentGzipSize = await getGzipSize(buffer);
    }
    // Only offer this segment to its children for inlining if its gzip size
    // is below maxSize. Segments above this get their own response.
    const sizeToInline = currentGzipSize !== null && currentGzipSize < maxSize ? currentGzipSize : null;
    // Process children serially (not in parallel) to ensure deterministic
    // results. Since this only runs at build time and the rendering is just
    // re-encoding cached prerenders, this won't impact build times. Each child
    // receives our gzip size and decides whether to inline us. Once a child
    // accepts, we stop offering to remaining siblings — the parent is only
    // inlined into one child. In parallel routes, this avoids duplicating the
    // parent's data across multiple sibling responses.
    const children = route[1];
    const seedDataChildren = seedData !== null ? seedData[1] : null;
    let slots = null;
    let didInlineIntoChild = false;
    let acceptingChildInlinedBytes = 0;
    // Track the smallest inlinedBytes across all children so we know how much
    // budget remains along the best path. When our own parent asks whether we
    // can accept its data, the parent's bytes would flow through to the child
    // with the most remaining headroom.
    let smallestChildInlinedBytes = Infinity;
    let hasChildren = false;
    for(const parallelRouteKey in children){
        hasChildren = true;
        const childRoute = children[parallelRouteKey];
        const childSegment = childRoute[0];
        const childSeedData = seedDataChildren !== null ? seedDataChildren[parallelRouteKey] : null;
        const childRequestKey = (0, _segmentvalueencoding.appendSegmentRequestKeyPart)(requestKey, parallelRouteKey, (0, _segmentvalueencoding.createSegmentRequestKeyPart)(childSegment));
        const childResult = await collectPrefetchHintsImpl(childRoute, buildId, staleTime, childSeedData, clientModules, childRequestKey, // Once a child has accepted us, stop offering to remaining siblings.
        didInlineIntoChild ? null : sizeToInline, maxSize, maxBundleSize, headGzipSize, headInlineState);
        if (slots === null) {
            slots = {};
        }
        slots[parallelRouteKey] = childResult.node;
        if (childResult.node.hints & _approutertypes.PrefetchHint.ParentInlinedIntoSelf) {
            // This child accepted our data — it will include our segment's
            // response in its own. No need to track headroom anymore since
            // we already know which child we're inlined into.
            didInlineIntoChild = true;
            acceptingChildInlinedBytes = childResult.inlinedBytes;
        } else if (!didInlineIntoChild) {
            // Track the child with the most remaining headroom. Used below
            // when deciding whether to accept our own parent's data.
            if (childResult.inlinedBytes < smallestChildInlinedBytes) {
                smallestChildInlinedBytes = childResult.inlinedBytes;
            }
        }
    }
    // Leaf segment: no children have consumed any budget yet.
    if (!hasChildren) {
        smallestChildInlinedBytes = 0;
    }
    // Mark this segment as InlinedIntoChild if one of its children accepted.
    // This means this segment doesn't need its own prefetch response — its
    // data is included in the accepting child's response instead.
    let hints = 0;
    if (didInlineIntoChild) {
        hints |= _approutertypes.PrefetchHint.InlinedIntoChild;
    }
    // inlinedBytes represents the total gzipped bytes of parent data inlined
    // into the deepest "inlining target" along this branch. It starts at 0 at
    // the leaves and grows as parents are inlined going back up the tree. If a
    // child accepted us, our size is already counted in that child's value.
    let inlinedBytes = didInlineIntoChild ? acceptingChildInlinedBytes : smallestChildInlinedBytes;
    // At leaf nodes (pages), try to inline the head (metadata/viewport) into
    // this page's response. The head is treated like an additional inlined
    // entry — it counts against the same total budget. Only the first page
    // that has room gets the head; subsequent pages skip via the shared
    // headInlineState accumulator.
    if (!hasChildren && !headInlineState.inlined) {
        if (inlinedBytes + headGzipSize < maxBundleSize) {
            hints |= _approutertypes.PrefetchHint.HeadInlinedIntoSelf;
            inlinedBytes += headGzipSize;
            headInlineState.inlined = true;
        }
    }
    // Decide whether to accept our own parent's data. Two conditions:
    //
    // 1. The parent offered us a size (parentGzipSize is not null). It's null
    //    when the parent is too large to inline or when this is the root.
    //
    // 2. The total inlined bytes along this branch wouldn't exceed the budget.
    //    Even if each segment is individually small, at some point it no
    //    longer makes sense to keep adding bytes because the combined response
    //    is unique per URL and can't be deduped.
    //
    // A node can be both InlinedIntoChild and ParentInlinedIntoSelf. This
    // happens in multi-level chains: GP → P → C where all are small. C
    // accepts P (P is InlinedIntoChild), then P also accepts GP (P is
    // ParentInlinedIntoSelf). The result: C's response includes both P's
    // and GP's data. The parent's data flows through to the deepest
    // accepting descendant.
    if (parentGzipSize !== null) {
        if (inlinedBytes + parentGzipSize < maxBundleSize) {
            hints |= _approutertypes.PrefetchHint.ParentInlinedIntoSelf;
            inlinedBytes += parentGzipSize;
        }
    }
    return {
        node: {
            hints,
            slots
        },
        inlinedBytes
    };
}
// We use gzip size rather than raw size because it better reflects the actual
// transfer cost. The inlining trade-off is about whether the overhead of an
// additional HTTP request (connection setup, headers, round trip) is worth
// the deduplication benefit of keeping a segment separate. Below some
// compressed size, the request overhead dominates and inlining is better.
// Above it, the deduplication benefit of a cacheable standalone response
// wins out.
// Gzip-compresses `buffer` via the web CompressionStream and reports the
// compressed byte count — an approximation of what an HTTP gzip transfer of
// this payload would cost on the wire.
async function getGzipSize(buffer) {
    const bytes = new Uint8Array(buffer);
    const gzipped = new Blob([bytes]).stream().pipeThrough(new CompressionStream('gzip'));
    const collected = await new Response(gzipped).blob();
    return collected.size;
}
// Server component (rendered via react-server-dom prerender) that decodes the
// full-page Flight payload, spawns per-segment (or inlined) prefetch tasks
// into `segmentTasks`, and returns the `/_tree` RootTreePrefetch payload.
async function PrefetchTreeData({ isClientParamParsingEnabled, fullPageDataBuffer, serverConsumerManifest, clientModules, staleTime, segmentTasks, onCompletedProcessingRouteTree, prefetchInlining, hints }) {
    // We're currently rendering a Flight response for the route tree prefetch.
    // Inside this component, decode the Flight stream for the whole page. This is
    // a hack to transfer the side effects from the original Flight stream (e.g.
    // Float preloads) onto the Flight stream for the tree prefetch.
    // TODO: React needs a better way to do this. Needed for Server Actions, too.
    const initialRSCPayload = await (0, _client.createFromReadableStream)(createUnclosingPrefetchStream((0, _nodewebstreamshelper.streamFromBuffer)(fullPageDataBuffer)), {
        findSourceMapURL,
        serverConsumerManifest
    });
    const flightData = extractFlightData(initialRSCPayload);
    if (flightData === null) {
        return null;
    }
    const { buildId, flightRouterState, seedData, head } = flightData;
    // Extract the head vary params from the decoded response.
    // The head vary params thenable should be fulfilled by now; if not, treat
    // as unknown (null).
    const headVaryParamsThenable = initialRSCPayload.h;
    const headVaryParams = headVaryParamsThenable !== null ? (0, _varyparamsdecoding.readVaryParams)(headVaryParamsThenable) : null;
    // Compute the route metadata tree by traversing the FlightRouterState. As we
    // walk the tree, we will also spawn a task to produce a prefetch response for
    // each segment (unless prefetch inlining is enabled, in which case all
    // segments are bundled into a single /_inlined response).
    const tree = collectSegmentDataImpl(isClientParamParsingEnabled, flightRouterState, buildId, staleTime, seedData, clientModules, _segmentvalueencoding.ROOT_SEGMENT_REQUEST_KEY, segmentTasks, prefetchInlining, hints);
    if (prefetchInlining) {
        // When prefetch inlining is enabled, bundle all segment data into a single
        // /_inlined response instead of individual per-segment responses. The head
        // is also included in the inlined response.
        segmentTasks.push((0, _scheduler.waitAtLeastOneReactRenderTask)().then(()=>renderInlinedPrefetchResponse(flightRouterState, buildId, staleTime, seedData, head, headVaryParams, clientModules)));
    } else {
        // Also spawn a task to produce a prefetch response for the "head" segment.
        // The head contains metadata, like the title; it's not really a route
        // segment, but it contains RSC data, so it's treated like a segment by
        // the client cache.
        segmentTasks.push((0, _scheduler.waitAtLeastOneReactRenderTask)().then(()=>renderSegmentPrefetch(buildId, staleTime, head, _segmentvalueencoding.HEAD_REQUEST_KEY, headVaryParams, clientModules)));
    }
    // Notify the abort controller that we're done processing the route tree.
    // Anything async that happens after this point must be due to hanging
    // promises in the original stream.
    onCompletedProcessingRouteTree();
    // Render the route tree to a special `/_tree` segment.
    const treePrefetch = {
        tree,
        staleTime
    };
    if (buildId) {
        treePrefetch.buildId = buildId;
    }
    return treePrefetch;
}
/**
 * Recursively walks a FlightRouterState subtree, building the per-segment
 * metadata that is sent in the `/_tree` prefetch and — when inlining is
 * disabled — spawning one Flight-encode task per segment that has seed data.
 *
 * Returns `{ name, param, prefetchHints, slots }`, where `slots` maps
 * parallel route keys to child metadata (null when there are no children).
 */ function collectSegmentDataImpl(isClientParamParsingEnabled, route, buildId, staleTime, seedData, clientModules, requestKey, segmentTasks, prefetchInlining, hintTree) {
    // Metadata about the segment. Sent as part of the tree prefetch. Null if
    // there are no children.
    let slotMetadata = null;
    const children = route[1];
    const seedDataChildren = seedData !== null ? seedData[1] : null;
    for(const parallelRouteKey in children){
        const childRoute = children[parallelRouteKey];
        const childSegment = childRoute[0];
        const childSeedData = seedDataChildren !== null ? seedDataChildren[parallelRouteKey] : null;
        // Extend the request key with this child's segment so each node gets
        // a unique, stable key for its prefetch response.
        const childRequestKey = (0, _segmentvalueencoding.appendSegmentRequestKeyPart)(requestKey, parallelRouteKey, (0, _segmentvalueencoding.createSegmentRequestKeyPart)(childSegment));
        const childHintTree = hintTree !== null && hintTree.slots !== null ? hintTree.slots[parallelRouteKey] ?? null : null;
        const childTree = collectSegmentDataImpl(isClientParamParsingEnabled, childRoute, buildId, staleTime, childSeedData, clientModules, childRequestKey, segmentTasks, prefetchInlining, childHintTree);
        if (slotMetadata === null) {
            slotMetadata = {};
        }
        slotMetadata[parallelRouteKey] = childTree;
    }
    // Union the hints already embedded in the FlightRouterState with the
    // separately-computed build-time hints. During the initial build, the
    // FlightRouterState was produced before collectPrefetchHints ran, so
    // inlining hints (ParentInlinedIntoSelf, InlinedIntoChild) won't be in
    // route[4] yet. On subsequent renders the hints are already in the
    // FlightRouterState, so the union is idempotent.
    const prefetchHints = (route[4] ?? 0) | (hintTree !== null ? hintTree.hints : 0);
    // Determine which params this segment varies on.
    // Read the vary params thenable directly from the seed data. By the time
    // collectSegmentData runs, the thenable should be fulfilled. If it's not
    // fulfilled or null, treat as unknown (null means we can't share cache
    // entries across param values).
    const varyParamsThenable = seedData !== null ? seedData[4] : null;
    const varyParams = varyParamsThenable !== null ? (0, _varyparamsdecoding.readVaryParams)(varyParamsThenable) : null;
    if (!prefetchInlining) {
        // When prefetch inlining is disabled, spawn individual segment tasks.
        // When enabled, segment data is bundled into the /_inlined response
        // instead, so we skip per-segment tasks here.
        if (seedData !== null) {
            // Spawn a task to write the segment data to a new Flight stream.
            segmentTasks.push(// Since we're already in the middle of a render, wait until after the
            // current task to escape the current rendering context.
            (0, _scheduler.waitAtLeastOneReactRenderTask)().then(()=>renderSegmentPrefetch(buildId, staleTime, seedData[0], requestKey, varyParams, clientModules)));
        } else {
            // This segment does not have any seed data. Skip generating a prefetch
            // response for it. We'll still include it in the route tree, though.
            // TODO: We should encode in the route tree whether a segment is missing
            // so we don't attempt to fetch it for no reason. As of now this shouldn't
            // ever happen in practice, though.
        }
    }
    // route[0] is either a plain segment name (string) or a dynamic-param
    // tuple, as shown by the two branches below.
    const segment = route[0];
    let name;
    let param;
    if (typeof segment === 'string') {
        name = segment;
        param = null;
    } else {
        name = segment[0];
        param = {
            type: segment[2],
            // This value is omitted from the prefetch response when cacheComponents
            // is enabled.
            key: isClientParamParsingEnabled ? null : segment[1],
            siblings: segment[3]
        };
    }
    // Metadata about the segment. Sent to the client as part of the
    // tree prefetch.
    return {
        name,
        param,
        prefetchHints,
        slots: slotMetadata
    };
}
/**
 * Encodes a single segment's RSC data as its own Flight prefetch response.
 * Resolves to a `[responseKey, buffer]` pair; the root segment is published
 * under the special `/_index` key.
 */ async function renderSegmentPrefetch(buildId, staleTime, rsc, requestKey, varyParams, clientModules) {
    // Probe for partial (dynamic) data up front: `isPartial` has to be
    // embedded inside the prefetch payload itself.
    const isPartial = await isPartialRSCData(rsc, clientModules);
    const segmentPrefetch = {
        rsc,
        isPartial,
        staleTime,
        varyParams
    };
    if (buildId) {
        segmentPrefetch.buildId = buildId;
    }
    // Since all we're doing is decoding and re-encoding a cached prerender, if
    // it takes longer than a microtask, it must be because of hanging promises
    // caused by dynamic data. Abort the stream at the end of the current task.
    const controller = new AbortController();
    (0, _scheduler.waitAtLeastOneReactRenderTask)().then(()=>controller.abort());
    const prerenderResult = await (0, _static.prerender)(segmentPrefetch, clientModules, {
        filterStackFrame,
        signal: controller.signal,
        onError: onSegmentPrerenderError
    });
    const segmentBuffer = await (0, _nodewebstreamshelper.streamToBuffer)(prerenderResult.prelude);
    const responseKey = requestKey === _segmentvalueencoding.ROOT_SEGMENT_REQUEST_KEY ? '/_index' : requestKey;
    return [
        responseKey,
        segmentBuffer
    ];
}
/**
 * Bundles the entire segment tree plus the head into one Flight response,
 * published under `'/' + PAGE_SEGMENT_KEY`. Used when prefetch inlining is
 * enabled instead of emitting one response per segment.
 */ async function renderInlinedPrefetchResponse(route, buildId, staleTime, seedData, head, headVaryParams, clientModules) {
    // Collect every segment of the route into a single nested structure.
    const inlinedTree = await buildInlinedSegmentPrefetch(route, buildId, staleTime, seedData, clientModules);
    // The head (metadata such as the title) rides along in the same response.
    const headIsPartial = await isPartialRSCData(head, clientModules);
    const headPrefetch = {
        rsc: head,
        isPartial: headIsPartial,
        staleTime,
        varyParams: headVaryParams
    };
    if (buildId) {
        headPrefetch.buildId = buildId;
    }
    // Encode everything as one Flight payload. If encoding outlives the
    // current task, it must be blocked on dynamic data — abort at that point.
    const controller = new AbortController();
    (0, _scheduler.waitAtLeastOneReactRenderTask)().then(()=>controller.abort());
    const prerenderResult = await (0, _static.prerender)({
        tree: inlinedTree,
        head: headPrefetch
    }, clientModules, {
        filterStackFrame,
        signal: controller.signal,
        onError: onSegmentPrerenderError
    });
    const buffer = await (0, _nodewebstreamshelper.streamToBuffer)(prerenderResult.prelude);
    return [
        '/' + _segment.PAGE_SEGMENT_KEY,
        buffer
    ];
}
/**
 * Recursively converts a FlightRouterState subtree plus its seed data into
 * the nested `{ segment, slots }` shape used by the inlined prefetch
 * response. Children are processed sequentially, in route order.
 */ async function buildInlinedSegmentPrefetch(route, buildId, staleTime, seedData, clientModules) {
    const childRoutes = route[1];
    const childSeedDatas = seedData !== null ? seedData[1] : null;
    let slots = null;
    for(const parallelRouteKey in childRoutes){
        const childSeedData = childSeedDatas !== null ? childSeedDatas[parallelRouteKey] : null;
        const childPrefetch = await buildInlinedSegmentPrefetch(childRoutes[parallelRouteKey], buildId, staleTime, childSeedData, clientModules);
        if (slots === null) {
            slots = {};
        }
        slots[parallelRouteKey] = childPrefetch;
    }
    const rsc = seedData !== null ? seedData[0] : null;
    const varyParamsThenable = seedData !== null ? seedData[4] : null;
    const varyParams = varyParamsThenable !== null ? (0, _varyparamsdecoding.readVaryParams)(varyParamsThenable) : null;
    // A segment without an RSC payload is treated as fully partial so the
    // client knows it must fetch the dynamic data.
    const isPartial = rsc !== null ? await isPartialRSCData(rsc, clientModules) : true;
    const segment = {
        rsc,
        isPartial,
        staleTime,
        varyParams
    };
    if (buildId) {
        segment.buildId = buildId;
    }
    return {
        segment,
        slots
    };
}
/**
 * Returns true when `rsc` contains dynamic holes (i.e. the data is only
 * partially static), detected by racing a throwaway Flight prerender
 * against a single render task.
 *
 * NOTE(review): errors are deliberately swallowed (no-op onError) — this
 * probe only cares whether encoding finishes within one task, not whether
 * it succeeds; confirm that intent if modifying.
 */ async function isPartialRSCData(rsc, clientModules) {
    // We can determine if a segment contains only partial data if it takes longer
    // than a task to encode, because dynamic data is encoded as an infinite
    // promise. We must do this in a separate Flight prerender from the one that
    // actually generates the prefetch stream because we need to include
    // `isPartial` in the stream itself.
    let isPartial = false;
    const abortController = new AbortController();
    (0, _scheduler.waitAtLeastOneReactRenderTask)().then(()=>{
        // If we haven't yet finished the outer task, then it must be because we
        // accessed dynamic data.
        isPartial = true;
        abortController.abort();
    });
    await (0, _static.prerender)(rsc, clientModules, {
        filterStackFrame,
        signal: abortController.signal,
        onError () {}
    });
    return isPartial;
}
/**
 * Wraps a Flight stream in a stream that forwards every chunk but never
 * closes, even after the source ends.
 *
 * With PPR enabled, prefetch streams may contain references that are
 * intentionally left unresolved — that's how dynamic data access is
 * encoded. The Flight client reifies those into hanging promises that
 * suspend during render, which is the desired behavior (the UI resolves
 * once it switches to the dynamic stream via
 * useDeferredValue(dynamic, static)). However, the Flight implementation
 * currently errors if the server closes the response while references are
 * still pending, so as a workaround we simply never signal close.
 */ function createUnclosingPrefetchStream(originalFlightStream) {
    const reader = originalFlightStream.getReader();
    return new ReadableStream({
        async pull (controller) {
            for(;;){
                const result = await reader.read();
                if (result.done) {
                    // Source exhausted. Return WITHOUT closing the target
                    // stream — leaving it open is the whole point.
                    return;
                }
                // Forward the chunk and keep draining the source.
                controller.enqueue(result.value);
            }
        }
    });
}
//# sourceMappingURL=collect-segment-data.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,2 @@
import type { ConsoleAsyncStorage } from './console-async-storage.external';
/**
 * The shared AsyncLocalStorage instance backing the console store
 * (see `ConsoleStore` in console-async-storage.external).
 */
export declare const consoleAsyncStorageInstance: ConsoleAsyncStorage;

View File

@@ -0,0 +1,14 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
Object.defineProperty(exports, "consoleAsyncStorageInstance", {
enumerable: true,
get: function() {
return consoleAsyncStorageInstance;
}
});
const _asynclocalstorage = require("./async-local-storage");
const consoleAsyncStorageInstance = (0, _asynclocalstorage.createAsyncLocalStorage)();
//# sourceMappingURL=console-async-storage-instance.js.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/server/app-render/console-async-storage-instance.ts"],"sourcesContent":["import { createAsyncLocalStorage } from './async-local-storage'\nimport type { ConsoleAsyncStorage } from './console-async-storage.external'\n\nexport const consoleAsyncStorageInstance: ConsoleAsyncStorage =\n createAsyncLocalStorage()\n"],"names":["consoleAsyncStorageInstance","createAsyncLocalStorage"],"mappings":";;;;+BAGaA;;;eAAAA;;;mCAH2B;AAGjC,MAAMA,8BACXC,IAAAA,0CAAuB","ignoreList":[0]}

View File

@@ -0,0 +1,12 @@
import type { AsyncLocalStorage } from 'async_hooks';
import { consoleAsyncStorageInstance } from './console-async-storage-instance';
/**
 * Per-render console configuration carried through async context.
 */
export interface ConsoleStore {
    /**
     * if true the color of logs output will be dimmed to indicate the log is
     * from a repeat or validation render that is not typically relevant to
     * the primary action the server is taking.
     */
    readonly dim: boolean;
}
/** AsyncLocalStorage specialized to the {@link ConsoleStore} shape. */
export type ConsoleAsyncStorage = AsyncLocalStorage<ConsoleStore>;
export { consoleAsyncStorageInstance as consoleAsyncStorage };

View File

@@ -0,0 +1,13 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
Object.defineProperty(exports, "consoleAsyncStorage", {
enumerable: true,
get: function() {
return _consoleasyncstorageinstance.consoleAsyncStorageInstance;
}
});
const _consoleasyncstorageinstance = require("./console-async-storage-instance");
//# sourceMappingURL=console-async-storage.external.js.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/server/app-render/console-async-storage.external.ts"],"sourcesContent":["import type { AsyncLocalStorage } from 'async_hooks'\n\n// Share the instance module in the next-shared layer\nimport { consoleAsyncStorageInstance } from './console-async-storage-instance' with { 'turbopack-transition': 'next-shared' }\n\nexport interface ConsoleStore {\n /**\n * if true the color of logs output will be dimmed to indicate the log is\n * from a repeat or validation render that is not typically relevant to\n * the primary action the server is taking.\n */\n readonly dim: boolean\n}\n\nexport type ConsoleAsyncStorage = AsyncLocalStorage<ConsoleStore>\n\nexport { consoleAsyncStorageInstance as consoleAsyncStorage }\n"],"names":["consoleAsyncStorage","consoleAsyncStorageInstance"],"mappings":";;;;+BAgBwCA;;;eAA/BC,wDAA2B;;;6CAbQ","ignoreList":[0]}

View File

@@ -0,0 +1,8 @@
import type { AppRenderContext } from './app-render';
/**
 * Resolves a segment module (via `getComponent`) together with the style and
 * script elements for its CSS/JS entry assets. `injectedCSS`/`injectedJS`
 * track assets already emitted higher in the tree — presumably to avoid
 * duplicates; see get-css-inlined-link-tags for the exact semantics.
 *
 * @returns A `[Component, styles, scripts]` tuple.
 */
export declare function createComponentStylesAndScripts({ filePath, getComponent, injectedCSS, injectedJS, ctx, }: {
    filePath: string;
    getComponent: () => any;
    injectedCSS: Set<string>;
    injectedJS: Set<string>;
    ctx: AppRenderContext;
}): Promise<[React.ComponentType<any>, React.ReactNode, React.ReactNode]>;

View File

@@ -0,0 +1,33 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
Object.defineProperty(exports, "createComponentStylesAndScripts", {
enumerable: true,
get: function() {
return createComponentStylesAndScripts;
}
});
const _interopdefault = require("./interop-default");
const _getcssinlinedlinktags = require("./get-css-inlined-link-tags");
const _getassetquerystring = require("./get-asset-query-string");
const _encodeuripath = require("../../shared/lib/encode-uri-path");
const _rendercssresource = require("./render-css-resource");
/**
 * Resolves a segment module (via `getComponent`) along with the rendered
 * style elements and async <script> elements for its CSS/JS entry assets.
 * Returns a `[Component, styles, scripts]` tuple.
 */ async function createComponentStylesAndScripts({ filePath, getComponent, injectedCSS, injectedJS, ctx }) {
    const createElement = ctx.componentMod.createElement;
    const { styles: entryCssFiles, scripts: jsHrefs } = (0, _getcssinlinedlinktags.getLinkAndScriptTags)(filePath, injectedCSS, injectedJS);
    const styles = (0, _rendercssresource.renderCssResource)(entryCssFiles, ctx);
    let scripts = null;
    if (jsHrefs) {
        scripts = jsHrefs.map((href, index)=>{
            // Asset URLs are prefixed and carry the cache-busting query string.
            const src = `${ctx.assetPrefix}/_next/${(0, _encodeuripath.encodeURIPath)(href)}${(0, _getassetquerystring.getAssetQueryString)(ctx, true)}`;
            return createElement('script', {
                src,
                async: true,
                key: `script-${index}`
            });
        });
    }
    const Comp = (0, _interopdefault.interopDefault)(await getComponent());
    return [
        Comp,
        styles,
        scripts
    ];
}
//# sourceMappingURL=create-component-styles-and-scripts.js.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/server/app-render/create-component-styles-and-scripts.tsx"],"sourcesContent":["import { interopDefault } from './interop-default'\nimport { getLinkAndScriptTags } from './get-css-inlined-link-tags'\nimport type { AppRenderContext } from './app-render'\nimport { getAssetQueryString } from './get-asset-query-string'\nimport { encodeURIPath } from '../../shared/lib/encode-uri-path'\nimport { renderCssResource } from './render-css-resource'\n\nexport async function createComponentStylesAndScripts({\n filePath,\n getComponent,\n injectedCSS,\n injectedJS,\n ctx,\n}: {\n filePath: string\n getComponent: () => any\n injectedCSS: Set<string>\n injectedJS: Set<string>\n ctx: AppRenderContext\n}): Promise<[React.ComponentType<any>, React.ReactNode, React.ReactNode]> {\n const {\n componentMod: { createElement },\n } = ctx\n const { styles: entryCssFiles, scripts: jsHrefs } = getLinkAndScriptTags(\n filePath,\n injectedCSS,\n injectedJS\n )\n\n const styles = renderCssResource(entryCssFiles, ctx)\n\n const scripts = jsHrefs\n ? 
jsHrefs.map((href, index) =>\n createElement('script', {\n src: `${ctx.assetPrefix}/_next/${encodeURIPath(href)}${getAssetQueryString(ctx, true)}`,\n async: true,\n key: `script-${index}`,\n })\n )\n : null\n\n const Comp = interopDefault(await getComponent())\n\n return [Comp, styles, scripts]\n}\n"],"names":["createComponentStylesAndScripts","filePath","getComponent","injectedCSS","injectedJS","ctx","componentMod","createElement","styles","entryCssFiles","scripts","jsHrefs","getLinkAndScriptTags","renderCssResource","map","href","index","src","assetPrefix","encodeURIPath","getAssetQueryString","async","key","Comp","interopDefault"],"mappings":";;;;+BAOsBA;;;eAAAA;;;gCAPS;uCACM;qCAED;+BACN;mCACI;AAE3B,eAAeA,gCAAgC,EACpDC,QAAQ,EACRC,YAAY,EACZC,WAAW,EACXC,UAAU,EACVC,GAAG,EAOJ;IACC,MAAM,EACJC,cAAc,EAAEC,aAAa,EAAE,EAChC,GAAGF;IACJ,MAAM,EAAEG,QAAQC,aAAa,EAAEC,SAASC,OAAO,EAAE,GAAGC,IAAAA,2CAAoB,EACtEX,UACAE,aACAC;IAGF,MAAMI,SAASK,IAAAA,oCAAiB,EAACJ,eAAeJ;IAEhD,MAAMK,UAAUC,UACZA,QAAQG,GAAG,CAAC,CAACC,MAAMC,QACjBT,cAAc,UAAU;YACtBU,KAAK,GAAGZ,IAAIa,WAAW,CAAC,OAAO,EAAEC,IAAAA,4BAAa,EAACJ,QAAQK,IAAAA,wCAAmB,EAACf,KAAK,OAAO;YACvFgB,OAAO;YACPC,KAAK,CAAC,OAAO,EAAEN,OAAO;QACxB,MAEF;IAEJ,MAAMO,OAAOC,IAAAA,8BAAc,EAAC,MAAMtB;IAElC,OAAO;QAACqB;QAAMf;QAAQE;KAAQ;AAChC","ignoreList":[0]}

View File

@@ -0,0 +1,25 @@
import type { ComponentType } from 'react';
import type { CacheNodeSeedData } from '../../shared/lib/app-router-types';
import type { PreloadCallbacks } from './types';
import type { LoaderTree } from '../lib/app-dir-module';
import type { AppRenderContext, GetDynamicParamFromSegment } from './app-render';
import type { Params } from '../request/params';
/**
 * Use the provided loader tree to create the React Component tree.
 */
export declare function createComponentTree(props: {
    loaderTree: LoaderTree;
    parentParams: Params;
    parentOptionalCatchAllParamName: string | null;
    parentRuntimePrefetchable: false;
    rootLayoutIncluded: boolean;
    injectedCSS: Set<string>;
    injectedJS: Set<string>;
    injectedFontPreloadTags: Set<string>;
    ctx: AppRenderContext;
    missingSlots?: Set<string>;
    preloadCallbacks: PreloadCallbacks;
    authInterrupts: boolean;
    MetadataOutlet: ComponentType;
}): Promise<CacheNodeSeedData>;
/**
 * Derives the params contributed by the loader tree's root-level segments
 * using the supplied `getDynamicParamFromSegment` resolver.
 * NOTE(review): implementation not visible in this chunk — confirm behavior
 * against create-component-tree.js.
 */
export declare function getRootParams(loaderTree: LoaderTree, getDynamicParamFromSegment: GetDynamicParamFromSegment): Params;

View File

@@ -0,0 +1,840 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
0 && (module.exports = {
createComponentTree: null,
getRootParams: null
});
function _export(target, all) {
for(var name in all)Object.defineProperty(target, name, {
enumerable: true,
get: all[name]
});
}
_export(exports, {
createComponentTree: function() {
return createComponentTree;
},
getRootParams: function() {
return getRootParams;
}
});
const _clientandserverreferences = require("../../lib/client-and-server-references");
const _appdirmodule = require("../lib/app-dir-module");
const _interopdefault = require("./interop-default");
const _parseloadertree = require("../../shared/lib/router/utils/parse-loader-tree");
const _createcomponentstylesandscripts = require("./create-component-styles-and-scripts");
const _getlayerassets = require("./get-layer-assets");
const _hasloadingcomponentintree = require("./has-loading-component-in-tree");
const _patchfetch = require("../lib/patch-fetch");
const _default = require("../../client/components/builtin/default");
const _tracer = require("../lib/trace/tracer");
const _constants = require("../lib/trace/constants");
const _staticgenerationbailout = require("../../client/components/static-generation-bailout");
const _workunitasyncstorageexternal = require("./work-unit-async-storage.external");
const _varyparams = require("./vary-params");
const _segment = require("../../shared/lib/segment");
const _segmentexplorerpath = require("./segment-explorer-path");
const _stagedrendering = require("./staged-rendering");
/**
 * Builds the React component tree for an App Router render, wrapping the
 * work in a "build component tree" trace span.
 */ function createComponentTree(props) {
    const tracer = (0, _tracer.getTracer)();
    return tracer.trace(_constants.NextNodeServerSpan.createComponentTree, {
        spanName: 'build component tree'
    }, ()=>createComponentTreeInternal(props, true));
}
/**
 * Throws a tagged error reporting that a segment module (page, layout,
 * loading, etc.) does not default-export a React component.
 */ function errorMissingDefaultExport(pagePath, convention) {
    // The root path renders as "" so the message reads `"/page"` rather
    // than `"//page"`.
    const pathPrefix = pagePath === '/' ? '' : pagePath;
    const err = new Error(`The default export is not a React Component in "${pathPrefix}/${convention}"`);
    // Tag with Next.js's non-enumerable error code for error matching.
    throw Object.defineProperty(err, "__NEXT_ERROR_CODE", {
        value: "E45",
        enumerable: false,
        configurable: true
    });
}
// Single-letter property key. Its usage is outside this chunk — presumably
// it keys cache-node data in a serialized payload; NOTE(review): verify at
// the call sites further down this file.
const cacheNodeKey = 'c';
async function createComponentTreeInternal({ loaderTree: tree, parentParams, parentOptionalCatchAllParamName, parentRuntimePrefetchable, rootLayoutIncluded, injectedCSS, injectedJS, injectedFontPreloadTags, ctx, missingSlots, preloadCallbacks, authInterrupts, MetadataOutlet }, isRoot) {
const { renderOpts: { nextConfigOutput, experimental, cacheComponents }, workStore, componentMod: { createElement, Fragment, SegmentViewNode, HTTPAccessFallbackBoundary, LayoutRouter, RenderFromTemplateContext, ClientPageRoot, ClientSegmentRoot, createServerSearchParamsForServerPage, createPrerenderSearchParamsForClientPage, createServerParamsForServerSegment, createPrerenderParamsForClientSegment, serverHooks: { DynamicServerError }, Postpone }, pagePath, getDynamicParamFromSegment, isPrefetch, query } = ctx;
const { page, conventionPath, segment, modules, parallelRoutes } = (0, _parseloadertree.parseLoaderTree)(tree);
const { layout, template, error, loading, 'not-found': notFound, forbidden, unauthorized } = modules;
const injectedCSSWithCurrentLayout = new Set(injectedCSS);
const injectedJSWithCurrentLayout = new Set(injectedJS);
const injectedFontPreloadTagsWithCurrentLayout = new Set(injectedFontPreloadTags);
const layerAssets = (0, _getlayerassets.getLayerAssets)({
preloadCallbacks,
ctx,
layoutOrPagePath: conventionPath,
injectedCSS: injectedCSSWithCurrentLayout,
injectedJS: injectedJSWithCurrentLayout,
injectedFontPreloadTags: injectedFontPreloadTagsWithCurrentLayout
});
const [Template, templateStyles, templateScripts] = template ? await (0, _createcomponentstylesandscripts.createComponentStylesAndScripts)({
ctx,
filePath: template[1],
getComponent: template[0],
injectedCSS: injectedCSSWithCurrentLayout,
injectedJS: injectedJSWithCurrentLayout
}) : [
Fragment
];
const [ErrorComponent, errorStyles, errorScripts] = error ? await (0, _createcomponentstylesandscripts.createComponentStylesAndScripts)({
ctx,
filePath: error[1],
getComponent: error[0],
injectedCSS: injectedCSSWithCurrentLayout,
injectedJS: injectedJSWithCurrentLayout
}) : [];
const [Loading, loadingStyles, loadingScripts] = loading ? await (0, _createcomponentstylesandscripts.createComponentStylesAndScripts)({
ctx,
filePath: loading[1],
getComponent: loading[0],
injectedCSS: injectedCSSWithCurrentLayout,
injectedJS: injectedJSWithCurrentLayout
}) : [];
const isLayout = typeof layout !== 'undefined';
const isPage = typeof page !== 'undefined';
const { mod: layoutOrPageMod, modType } = await (0, _tracer.getTracer)().trace(_constants.NextNodeServerSpan.getLayoutOrPageModule, {
hideSpan: !(isLayout || isPage),
spanName: 'resolve segment modules',
attributes: {
'next.segment': segment
}
}, ()=>(0, _appdirmodule.getLayoutOrPageModule)(tree));
/**
* Checks if the current segment is a root layout.
*/ const rootLayoutAtThisLevel = isLayout && !rootLayoutIncluded;
/**
* Checks if the current segment or any level above it has a root layout.
*/ const rootLayoutIncludedAtThisLevelOrAbove = rootLayoutIncluded || rootLayoutAtThisLevel;
const [NotFound, notFoundStyles] = notFound ? await (0, _createcomponentstylesandscripts.createComponentStylesAndScripts)({
ctx,
filePath: notFound[1],
getComponent: notFound[0],
injectedCSS: injectedCSSWithCurrentLayout,
injectedJS: injectedJSWithCurrentLayout
}) : [];
const instantConfig = layoutOrPageMod ? layoutOrPageMod.unstable_instant : undefined;
const hasRuntimePrefetch = instantConfig && typeof instantConfig === 'object' ? instantConfig.prefetch === 'runtime' : false;
const isRuntimePrefetchable = hasRuntimePrefetch || parentRuntimePrefetchable;
const [Forbidden, forbiddenStyles] = authInterrupts && forbidden ? await (0, _createcomponentstylesandscripts.createComponentStylesAndScripts)({
ctx,
filePath: forbidden[1],
getComponent: forbidden[0],
injectedCSS: injectedCSSWithCurrentLayout,
injectedJS: injectedJSWithCurrentLayout
}) : [];
const [Unauthorized, unauthorizedStyles] = authInterrupts && unauthorized ? await (0, _createcomponentstylesandscripts.createComponentStylesAndScripts)({
ctx,
filePath: unauthorized[1],
getComponent: unauthorized[0],
injectedCSS: injectedCSSWithCurrentLayout,
injectedJS: injectedJSWithCurrentLayout
}) : [];
let dynamic = layoutOrPageMod == null ? void 0 : layoutOrPageMod.dynamic;
if (nextConfigOutput === 'export') {
if (!dynamic || dynamic === 'auto') {
dynamic = 'error';
} else if (dynamic === 'force-dynamic') {
// force-dynamic is always incompatible with 'export'. We must interrupt the build
throw Object.defineProperty(new _staticgenerationbailout.StaticGenBailoutError(`Page with \`dynamic = "force-dynamic"\` couldn't be exported. \`output: "export"\` requires all pages be renderable statically because there is no runtime server to dynamically render routes in this output format. Learn more: https://nextjs.org/docs/app/building-your-application/deploying/static-exports`), "__NEXT_ERROR_CODE", {
value: "E527",
enumerable: false,
configurable: true
});
}
}
if (typeof dynamic === 'string') {
// the nested most config wins so we only force-static
// if it's configured above any parent that configured
// otherwise
if (dynamic === 'error') {
workStore.dynamicShouldError = true;
} else if (dynamic === 'force-dynamic') {
workStore.forceDynamic = true;
// TODO: (PPR) remove this bailout once PPR is the default
if (workStore.isStaticGeneration && !experimental.isRoutePPREnabled) {
// If the postpone API isn't available, we can't postpone the render and
// therefore we can't use the dynamic API.
const err = Object.defineProperty(new DynamicServerError(`Page with \`dynamic = "force-dynamic"\` won't be rendered statically.`), "__NEXT_ERROR_CODE", {
value: "E585",
enumerable: false,
configurable: true
});
workStore.dynamicUsageDescription = err.message;
workStore.dynamicUsageStack = err.stack;
throw err;
}
} else {
workStore.dynamicShouldError = false;
workStore.forceStatic = dynamic === 'force-static';
}
}
if (typeof (layoutOrPageMod == null ? void 0 : layoutOrPageMod.fetchCache) === 'string') {
workStore.fetchCache = layoutOrPageMod == null ? void 0 : layoutOrPageMod.fetchCache;
}
if (typeof (layoutOrPageMod == null ? void 0 : layoutOrPageMod.revalidate) !== 'undefined') {
(0, _patchfetch.validateRevalidate)(layoutOrPageMod == null ? void 0 : layoutOrPageMod.revalidate, workStore.route);
}
if (typeof (layoutOrPageMod == null ? void 0 : layoutOrPageMod.revalidate) === 'number') {
const defaultRevalidate = layoutOrPageMod.revalidate;
const workUnitStore = _workunitasyncstorageexternal.workUnitAsyncStorage.getStore();
if (workUnitStore) {
switch(workUnitStore.type){
case 'prerender':
case 'prerender-runtime':
case 'prerender-legacy':
case 'prerender-ppr':
if (workUnitStore.revalidate > defaultRevalidate) {
workUnitStore.revalidate = defaultRevalidate;
}
break;
case 'request':
break;
// createComponentTree is not called for these stores:
case 'cache':
case 'private-cache':
case 'prerender-client':
case 'validation-client':
case 'unstable-cache':
case 'generate-static-params':
break;
default:
workUnitStore;
}
}
if (!workStore.forceStatic && workStore.isStaticGeneration && defaultRevalidate === 0 && // If the postpone API isn't available, we can't postpone the render and
// therefore we can't use the dynamic API.
!experimental.isRoutePPREnabled) {
const dynamicUsageDescription = `revalidate: 0 configured ${segment}`;
workStore.dynamicUsageDescription = dynamicUsageDescription;
throw Object.defineProperty(new DynamicServerError(dynamicUsageDescription), "__NEXT_ERROR_CODE", {
value: "E1005",
enumerable: false,
configurable: true
});
}
}
// Read unstable_dynamicStaleTime from page modules (not layouts) and track it on
// the store's stale field. This affects the segment cache stale time via
// the StaleTimeIterable.
if (isPage && typeof (layoutOrPageMod == null ? void 0 : layoutOrPageMod.unstable_dynamicStaleTime) === 'number') {
const pageStaleTime = layoutOrPageMod.unstable_dynamicStaleTime;
const workUnitStore = _workunitasyncstorageexternal.workUnitAsyncStorage.getStore();
if (workUnitStore) {
switch(workUnitStore.type){
case 'prerender':
case 'prerender-runtime':
case 'prerender-legacy':
case 'prerender-ppr':
if (workUnitStore.stale > pageStaleTime) {
workUnitStore.stale = pageStaleTime;
}
break;
case 'request':
if (workUnitStore.stale === undefined || workUnitStore.stale > pageStaleTime) {
workUnitStore.stale = pageStaleTime;
}
break;
// createComponentTree is not called for these stores:
case 'cache':
case 'private-cache':
case 'prerender-client':
case 'validation-client':
case 'unstable-cache':
case 'generate-static-params':
break;
default:
workUnitStore;
}
}
}
const isStaticGeneration = workStore.isStaticGeneration;
// Assume the segment we're rendering contains only partial data if PPR is
// enabled and this is a statically generated response. This is used by the
// client Segment Cache after a prefetch to determine if it can skip the
// second request to fill in the dynamic data.
//
// It's OK for this to be `true` when the data is actually fully static, but
// it's not OK for this to be `false` when the data possibly contains holes.
// Although the value here is overly pessimistic, for prefetches, it will be
// replaced by a more specific value when the data is later processed into
// per-segment responses (see collect-segment-data.tsx)
//
// For dynamic requests, this must always be `false` because dynamic responses
// are never partial.
const isPossiblyPartialResponse = isStaticGeneration && experimental.isRoutePPREnabled === true;
const LayoutOrPage = layoutOrPageMod ? (0, _interopdefault.interopDefault)(layoutOrPageMod) : undefined;
/**
* The React Component to render.
*/ let MaybeComponent = LayoutOrPage;
if (process.env.NODE_ENV === 'development' || isStaticGeneration) {
const { isValidElementType } = require('next/dist/compiled/react-is');
if (typeof MaybeComponent !== 'undefined' && !isValidElementType(MaybeComponent)) {
errorMissingDefaultExport(pagePath, modType ?? 'page');
}
if (typeof ErrorComponent !== 'undefined' && !isValidElementType(ErrorComponent)) {
errorMissingDefaultExport(pagePath, 'error');
}
if (typeof Loading !== 'undefined' && !isValidElementType(Loading)) {
errorMissingDefaultExport(pagePath, 'loading');
}
if (typeof NotFound !== 'undefined' && !isValidElementType(NotFound)) {
errorMissingDefaultExport(pagePath, 'not-found');
}
if (typeof Forbidden !== 'undefined' && !isValidElementType(Forbidden)) {
errorMissingDefaultExport(pagePath, 'forbidden');
}
if (typeof Unauthorized !== 'undefined' && !isValidElementType(Unauthorized)) {
errorMissingDefaultExport(pagePath, 'unauthorized');
}
}
// Handle dynamic segment params.
const segmentParam = getDynamicParamFromSegment(tree);
// Create object holding the parent params and current params
let currentParams = parentParams;
if (segmentParam && segmentParam.value !== null) {
currentParams = {
...parentParams,
[segmentParam.param]: segmentParam.value
};
}
// Track optional catch-all params with no value (e.g., [[...slug]] at /).
// These params won't exist as properties on the params object, so vary
// params tracking needs to use a Proxy to detect access. We propagate this
// through the tree so that child segments (like __PAGE__) also know about
// the missing param. In practice, this only gets passed down one level —
// from the optional catch-all layout segment to the page segment — so it's
// always very close to the leaf of the tree.
const optionalCatchAllParamName = (segmentParam == null ? void 0 : segmentParam.type) === 'oc' && segmentParam.value === null ? segmentParam.param : parentOptionalCatchAllParamName;
// Resolve the segment param
const isSegmentViewEnabled = !!process.env.__NEXT_DEV_SERVER;
const dir = (process.env.NEXT_RUNTIME === 'edge' ? process.env.__NEXT_EDGE_PROJECT_DIR : ctx.renderOpts.dir) || '';
const [notFoundElement, notFoundFilePath] = await createBoundaryConventionElement({
ctx,
conventionName: 'not-found',
Component: NotFound,
styles: notFoundStyles,
tree
});
const [forbiddenElement] = await createBoundaryConventionElement({
ctx,
conventionName: 'forbidden',
Component: Forbidden,
styles: forbiddenStyles,
tree
});
const [unauthorizedElement] = await createBoundaryConventionElement({
ctx,
conventionName: 'unauthorized',
Component: Unauthorized,
styles: unauthorizedStyles,
tree
});
// TODO: Combine this `map` traversal with the loop below that turns the array
// into an object.
const parallelRouteMap = await Promise.all(Object.keys(parallelRoutes).map(async (parallelRouteKey)=>{
const isChildrenRouteKey = parallelRouteKey === 'children';
const parallelRoute = parallelRoutes[parallelRouteKey];
const notFoundComponent = isChildrenRouteKey ? notFoundElement : undefined;
const forbiddenComponent = isChildrenRouteKey ? forbiddenElement : undefined;
const unauthorizedComponent = isChildrenRouteKey ? unauthorizedElement : undefined;
// if we're prefetching and that there's a Loading component, we bail out
// otherwise we keep rendering for the prefetch.
// We also want to bail out if there's no Loading component in the tree.
let childCacheNodeSeedData = null;
if (// Before PPR, the way instant navigations work in Next.js is we
// prefetch everything up to the first route segment that defines a
// loading.tsx boundary. (We do the same if there's no loading
// boundary in the entire tree, because we don't want to prefetch too
// much) The rest of the tree is deferred until the actual navigation.
// It does not take into account whether the data is dynamic — even if
// the tree is completely static, it will still defer everything
// inside the loading boundary.
//
// This behavior predates PPR and is only relevant if the
// PPR flag is not enabled.
isPrefetch && (Loading || !(0, _hasloadingcomponentintree.hasLoadingComponentInTree)(parallelRoute)) && // The approach with PPR is different — loading.tsx behaves like a
// regular Suspense boundary and has no special behavior.
//
// With PPR, we prefetch as deeply as possible, and only defer when
// dynamic data is accessed. If so, we only defer the nearest parent
// Suspense boundary of the dynamic data access, regardless of whether
// the boundary is defined by loading.tsx or a normal <Suspense>
// component in userspace.
//
// NOTE: In practice this usually means we'll end up prefetching more
// than we were before PPR, which may or may not be considered a
// performance regression by some apps. The plan is to address this
// before General Availability of PPR by introducing granular
// per-segment fetching, so we can reuse as much of the tree as
// possible during both prefetches and dynamic navigations. But during
// the beta period, we should be clear about this trade off in our
// communications.
!experimental.isRoutePPREnabled) {
// Don't prefetch this child. This will trigger a lazy fetch by the
// client router.
} else {
// Create the child component
if (process.env.NODE_ENV === 'development' && missingSlots) {
var _parsedTree_conventionPath;
// When we detect the default fallback (which triggers a 404), we collect the missing slots
// to provide more helpful debug information during development mode.
const parsedTree = (0, _parseloadertree.parseLoaderTree)(parallelRoute);
if ((_parsedTree_conventionPath = parsedTree.conventionPath) == null ? void 0 : _parsedTree_conventionPath.endsWith(_default.PARALLEL_ROUTE_DEFAULT_PATH)) {
missingSlots.add(parallelRouteKey);
}
}
const seedData = await createComponentTreeInternal({
loaderTree: parallelRoute,
parentParams: currentParams,
parentOptionalCatchAllParamName: optionalCatchAllParamName,
parentRuntimePrefetchable: isRuntimePrefetchable,
rootLayoutIncluded: rootLayoutIncludedAtThisLevelOrAbove,
injectedCSS: injectedCSSWithCurrentLayout,
injectedJS: injectedJSWithCurrentLayout,
injectedFontPreloadTags: injectedFontPreloadTagsWithCurrentLayout,
ctx,
missingSlots,
preloadCallbacks,
authInterrupts,
// `StreamingMetadataOutlet` is used to conditionally throw. In the case of parallel routes we will have more than one page
// but we only want to throw on the first one.
MetadataOutlet: isChildrenRouteKey ? MetadataOutlet : null
}, false);
childCacheNodeSeedData = seedData;
}
const templateNode = createElement(Template, null, createElement(RenderFromTemplateContext, null));
const templateFilePath = (0, _segmentexplorerpath.getConventionPathByType)(tree, dir, 'template');
const errorFilePath = (0, _segmentexplorerpath.getConventionPathByType)(tree, dir, 'error');
const loadingFilePath = (0, _segmentexplorerpath.getConventionPathByType)(tree, dir, 'loading');
const globalErrorFilePath = isRoot ? (0, _segmentexplorerpath.getConventionPathByType)(tree, dir, 'global-error') : undefined;
const wrappedErrorStyles = isSegmentViewEnabled && errorFilePath ? createElement(SegmentViewNode, {
type: 'error',
pagePath: errorFilePath
}, errorStyles) : errorStyles;
// Add a suffix to avoid conflict with the segment view node representing rendered file.
// existence: not-found.tsx@boundary
// rendered: not-found.tsx
const fileNameSuffix = _segmentexplorerpath.BOUNDARY_SUFFIX;
const segmentViewBoundaries = isSegmentViewEnabled ? createElement(Fragment, null, notFoundFilePath && createElement(SegmentViewNode, {
type: `${_segmentexplorerpath.BOUNDARY_PREFIX}not-found`,
pagePath: notFoundFilePath + fileNameSuffix
}), loadingFilePath && createElement(SegmentViewNode, {
type: `${_segmentexplorerpath.BOUNDARY_PREFIX}loading`,
pagePath: loadingFilePath + fileNameSuffix
}), errorFilePath && createElement(SegmentViewNode, {
type: `${_segmentexplorerpath.BOUNDARY_PREFIX}error`,
pagePath: errorFilePath + fileNameSuffix
}), globalErrorFilePath && createElement(SegmentViewNode, {
type: `${_segmentexplorerpath.BOUNDARY_PREFIX}global-error`,
pagePath: (0, _segmentexplorerpath.isNextjsBuiltinFilePath)(globalErrorFilePath) ? `${_segmentexplorerpath.BUILTIN_PREFIX}global-error.js${fileNameSuffix}` : globalErrorFilePath
})) : null;
return [
parallelRouteKey,
createElement(LayoutRouter, {
parallelRouterKey: parallelRouteKey,
error: ErrorComponent,
errorStyles: wrappedErrorStyles,
errorScripts: errorScripts,
template: isSegmentViewEnabled && templateFilePath ? createElement(SegmentViewNode, {
type: 'template',
pagePath: templateFilePath
}, templateNode) : templateNode,
templateStyles: templateStyles,
templateScripts: templateScripts,
notFound: notFoundComponent,
forbidden: forbiddenComponent,
unauthorized: unauthorizedComponent,
...isSegmentViewEnabled && {
segmentViewBoundaries
}
}),
childCacheNodeSeedData
];
}));
// Convert the parallel route map into an object after all promises have been resolved.
let parallelRouteProps = {};
let parallelRouteCacheNodeSeedData = {};
for (const parallelRoute of parallelRouteMap){
const [parallelRouteKey, parallelRouteProp, flightData] = parallelRoute;
parallelRouteProps[parallelRouteKey] = parallelRouteProp;
parallelRouteCacheNodeSeedData[parallelRouteKey] = flightData;
}
let loadingElement = Loading ? createElement(Loading, {
key: 'l'
}) : null;
const loadingFilePath = (0, _segmentexplorerpath.getConventionPathByType)(tree, dir, 'loading');
if (isSegmentViewEnabled && loadingElement) {
if (loadingFilePath) {
loadingElement = createElement(SegmentViewNode, {
key: cacheNodeKey + '-loading',
type: 'loading',
pagePath: loadingFilePath
}, loadingElement);
}
}
const loadingData = loadingElement ? [
loadingElement,
loadingStyles,
loadingScripts
] : null;
// When the segment does not have a layout or page we still have to add the layout router to ensure the path holds the loading component
if (!MaybeComponent) {
return createSeedData(ctx, createElement(Fragment, {
key: cacheNodeKey
}, layerAssets, parallelRouteProps.children), parallelRouteCacheNodeSeedData, loadingData, isPossiblyPartialResponse, isRuntimePrefetchable, // No user-provided component, so no params will be accessed. Use the
// pre-resolved empty tracker.
_varyparams.emptyVaryParamsAccumulator);
}
const Component = MaybeComponent;
// If force-dynamic is used and the current render supports postponing, we
// replace it with a node that will postpone the render. This ensures that the
// postpone is invoked during the react render phase and not during the next
// render phase.
// @TODO this does not actually do what it seems like it would or should do. The idea is that
// if we are rendering in a force-dynamic mode and we can postpone we should only make the segments
// that ask for force-dynamic to be dynamic, allowing other segments to still prerender. However
// because this comes after the children traversal and the static generation store is mutated every segment
// along the parent path of a force-dynamic segment will hit this condition effectively making the entire
// render force-dynamic. We should refactor this function so that we can correctly track which segments
// need to be dynamic
if (workStore.isStaticGeneration && workStore.forceDynamic && experimental.isRoutePPREnabled) {
return createSeedData(ctx, createElement(Fragment, {
key: cacheNodeKey
}, createElement(Postpone, {
reason: 'dynamic = "force-dynamic" was used',
route: workStore.route
}), layerAssets), parallelRouteCacheNodeSeedData, loadingData, true, isRuntimePrefetchable, // force-dynamic postpones without rendering the component, so no params
// are accessed. The vary params are empty.
_varyparams.emptyVaryParamsAccumulator);
}
const isClientComponent = (0, _clientandserverreferences.isClientReference)(layoutOrPageMod);
const varyParamsAccumulator = isClientComponent && cacheComponents ? // from the server, so they have an empty vary params set.
_varyparams.emptyVaryParamsAccumulator : (0, _varyparams.createVaryParamsAccumulator)();
if (process.env.NODE_ENV === 'development' && 'params' in parallelRouteProps) {
// @TODO consider making this an error and running the check in build as well
console.error(`"params" is a reserved prop in Layouts and Pages and cannot be used as the name of a parallel route in ${segment}`);
}
if (isPage) {
const PageComponent = Component;
// Assign searchParams to props if this is a page
let pageElement;
if (isClientComponent) {
if (cacheComponents) {
// Params are omitted when Cache Components is enabled
pageElement = createElement(ClientPageRoot, {
Component: PageComponent,
serverProvidedParams: null
});
} else if (isStaticGeneration) {
const promiseOfParams = createPrerenderParamsForClientSegment(currentParams);
const promiseOfSearchParams = createPrerenderSearchParamsForClientPage();
pageElement = createElement(ClientPageRoot, {
Component: PageComponent,
serverProvidedParams: {
searchParams: query,
params: currentParams,
promises: [
promiseOfSearchParams,
promiseOfParams
]
}
});
} else {
pageElement = createElement(ClientPageRoot, {
Component: PageComponent,
serverProvidedParams: {
searchParams: query,
params: currentParams,
promises: null
}
});
}
} else {
// If we are passing params to a server component Page we need to track
// their usage in case the current render mode tracks dynamic API usage.
const params = createServerParamsForServerSegment(currentParams, optionalCatchAllParamName, varyParamsAccumulator, isRuntimePrefetchable);
// If we are passing searchParams to a server component Page we need to
// track their usage in case the current render mode tracks dynamic API
// usage.
let searchParams = createServerSearchParamsForServerPage(query, varyParamsAccumulator, isRuntimePrefetchable);
if ((0, _clientandserverreferences.isUseCacheFunction)(PageComponent)) {
const UseCachePageComponent = PageComponent;
pageElement = createElement(UseCachePageComponent, {
params: params,
searchParams: searchParams,
$$isPage: true
});
} else {
pageElement = createElement(PageComponent, {
params: params,
searchParams: searchParams
});
}
}
const isDefaultSegment = segment === _segment.DEFAULT_SEGMENT_KEY;
const pageFilePath = (0, _segmentexplorerpath.getConventionPathByType)(tree, dir, 'page') ?? (0, _segmentexplorerpath.getConventionPathByType)(tree, dir, 'defaultPage');
const segmentType = isDefaultSegment ? 'default' : 'page';
const wrappedPageElement = isSegmentViewEnabled && pageFilePath ? createElement(SegmentViewNode, {
key: cacheNodeKey + '-' + segmentType,
type: segmentType,
pagePath: pageFilePath
}, pageElement) : pageElement;
return createSeedData(ctx, createElement(Fragment, {
key: cacheNodeKey
}, wrappedPageElement, layerAssets, MetadataOutlet ? createElement(MetadataOutlet, null) : null), parallelRouteCacheNodeSeedData, loadingData, isPossiblyPartialResponse, isRuntimePrefetchable, varyParamsAccumulator);
} else {
const SegmentComponent = Component;
const isRootLayoutWithChildrenSlotAndAtLeastOneMoreSlot = rootLayoutAtThisLevel && 'children' in parallelRoutes && Object.keys(parallelRoutes).length > 1;
let segmentNode;
if (isClientComponent) {
let clientSegment;
if (cacheComponents) {
// Params are omitted when Cache Components is enabled
clientSegment = createElement(ClientSegmentRoot, {
Component: SegmentComponent,
slots: parallelRouteProps,
serverProvidedParams: null
});
} else if (isStaticGeneration) {
const promiseOfParams = createPrerenderParamsForClientSegment(currentParams);
clientSegment = createElement(ClientSegmentRoot, {
Component: SegmentComponent,
slots: parallelRouteProps,
serverProvidedParams: {
params: currentParams,
promises: [
promiseOfParams
]
}
});
} else {
clientSegment = createElement(ClientSegmentRoot, {
Component: SegmentComponent,
slots: parallelRouteProps,
serverProvidedParams: {
params: currentParams,
promises: null
}
});
}
if (isRootLayoutWithChildrenSlotAndAtLeastOneMoreSlot) {
let notfoundClientSegment;
let forbiddenClientSegment;
let unauthorizedClientSegment;
// TODO-APP: This is a hack to support unmatched parallel routes, which will throw `notFound()`.
// This ensures that a `HTTPAccessFallbackBoundary` is available for when that happens,
// but it's not ideal, as it needlessly invokes the `NotFound` component and renders the `RootLayout` twice.
// We should instead look into handling the fallback behavior differently in development mode so that it doesn't
// rely on the `NotFound` behavior.
notfoundClientSegment = createErrorBoundaryClientSegmentRoot({
ctx,
ErrorBoundaryComponent: NotFound,
errorElement: notFoundElement,
ClientSegmentRoot,
layerAssets,
SegmentComponent,
currentParams
});
forbiddenClientSegment = createErrorBoundaryClientSegmentRoot({
ctx,
ErrorBoundaryComponent: Forbidden,
errorElement: forbiddenElement,
ClientSegmentRoot,
layerAssets,
SegmentComponent,
currentParams
});
unauthorizedClientSegment = createErrorBoundaryClientSegmentRoot({
ctx,
ErrorBoundaryComponent: Unauthorized,
errorElement: unauthorizedElement,
ClientSegmentRoot,
layerAssets,
SegmentComponent,
currentParams
});
if (notfoundClientSegment || forbiddenClientSegment || unauthorizedClientSegment) {
segmentNode = createElement(HTTPAccessFallbackBoundary, {
key: cacheNodeKey,
notFound: notfoundClientSegment,
forbidden: forbiddenClientSegment,
unauthorized: unauthorizedClientSegment
}, layerAssets, clientSegment);
} else {
segmentNode = createElement(Fragment, {
key: cacheNodeKey
}, layerAssets, clientSegment);
}
} else {
segmentNode = createElement(Fragment, {
key: cacheNodeKey
}, layerAssets, clientSegment);
}
} else {
const params = createServerParamsForServerSegment(currentParams, optionalCatchAllParamName, varyParamsAccumulator, isRuntimePrefetchable);
let serverSegment;
if ((0, _clientandserverreferences.isUseCacheFunction)(SegmentComponent)) {
const UseCacheLayoutComponent = SegmentComponent;
serverSegment = createElement(UseCacheLayoutComponent, {
...parallelRouteProps,
params: params,
$$isLayout: true
}, // Force static children here so that they're validated.
// See https://github.com/facebook/react/pull/34846
parallelRouteProps.children);
} else {
serverSegment = createElement(SegmentComponent, {
...parallelRouteProps,
params: params
}, // Force static children here so that they're validated.
// See https://github.com/facebook/react/pull/34846
parallelRouteProps.children);
}
if (isRootLayoutWithChildrenSlotAndAtLeastOneMoreSlot) {
// TODO-APP: This is a hack to support unmatched parallel routes, which will throw `notFound()`.
// This ensures that a `HTTPAccessFallbackBoundary` is available for when that happens,
// but it's not ideal, as it needlessly invokes the `NotFound` component and renders the `RootLayout` twice.
// We should instead look into handling the fallback behavior differently in development mode so that it doesn't
// rely on the `NotFound` behavior.
segmentNode = createElement(HTTPAccessFallbackBoundary, {
key: cacheNodeKey,
notFound: notFoundElement ? createElement(Fragment, null, layerAssets, createElement(SegmentComponent, {
params: params
}, notFoundStyles, notFoundElement)) : undefined
}, layerAssets, serverSegment);
} else {
segmentNode = createElement(Fragment, {
key: cacheNodeKey
}, layerAssets, serverSegment);
}
}
const layoutFilePath = (0, _segmentexplorerpath.getConventionPathByType)(tree, dir, 'layout');
const wrappedSegmentNode = isSegmentViewEnabled && layoutFilePath ? createElement(SegmentViewNode, {
key: 'layout',
type: 'layout',
pagePath: layoutFilePath
}, segmentNode) : segmentNode;
// For layouts we just render the component
return createSeedData(ctx, wrappedSegmentNode, parallelRouteCacheNodeSeedData, loadingData, isPossiblyPartialResponse, isRuntimePrefetchable, varyParamsAccumulator);
}
}
function createErrorBoundaryClientSegmentRoot({ ctx, ErrorBoundaryComponent, errorElement, ClientSegmentRoot, layerAssets, SegmentComponent, currentParams }) {
const { componentMod: { createElement, Fragment } } = ctx;
if (ErrorBoundaryComponent) {
const notFoundParallelRouteProps = {
children: errorElement
};
return createElement(Fragment, null, layerAssets, createElement(ClientSegmentRoot, {
Component: SegmentComponent,
slots: notFoundParallelRouteProps,
params: currentParams
}));
}
return null;
}
/**
 * Collects the dynamic params of all segments above (and including) the root
 * layout. Entry point that starts the walk with an empty params object.
 */
function getRootParams(loaderTree, getDynamicParamFromSegment) {
    const initialParams = {};
    return getRootParamsImpl(initialParams, loaderTree, getDynamicParamFromSegment);
}
/**
 * Walks down the loader tree, accumulating dynamic segment params, until the
 * first layout (the root layout) is reached. Above the root layout there can
 * be no parallel routes, so each level has at most a single `children` slot.
 */
function getRootParamsImpl(parentParams, loaderTree, getDynamicParamFromSegment) {
    let accumulatedParams = parentParams;
    let currentTree = loaderTree;
    while (true) {
        const { modules: { layout }, parallelRoutes } = (0, _parseloadertree.parseLoaderTree)(currentTree);
        const segmentParam = getDynamicParamFromSegment(currentTree);
        if (segmentParam && segmentParam.value !== null) {
            accumulatedParams = {
                ...accumulatedParams,
                [segmentParam.param]: segmentParam.value
            };
        }
        // The first layout encountered is the root layout: stop there.
        if (typeof layout !== 'undefined') {
            return accumulatedParams;
        }
        if (!parallelRoutes.children) {
            // This should really be an error but there are bugs in Turbopack that
            // cause the _not-found LoaderTree to not have any layouts. For
            // rootParams sake this is somewhat irrelevant when you are not
            // customizing the 404 page.
            // TODO update rootParams to make all params optional if
            // `/app/not-found.tsx` is defined
            return accumulatedParams;
        }
        currentTree = parallelRoutes.children;
    }
}
/**
 * Builds the element for a boundary file convention (not-found / forbidden /
 * unauthorized) together with its resolved file path. In dev, when the segment
 * explorer is enabled, the element is wrapped in a SegmentViewNode so the
 * boundary shows up in the segment view.
 *
 * Returns a `[element, pagePath]` tuple; `element` is undefined when no
 * Component was provided for the convention.
 */
async function createBoundaryConventionElement({ ctx, conventionName, Component, styles, tree }) {
    const { createElement, Fragment, SegmentViewNode } = ctx.componentMod;
    const isSegmentViewEnabled = Boolean(process.env.__NEXT_DEV_SERVER);
    const projectDir = (process.env.NEXT_RUNTIME === 'edge' ? process.env.__NEXT_EDGE_PROJECT_DIR : ctx.renderOpts.dir) || '';
    const pagePath = (0, _segmentexplorerpath.getConventionPathByType)(tree, projectDir, conventionName);
    let element;
    if (Component) {
        element = createElement(Fragment, null, createElement(Component, null), styles);
    }
    let wrappedElement = element;
    if (isSegmentViewEnabled && element) {
        wrappedElement = createElement(SegmentViewNode, {
            key: cacheNodeKey + '-' + conventionName,
            type: conventionName,
            // TODO: Discovered when moving to `createElement`.
            // `SegmentViewNode` doesn't support undefined `pagePath`
            pagePath: pagePath
        }, element);
    }
    return [
        wrappedElement,
        pagePath
    ];
}
/**
 * Assembles the CacheNodeSeedData tuple for a segment:
 * `[rsc, parallelRoutes, head (always null here), isPossiblyPartialResponse,
 * varyParams thenable | null]`.
 */
function createSeedData(ctx, rsc, parallelRoutes, loading, isPossiblyPartialResponse, isRuntimePrefetchable, varyParamsAccumulator) {
    const { createElement } = ctx.componentMod;
    // When this segment is NOT runtime-prefetchable, delay it until the Static
    // stage by wrapping the node in a promise. This allows runtime-prefetchable
    // segments (the lower tree) to render first during EarlyStatic, so their
    // runtime data resolves in EarlyRuntime where sync IO can be checked.
    // React will suspend on the thenable and resume when the stage advances.
    if (!isRuntimePrefetchable) {
        const workUnitStore = _workunitasyncstorageexternal.workUnitAsyncStorage.getStore();
        // Only 'request' and 'prerender-runtime' work units carry staged
        // rendering; all other work-unit types are left untouched.
        const isStagedWorkUnit = workUnitStore && (workUnitStore.type === 'request' || workUnitStore.type === 'prerender-runtime');
        if (isStagedWorkUnit) {
            const stagedRendering = workUnitStore.stagedRendering;
            if (stagedRendering) {
                const deferredRsc = rsc;
                rsc = stagedRendering.waitForStage(_stagedrendering.RenderStage.Static).then(()=>deferredRsc);
            }
        }
    }
    if (loading !== null) {
        // If a loading.tsx boundary is present, wrap the component data in an
        // additional context provider to pass the loading data to the next
        // set of children.
        // NOTE: The reason this is a separate wrapper from LayoutRouter is
        // because not all segments render a LayoutRouter component, e.g. the
        // root segment.
        rsc = createElement(ctx.componentMod.LoadingBoundaryProvider, {
            loading: loading,
            children: rsc
        });
    }
    return [
        rsc,
        parallelRoutes,
        null,
        isPossiblyPartialResponse,
        varyParamsAccumulator ? (0, _varyparams.getVaryParamsThenable)(varyParamsAccumulator) : null
    ];
}
//# sourceMappingURL=create-component-tree.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,20 @@
import type { ErrorInfo } from 'react';
declare global {
    // Optional global hook the Next.js runtime may install to receive
    // unhandled errors — TODO confirm installer against the runtime setup.
    var __next_log_error__: undefined | ((err: unknown) => void);
}
/** Error handler used during the React Server (RSC) render; returns an error digest or undefined. */
type RSCErrorHandler = (err: unknown) => string | undefined;
/** Error handler used during SSR/HTML rendering; receives optional React error info. */
type SSRErrorHandler = (err: unknown, errorInfo?: ErrorInfo) => string | undefined;
/** An Error that has been assigned a stable digest (used to identify it across environments). */
export type DigestedError = Error & {
    digest: string;
    environmentName?: string;
};
/**
 * Returns a digest for well-known Next.js errors, otherwise `undefined`. If a
 * digest is returned this also means that the error does not need to be
 * reported.
 */
export declare function getDigestForWellKnownError(error: unknown): string | undefined;
/** Creates the error handler passed to the React Flight (RSC) renderer. */
export declare function createReactServerErrorHandler(shouldFormatError: boolean, isBuildTimePrerendering: boolean, reactServerErrors: Map<string, DigestedError>, onReactServerRenderError: (err: DigestedError, silenceLog: boolean) => void, spanToRecordOn?: any): RSCErrorHandler;
/** Creates the error handler passed to the HTML (SSR) renderer. */
export declare function createHTMLErrorHandler(shouldFormatError: boolean, isBuildTimePrerendering: boolean, reactServerErrors: Map<string, DigestedError>, allCapturedErrors: Array<unknown>, onHTMLRenderSSRError: (err: DigestedError, errorInfo?: ErrorInfo) => void, spanToRecordOn?: any): SSRErrorHandler;
/** True when the error is not one of Next.js' internal control-flow errors. */
export declare function isUserLandError(err: any): boolean;
export {};

View File

@@ -0,0 +1,189 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
0 && (module.exports = {
createHTMLErrorHandler: null,
createReactServerErrorHandler: null,
getDigestForWellKnownError: null,
isUserLandError: null
});
function _export(target, all) {
for(var name in all)Object.defineProperty(target, name, {
enumerable: true,
get: all[name]
});
}
// Register the module's public exports as lazy getters.
_export(exports, {
    createHTMLErrorHandler: function() {
        return createHTMLErrorHandler;
    },
    createReactServerErrorHandler: function() {
        return createReactServerErrorHandler;
    },
    getDigestForWellKnownError: function() {
        return getDigestForWellKnownError;
    },
    isUserLandError: function() {
        return isUserLandError;
    }
});
const _stringhash = /*#__PURE__*/ _interop_require_default(require("next/dist/compiled/string-hash"));
const _formatservererror = require("../../lib/format-server-error");
const _tracer = require("../lib/trace/tracer");
const _pipereadable = require("../pipe-readable");
const _bailouttocsr = require("../../shared/lib/lazy-dynamic/bailout-to-csr");
const _hooksservercontext = require("../../client/components/hooks-server-context");
const _isnextroutererror = require("../../client/components/is-next-router-error");
const _dynamicrendering = require("./dynamic-rendering");
const _iserror = require("../../lib/is-error");
const _errortelemetryutils = require("../../lib/error-telemetry-utils");
const _reactlargeshellerror = require("./react-large-shell-error");
const _instantvalidationerror = require("./instant-validation/instant-validation-error");
// Normalizes a required module to ESM shape: modules already marked
// `__esModule` pass through unchanged; anything else (including null or
// undefined) is wrapped as the `default` export. (SWC interop helper.)
function _interop_require_default(obj) {
    if (obj && obj.__esModule) {
        return obj;
    }
    return {
        default: obj
    };
}
/**
 * Returns the digest of well-known Next.js control-flow errors, otherwise
 * `undefined`. A returned digest also means the error needs no reporting:
 * - CSR bailout: we're deliberately falling back to client rendering.
 * - Router navigation errors (redirect / notFound etc.).
 * - DynamicServerError: stops a non-PPR static render and marks the page
 *   dynamic — expected, not actionable for the user.
 * - Prerender-interrupted and instant-validation errors.
 */
function getDigestForWellKnownError(error) {
    const wellKnownChecks = [
        _bailouttocsr.isBailoutToCSRError,
        _isnextroutererror.isNextRouterError,
        _hooksservercontext.isDynamicServerError,
        _dynamicrendering.isPrerenderInterruptedError,
        _instantvalidationerror.isInstantValidationError
    ];
    for (const isWellKnown of wellKnownChecks) {
        if (isWellKnown(error)) {
            return error.digest;
        }
    }
    return undefined;
}
/**
 * Creates the error handler passed to the React Flight (RSC) renderer.
 *
 * The returned handler produces a digest string for each thrown value (React
 * transports the digest to the client instead of the raw error), or returns
 * undefined when the error should be ignored. Genuine errors are digested,
 * deduplicated into `reactServerErrors`, optionally dev-formatted, recorded on
 * a tracing span, and reported via `onReactServerRenderError`.
 */
function createReactServerErrorHandler(shouldFormatError, isBuildTimePrerendering, reactServerErrors, onReactServerRenderError, spanToRecordOn) {
    return (thrownValue)=>{
        var _err_message;
        // React may "throw" plain strings; hash them directly into a digest.
        if (typeof thrownValue === 'string') {
            // TODO-APP: look at using webcrypto instead. Requires a promise to be awaited.
            return (0, _stringhash.default)(thrownValue).toString();
        }
        // If the response was closed, we don't need to log the error.
        if ((0, _pipereadable.isAbortError)(thrownValue)) return;
        // Well-known Next.js control-flow errors reuse their own digest and
        // need no reporting.
        const digest = getDigestForWellKnownError(thrownValue);
        if (digest) {
            return digest;
        }
        if ((0, _reactlargeshellerror.isReactLargeShellError)(thrownValue)) {
            // TODO: Aggregate
            console.error(thrownValue);
            return undefined;
        }
        let err = (0, _iserror.getProperError)(thrownValue);
        let silenceLog = false;
        // If the error already has a digest, respect the original digest,
        // so it won't get re-generated into another new error.
        if (err.digest) {
            if (process.env.NODE_ENV === 'production' && reactServerErrors.has(err.digest)) {
                // This error is likely an obfuscated error from another react-server
                // environment (e.g. 'use cache'). We recover the original error here
                // for reporting purposes.
                err = reactServerErrors.get(err.digest);
                // We don't log it again though, as it was already logged in the
                // original environment.
                silenceLog = true;
            } else {
            // Either we're in development (where we want to keep the transported
            // error with environmentName), or the error is not in reactServerErrors
            // but has a digest from other means. Keep the error as-is.
            }
        } else {
            // No digest yet: derive one from the message + stack, tagged with
            // the telemetry error code.
            err.digest = (0, _errortelemetryutils.createDigestWithErrorCode)(err, // TODO-APP: look at using webcrypto instead. Requires a promise to be awaited.
            (0, _stringhash.default)(err.message + (err.stack || '')).toString());
        }
        // @TODO by putting this here and not at the top it is possible that
        // we don't error the build in places we actually expect to
        if (!reactServerErrors.has(err.digest)) {
            reactServerErrors.set(err.digest, err);
        }
        // Format server errors in development to add more helpful error messages
        if (shouldFormatError) {
            (0, _formatservererror.formatServerError)(err);
        }
        // Don't log the suppressed error during export
        if (!(isBuildTimePrerendering && (err == null ? void 0 : (_err_message = err.message) == null ? void 0 : _err_message.includes('The specific message is omitted in production builds to avoid leaking sensitive details.')))) {
            // Record exception on the provided span if available, otherwise try active span.
            const span = spanToRecordOn ?? (0, _tracer.getTracer)().getActiveScopeSpan();
            if (span) {
                span.recordException(err);
                span.setAttribute('error.type', err.name);
                span.setStatus({
                    code: _tracer.SpanStatusCode.ERROR,
                    message: err.message
                });
            }
            onReactServerRenderError(err, silenceLog);
        }
        return err.digest;
    };
}
/**
 * Creates the error handler passed to the HTML (SSR) renderer.
 *
 * Mirrors `createReactServerErrorHandler`, but additionally collects every
 * thrown value into `allCapturedErrors` and — because the SSR pass re-throws
 * errors that already surfaced during the RSC render — only reports errors
 * that did NOT originate in react-server (tracked via `isSSRError`).
 */
function createHTMLErrorHandler(shouldFormatError, isBuildTimePrerendering, reactServerErrors, allCapturedErrors, onHTMLRenderSSRError, spanToRecordOn) {
    return (thrownValue, errorInfo)=>{
        var _err_message;
        if ((0, _reactlargeshellerror.isReactLargeShellError)(thrownValue)) {
            // TODO: Aggregate
            console.error(thrownValue);
            return undefined;
        }
        // Assume an SSR origin until the digest proves it came from react-server.
        let isSSRError = true;
        allCapturedErrors.push(thrownValue);
        // If the response was closed, we don't need to log the error.
        if ((0, _pipereadable.isAbortError)(thrownValue)) return;
        const digest = getDigestForWellKnownError(thrownValue);
        if (digest) {
            return digest;
        }
        const err = (0, _iserror.getProperError)(thrownValue);
        // If the error already has a digest, respect the original digest,
        // so it won't get re-generated into another new error.
        if (err.digest) {
            if (reactServerErrors.has(err.digest)) {
                // This error is likely an obfuscated error from react-server.
                // We recover the original error here.
                thrownValue = reactServerErrors.get(err.digest);
                isSSRError = false;
            } else {
            // The error is not from react-server but has a digest
            // from other means so we don't need to produce a new one
            }
        } else {
            // No digest yet: derive one from message + component stack (falling
            // back to the error's own stack), tagged with the telemetry code.
            err.digest = (0, _errortelemetryutils.createDigestWithErrorCode)(err, (0, _stringhash.default)(err.message + ((errorInfo == null ? void 0 : errorInfo.componentStack) || err.stack || '')).toString());
        }
        // Format server errors in development to add more helpful error messages
        if (shouldFormatError) {
            (0, _formatservererror.formatServerError)(err);
        }
        // Don't log the suppressed error during export
        if (!(isBuildTimePrerendering && (err == null ? void 0 : (_err_message = err.message) == null ? void 0 : _err_message.includes('The specific message is omitted in production builds to avoid leaking sensitive details.')))) {
            // HTML errors contain RSC errors as well, filter them out before reporting
            if (isSSRError) {
                // Record exception on the provided span if available, otherwise try active span.
                const span = spanToRecordOn ?? (0, _tracer.getTracer)().getActiveScopeSpan();
                if (span) {
                    span.recordException(err);
                    span.setAttribute('error.type', err.name);
                    span.setStatus({
                        code: _tracer.SpanStatusCode.ERROR,
                        message: err.message
                    });
                }
                onHTMLRenderSSRError(err, errorInfo);
            }
        }
        return err.digest;
    };
}
/**
 * True when the error originated in user code, i.e. it is none of Next.js'
 * internal control-flow errors (response abort, CSR bailout, or router
 * navigation errors such as redirect/notFound).
 */
function isUserLandError(err) {
    if ((0, _pipereadable.isAbortError)(err)) {
        return false;
    }
    if ((0, _bailouttocsr.isBailoutToCSRError)(err)) {
        return false;
    }
    return !(0, _isnextroutererror.isNextRouterError)(err);
}
//# sourceMappingURL=create-error-handler.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,5 @@
import type { LoaderTree } from '../lib/app-dir-module';
import { type FlightRouterState, type PrefetchHints } from '../../shared/lib/app-router-types';
import type { GetDynamicParamFromSegment } from './app-render';
/**
 * Builds a `FlightRouterState` from the app-router loader tree, unioning in
 * build-time prefetch hints (when `hintTree` is non-null) and resolving
 * dynamic segments via `getDynamicParamFromSegment`. `searchParams` is
 * folded into page segments' cache keys.
 */
export declare function createFlightRouterStateFromLoaderTree(loaderTree: LoaderTree, hintTree: PrefetchHints | null, getDynamicParamFromSegment: GetDynamicParamFromSegment, searchParams: any): Promise<FlightRouterState>;
/**
 * Variant used for route-tree prefetch requests: same traversal as
 * `createFlightRouterStateFromLoaderTree`, but deliberately omits search
 * params from page segment cache keys (the client cache handles those).
 */
export declare function createRouteTreePrefetch(loaderTree: LoaderTree, hintTree: PrefetchHints | null, getDynamicParamFromSegment: GetDynamicParamFromSegment): Promise<FlightRouterState>;

View File

@@ -0,0 +1,102 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
0 && (module.exports = {
createFlightRouterStateFromLoaderTree: null,
createRouteTreePrefetch: null
});
function _export(target, all) {
for(var name in all)Object.defineProperty(target, name, {
enumerable: true,
get: all[name]
});
}
// Wire the public API onto `exports` as lazy getters (see `_export`).
_export(exports, {
    createFlightRouterStateFromLoaderTree: function() {
        return createFlightRouterStateFromLoaderTree;
    },
    createRouteTreePrefetch: function() {
        return createRouteTreePrefetch;
    }
});
const _approutertypes = require("../../shared/lib/app-router-types");
const _segment = require("../../shared/lib/segment");
// Recursive worker shared by both public entry points. Walks the loader tree
// (and, in parallel, the optional build-time hint tree) and produces the
// FlightRouterState tuple for this segment: indices 0 (segment), 1 (children)
// and 4 (prefetch hint bitmask, only when non-zero) are populated; 2 and 3
// are left as holes. `didFindRootLayout` is true once an ancestor has
// already claimed the IsRootLayout bit.
async function createFlightRouterStateFromLoaderTreeImpl(loaderTree, hintTree, getDynamicParamFromSegment, searchParams, didFindRootLayout) {
    const [segment, parallelRoutes, { layout, loading, page }] = loaderTree;
    // Dynamic segments are replaced by their resolved tree representation;
    // static segments pass through unchanged.
    const dynamicParam = getDynamicParamFromSegment(loaderTree);
    const treeSegment = dynamicParam ? dynamicParam.treeSegment : segment;
    const segmentTree = [
        (0, _segment.addSearchParamsIfPageSegment)(treeSegment, searchParams),
        {}
    ];
    // Load the layout or page module to check for unstable_instant config
    const mod = layout ? await layout[0]() : page ? await page[0]() : undefined;
    const instantConfig = mod ? mod.unstable_instant : undefined;
    // Bitmask of PrefetchHint flags accumulated for this segment.
    let prefetchHints = 0;
    // Union in the precomputed build-time hints (e.g. segment inlining
    // decisions) if available. When hints are not available (e.g. dev mode or
    // if prefetch-hints.json was not generated), we fall through and still
    // compute the other hints below. In the future this should be a build
    // error, but for now we gracefully degrade.
    //
    // TODO: Move more of the hints computation (IsRootLayout, instant config,
    // loading boundary detection) into the build-time measurement step in
    // collectPrefetchHints, so this function only needs to union the
    // precomputed bitmask rather than re-derive hints on every render.
    if (hintTree !== null) {
        prefetchHints |= hintTree.hints;
    }
    // Mark the first segment that has a layout as the "root" layout
    if (!didFindRootLayout && typeof layout !== 'undefined') {
        didFindRootLayout = true;
        prefetchHints |= _approutertypes.PrefetchHint.IsRootLayout;
    }
    if (instantConfig && typeof instantConfig === 'object') {
        prefetchHints |= _approutertypes.PrefetchHint.SubtreeHasInstant;
        if (instantConfig.prefetch === 'runtime') {
            prefetchHints |= _approutertypes.PrefetchHint.HasRuntimePrefetch;
        }
    }
    // Check if this segment has a loading boundary
    if (loading) {
        prefetchHints |= _approutertypes.PrefetchHint.SegmentHasLoadingBoundary;
    }
    const children = {};
    for(const parallelRouteKey in parallelRoutes){
        var _hintTree_slots;
        // Look up the child hint node by parallel route key, traversing the
        // hint tree in parallel with the loader tree.
        const childHintNode = (hintTree == null ? void 0 : (_hintTree_slots = hintTree.slots) == null ? void 0 : _hintTree_slots[parallelRouteKey]) ?? null;
        // NOTE: children are built sequentially (awaited one at a time), so
        // `didFindRootLayout` passed here reflects only ancestors, by value.
        const child = await createFlightRouterStateFromLoaderTreeImpl(parallelRoutes[parallelRouteKey], childHintNode, getDynamicParamFromSegment, searchParams, didFindRootLayout);
        // Propagate subtree flags from children
        if (child[4] !== undefined) {
            prefetchHints |= child[4] & (_approutertypes.PrefetchHint.SubtreeHasInstant | _approutertypes.PrefetchHint.SubtreeHasLoadingBoundary);
            // If a child has a loading boundary (either directly or in its subtree),
            // propagate that as SubtreeHasLoadingBoundary to this segment.
            if (child[4] & (_approutertypes.PrefetchHint.SegmentHasLoadingBoundary | _approutertypes.PrefetchHint.SubtreeHasLoadingBoundary)) {
                prefetchHints |= _approutertypes.PrefetchHint.SubtreeHasLoadingBoundary;
            }
        }
        children[parallelRouteKey] = child;
    }
    segmentTree[1] = children;
    // Only materialize slot 4 when at least one hint bit is set, keeping the
    // serialized tuple short for the common hint-free case.
    if (prefetchHints !== 0) {
        segmentTree[4] = prefetchHints;
    }
    return segmentTree;
}
// Public entry point: builds the FlightRouterState for a render, including
// `searchParams` in page segment keys. Starts with no root layout found.
async function createFlightRouterStateFromLoaderTree(loaderTree, hintTree, getDynamicParamFromSegment, searchParams) {
    return createFlightRouterStateFromLoaderTreeImpl(loaderTree, hintTree, getDynamicParamFromSegment, searchParams, /* didFindRootLayout */ false);
}
// Public entry point for route-tree prefetch requests. Search params are
// intentionally NOT added to the page segment's cache key here: they do not
// affect the structure of the route, and the client cache has its own logic
// for handling search params.
async function createRouteTreePrefetch(loaderTree, hintTree, getDynamicParamFromSegment) {
    const emptySearchParams = {};
    return createFlightRouterStateFromLoaderTreeImpl(loaderTree, hintTree, getDynamicParamFromSegment, emptySearchParams, /* didFindRootLayout */ false);
}
//# sourceMappingURL=create-flight-router-state-from-loader-tree.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1 @@
export declare const isCsrfOriginAllowed: (originDomain: string, allowedOrigins?: string[]) => boolean;

View File

@@ -0,0 +1,86 @@
// micromatch is only available at node runtime, so it cannot be used here since the code path that calls this function
// can be run from edge. This is a simple implementation that safely achieves the required functionality.
// the goal is to match the functionality for remotePatterns as defined here -
// https://nextjs.org/docs/app/api-reference/components/image#remotepatterns
// TODO - retrofit micromatch to work in edge and use that instead
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
Object.defineProperty(exports, "isCsrfOriginAllowed", {
enumerable: true,
get: function() {
return isCsrfOriginAllowed;
}
});
// Matches a DNS name against a wildcard pattern where '*' stands for exactly
// one subdomain label and '**' (valid only as the leftmost label) stands for
// one or more labels. A bare '*' or '**' pattern never matches, so wildcards
// can only ever stand in for subdomains, not whole domains.
function matchWildcardDomain(domain, pattern) {
    // DNS names are case-insensitive per RFC 1035.
    // ASCII-only lowercasing avoids unicode case-mapping surprises.
    const toAsciiLower = (value)=>value.replace(/[A-Z]/g, (c)=>c.toLowerCase());
    const domainParts = toAsciiLower(domain).split('.');
    const patternParts = toAsciiLower(pattern).split('.');
    if (patternParts.length < 1) {
        // An empty pattern cannot match anything.
        return false;
    }
    if (domainParts.length < patternParts.length) {
        // The domain has fewer labels than the pattern requires.
        return false;
    }
    // Prevent wildcards from matching entire domains (e.g. '**' or '*').
    if (patternParts.length === 1 && (patternParts[0] === '*' || patternParts[0] === '**')) {
        return false;
    }
    // Walk both label lists from the right (TLD side) toward the left.
    let domainIndex = domainParts.length - 1;
    for(let patternIndex = patternParts.length - 1; patternIndex >= 0; patternIndex--, domainIndex--){
        const patternPart = patternParts[patternIndex];
        const domainPart = domainIndex >= 0 ? domainParts[domainIndex] : undefined;
        if (patternPart === '') {
            // Pattern labels must be non-empty; e.g. 'a..b' is invalid.
            return false;
        }
        if (patternPart === '*') {
            // Single wildcard consumes exactly one non-empty domain label.
            if (!domainPart) {
                return false;
            }
            continue;
        }
        if (patternPart === '**') {
            // The recursive wildcard is only valid as the leftmost label.
            if (patternIndex > 0) {
                return false;
            }
            // It swallows everything remaining, provided at least one label is left.
            return domainPart !== undefined;
        }
        // Literal labels must match exactly.
        if (domainPart !== patternPart) {
            return false;
        }
    }
    // Pattern exhausted: it is a match only if the domain is exhausted too.
    return domainIndex === -1;
}
// Returns true when `originDomain` is covered by `allowedOrigins`, either by
// a case-insensitive exact match or by a '*' / '**' wildcard pattern.
// Empty/undefined entries in `allowedOrigins` are ignored.
const isCsrfOriginAllowed = (originDomain, allowedOrigins = [])=>{
    // DNS names are case-insensitive per RFC 1035.
    // ASCII-only lowercasing avoids unicode case-mapping surprises.
    const normalizedOrigin = originDomain.replace(/[A-Z]/g, (c)=>c.toLowerCase());
    for (const allowedOrigin of allowedOrigins){
        if (!allowedOrigin) {
            continue;
        }
        const normalizedAllowed = allowedOrigin.replace(/[A-Z]/g, (c)=>c.toLowerCase());
        if (normalizedAllowed === normalizedOrigin) {
            return true;
        }
        // matchWildcardDomain lowercases its inputs itself, so the raw
        // (un-normalized) values can be passed through unchanged.
        if (matchWildcardDomain(originDomain, allowedOrigin)) {
            return true;
        }
    }
    return false;
};
//# sourceMappingURL=csrf-protection.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,8 @@
/**
* Compile-time switcher for debug channel operations.
*
* Simple re-export from the web implementation.
* A future change will add a conditional branch for node streams.
*/
export type { DebugChannelPair, DebugChannelServer, } from './debug-channel-server.web';
export { createDebugChannel, toNodeDebugChannel, } from './debug-channel-server.web';

Some files were not shown because too many files have changed in this diff Show More